Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- 05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-19062f93-c43f-40da-a8e7-aee672713bd11750887279735-2025_06_25-23.35.33.315/source.csv +0 -0
- 05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-9ff54a43-2a59-41a8-96bc-f7e46d5244651750887279734-2025_06_25-23.36.32.560/source.csv +3 -0
- 05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-cb92c7b2-f6e4-4d49-91cb-88397630081c1750964172563-2025_06_26-20.56.24.104/source.csv +0 -0
- 05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-cf821b01-6b29-41b7-8f83-b619919a05b21750839417809-2025_06_25-10.17.13.417/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-106a30cc-3f72-45f3-9927-7b65fe48a1281767368581712-2026_01_02-16.43.12.537/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-1a879c19-f454-4c6d-ab47-63d2934123911767867211896-2026_01_08-11.14.30.646/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-1e45f155-81cc-410e-818b-93e33f5637c71767610593907-2026_01_05-11.56.49.330/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-2512f40f-958c-4b3c-9d8f-16c4a1d22b551767694549505-2026_01_06-11.16.10.387/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-26ae656d-f995-4b35-a709-1f5ab8f828a11767630468773-2026_01_05-17.28.30.525/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-495b78a3-8c04-4965-88fa-979c320df6561767630491949-2026_01_05-17.28.24.506/source.csv +3 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-76c4fb2d-2203-42b8-92ab-0caa4cb609801767555314125-2026_01_04-20.35.48.386/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-805359c7-8c4e-40bd-8498-d35dee25aba61767776424373-2026_01_07-10.01.10.541/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-886b09eb-c0c8-475a-a603-3035b8c283c31767617997118-2026_01_05-14.00.32.126/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-908d3d62-c6b7-4344-9d22-e32a888120bb1767803103606-2026_01_07-17.25.13.88/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-9c8a2990-0ff1-494a-9fe7-635f887f66211767803500134-2026_01_07-17.32.14.102/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-9ec11139-792c-4543-b766-eaf97004d3091767717466518-2026_01_06-17.38.21.548/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-c9ffd4ad-ae84-4e96-99d2-7212edb4dcc21767023143212-2025_12_29-16.46.49.639/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-df110f6f-a3c9-4641-90da-f30934c027e01767707463233-2026_01_06-14.51.27.372/source.csv +64 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-e69fee36-85ea-4c2a-bf7e-90b6490333df1767532068697-2026_01_04-14.08.44.100/source.csv +0 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-f4bf9883-3801-446a-98c6-413295d94c701767091439743-2025_12_30-11.44.46.167/source.csv +360 -0
- 58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-f9c548ba-e7ae-418f-bcd9-3b3e771f5fa01767372765713-2026_01_02-17.53.20.698/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-08cafcbe-d0e5-4505-ac95-8b9050d84d731759228460178-2025_09_30-12.34.56.10/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-10196e97-c322-40bf-836a-16ee811908931758807420822-2025_09_25-15.37.22.442/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1e70ca6b-f2dc-4f0c-81bb-b7d403b4df271752242192153-2025_07_11-15.56.51.266/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1f51b8ea-81c1-4db7-8702-1416f8c1c0cc1751376377945-2025_07_01-15.27.44.831/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-242fa472-b5db-492d-8b66-f482468772b21757500459062-2025_09_10-12.34.42.52/source.csv +4 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-27fd9e5e-b562-49ba-9321-8ed11ebad94f1756718814603-2025_09_01-11.27.16.489/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2b01d9ad-2c11-4b6b-bc1b-f335a6c7dd4a1750840473876-2025_06_25-10.34.49.55/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2cce3a90-32a5-4d8b-8cb0-10445a2ee7a71754054463184-2025_08_01-15.21.32.127/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2e25d757-859b-4fde-ba77-792b0eb397df1759579674644-2025_10_04-14.09.15.386/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2f4869ae-f0d3-4e60-80d2-8655e52f1ea31751064760332-2025_06_28-00.52.51.957/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3ba207b9-2f18-4919-a6bb-bebae1f850441758203079280-2025_09_18-15.45.09.509/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3ebbac58-2f0e-41b5-a15d-9a2b6b0c20ab1758725119572-2025_09_24-16.46.25.34/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-411c0b26-1d5f-4194-8163-38afd5728d3d1756886238975-2025_09_03-09.59.08.217/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-43fbfe2e-f9a4-4bb4-acf4-2bdbf37810851757006149083-2025_09_04-19.16.32.851/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-4b6051ce-1cfc-4dca-874b-0d0d7270d33f1753454394749-2025_07_25-16.42.21.379/source.csv +125 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-50d32311-0df8-4235-b597-7d69f06b72151752666637262-2025_07_16-13.50.55.636/source.csv +143 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-527e6f79-68be-4672-b584-2b15b1cf78281754060417734-2025_08_01-17.00.27.872/source.csv +1141 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5e6eb49e-ed8b-4688-9226-2f7ce3bdd3501759657457038-2025_10_05-11.44.58.692/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-614215de-0f98-45e4-a49e-9e7d7e57cff91757422920029-2025_09_09-15.02.34.03/source.csv +198 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-629e4b25-2201-4663-a7f2-936116295b151757499356136-2025_09_10-12.16.59.652/source.csv +3 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6488ed25-a64b-4b96-ae90-de59d09eaf2d1759672300294-2025_10_05-15.52.33.721/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-65955a28-c516-4123-abaf-6681358bdea31753192468219-2025_07_22-15.55.21.96/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6aec90c8-8d95-4bad-afcb-92c28c6ff5241753889052956-2025_07_30-17.24.41.914/source.csv +33 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-761e4728-7320-4c5d-bc55-ad231839bb781753709851371-2025_07_28-15.39.08.844/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-76d20b24-d9be-4730-bca0-3f5c7d0460a01758996810958-2025_09_27-20.14.06.310/source.csv +0 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c8743a1-f55a-4e45-b7aa-0b3df3c9f3c91752835699286-2025_07_18-12.49.02.294/source.csv +394 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7ec41ffb-ddad-4e1d-b171-0513171669281757061617849-2025_09_05-10.41.46.448/source.csv +44 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-92e279fe-1f26-4694-b71c-d8f950b76bd11757499356165-2025_09_10-12.16.58.191/source.csv +463 -0
- 927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9c2d9ac2-2076-4ff2-8381-5264acd089541759350296590-2025_10_01-22.25.31.836/source.csv +8 -0
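Each recording in the listing above follows the same layout: a 64-character contributor hash, a session directory named `crowd-code-<uuid><ms-timestamp>-<date>-<time>`, and a single `source.csv` event log. A minimal sketch for enumerating sessions in a local checkout (the clone path is an assumption):

```python
# Enumerate crowd-code recording sessions in a local clone of this dataset.
from pathlib import Path

root = Path("crowd-code-dataset")  # hypothetical local clone path
for csv_path in sorted(root.glob("*/crowd-code-*/source.csv")):
    contributor = csv_path.parts[-3]  # 64-char contributor hash
    session = csv_path.parts[-2]      # crowd-code-<uuid><timestamp>-<date>-<time>
    print(contributor[:8], session, f"{csv_path.stat().st_size} bytes")
```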
05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-19062f93-c43f-40da-a8e7-aee672713bd11750887279735-2025_06_25-23.35.33.315/source.csv
ADDED
The diff for this file is too large to render.
05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-9ff54a43-2a59-41a8-96bc-f7e46d5244651750887279734-2025_06_25-23.36.32.560/source.csv
ADDED
@@ -0,0 +1,3 @@
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
1,18,"scripts/file_duplicate_checker.py",0,0,"import os\nfrom collections import defaultdict\nfrom tqdm import tqdm\n\ndef find_duplicate_filenames(root_dir):\n filenames = defaultdict(list)\n file_count = 0\n\n # Use tqdm with manual update and no percentage/ETA bar\n pbar = tqdm(desc=""Files scanned"", unit=""file"", dynamic_ncols=True, bar_format=""{desc}: {n_fmt}"")\n\n # Walk the directory recursively\n for dirpath, _, files in os.walk(root_dir):\n for file in files:\n full_path = os.path.join(dirpath, file)\n if os.path.isfile(full_path):\n filenames[file].append(full_path)\n file_count += 1\n pbar.update(1)\n\n pbar.close()\n\n # Print duplicates\n duplicates = {name: paths for name, paths in filenames.items() if len(paths) > 1}\n if duplicates:\n print(""\nDuplicate filenames found:\n"")\n for name, paths in duplicates.items():\n print(f""Filename: {name}"")\n for path in paths:\n print(f"" - {path}"")\n print()\n else:\n print(""\nNo duplicate filenames found."")\n\nif __name__ == ""__main__"":\n import sys\n if len(sys.argv) < 2:\n print(""Usage: python find_duplicates.py <directory_path>"")\n else:\n find_duplicate_filenames(sys.argv[1])\n\n",python,tab
2,521,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"11:36:32 PM [info] Activating crowd-code\n11:36:32 PM [info] Welcome back tum_ind3695. Your user-id is '507ab0ec0dfe0c18ad7778dd15e072f92367194c94623114de802c8ed9c52e20'. Happy coding!\n11:36:32 PM [info] Recording started\n",Log,tab
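The header row of each `source.csv` defines the event schema: `Sequence`, `Time` (milliseconds since the recording started), `File`, `RangeOffset`, `RangeLength`, `Text`, `Language`, and `Type` (values seen in this commit include `tab`, `content`, `selection_mouse`, `selection_command`, `terminal_command`, and `terminal_output`). A minimal loading sketch with the standard `csv` module; treating the literal two-character `\n` sequences inside `Text` as escaped newlines is an assumption based on the rows above:

```python
# Load one crowd-code source.csv into per-event dicts.
import csv

def load_events(path):
    with open(path, newline="", encoding="utf-8") as f:
        for row in csv.DictReader(f):
            yield {
                "seq": int(row["Sequence"]),
                "time_ms": int(row["Time"]),
                "file": row["File"],
                "offset": int(row["RangeOffset"]),
                "length": int(row["RangeLength"]),
                # Assumption: newlines are stored as the literal characters '\n'.
                "text": row["Text"].replace("\\n", "\n"),
                "language": row["Language"],
                "type": row["Type"],
            }

for ev in load_events("source.csv"):
    print(ev["seq"], ev["time_ms"], ev["type"], ev["file"])
```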
05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-cb92c7b2-f6e4-4d49-91cb-88397630081c1750964172563-2025_06_26-20.56.24.104/source.csv
ADDED
The diff for this file is too large to render.
05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-cf821b01-6b29-41b7-8f83-b619919a05b21750839417809-2025_06_25-10.17.13.417/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-106a30cc-3f72-45f3-9927-7b65fe48a1281767368581712-2026_01_02-16.43.12.537/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-1a879c19-f454-4c6d-ab47-63d2934123911767867211896-2026_01_08-11.14.30.646/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-1e45f155-81cc-410e-818b-93e33f5637c71767610593907-2026_01_05-11.56.49.330/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-2512f40f-958c-4b3c-9d8f-16c4a1d22b551767694549505-2026_01_06-11.16.10.387/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-26ae656d-f995-4b35-a709-1f5ab8f828a11767630468773-2026_01_05-17.28.30.525/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-495b78a3-8c04-4965-88fa-979c320df6561767630491949-2026_01_05-17.28.24.506/source.csv
ADDED
@@ -0,0 +1,3 @@
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
2,464,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:28:24 PM [info] Activating crowd-code\n5:28:24 PM [info] Recording started\n5:28:24 PM [info] Initializing git provider using file system watchers...\n5:28:24 PM [error] Not a git repository: EntryNotFound (FileSystemError): Error: ENOENT: no such file or directory, stat '/fast/project/HFMI_SynergyUnit/tab_model/data/qwen/.git'\n",Log,tab
3,2064,"extension-output-pdoom-org.crowd-code-#1-crowd-code",329,0,"5:28:26 PM [info] Retrying git provider initialization...\n5:28:26 PM [error] Not a git repository: EntryNotFound (FileSystemError): Error: ENOENT: no such file or directory, stat '/fast/project/HFMI_SynergyUnit/tab_model/data/qwen/.git'\n",Log,content
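Rows of type `content`, like sequence 3 above, read as incremental edits: `Text` replaces `RangeLength` characters starting at `RangeOffset` (here, a retry message appended at offset 329 of the log buffer). A replay sketch under that assumed semantics, reusing `load_events` from the previous snippet; `tab` rows with a non-empty `Text` are treated as full-buffer snapshots:

```python
# Reconstruct one file's final contents by replaying its events.
# Assumed semantics: 'tab' rows snapshot the buffer, 'content' rows
# splice ev["text"] in over [offset, offset + length).
def replay(events, target_file):
    buf = ""
    for ev in events:
        if ev["file"] != target_file:
            continue
        if ev["type"] == "tab" and ev["text"]:
            buf = ev["text"]
        elif ev["type"] == "content":
            start, end = ev["offset"], ev["offset"] + ev["length"]
            buf = buf[:start] + ev["text"] + buf[end:]
    return buf

log = replay(load_events("source.csv"),
             "extension-output-pdoom-org.crowd-code-#1-crowd-code")
print(log)
```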
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-76c4fb2d-2203-42b8-92ab-0caa4cb609801767555314125-2026_01_04-20.35.48.386/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-805359c7-8c4e-40bd-8498-d35dee25aba61767776424373-2026_01_07-10.01.10.541/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-886b09eb-c0c8-475a-a603-3035b8c283c31767617997118-2026_01_05-14.00.32.126/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-908d3d62-c6b7-4344-9d22-e32a888120bb1767803103606-2026_01_07-17.25.13.88/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-9c8a2990-0ff1-494a-9fe7-635f887f66211767803500134-2026_01_07-17.32.14.102/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-9ec11139-792c-4543-b766-eaf97004d3091767717466518-2026_01_06-17.38.21.548/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-c9ffd4ad-ae84-4e96-99d2-7212edb4dcc21767023143212-2025_12_29-16.46.49.639/source.csv
ADDED
The diff for this file is too large to render.
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-df110f6f-a3c9-4641-90da-f30934c027e01767707463233-2026_01_06-14.51.27.372/source.csv
ADDED
@@ -0,0 +1,64 @@
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
1,4,"tab-model-eval/src/evaluation/sglang_eval.py",0,0,"import asyncio\nimport json\nimport os\nimport sys\nimport subprocess\nimport time\nimport wandb\nfrom dataclasses import dataclass, field\nfrom typing import Dict, Any, List, Optional\n\nimport httpx\nimport tyro\nfrom openai import AsyncOpenAI, BadRequestError\nfrom tqdm.asyncio import tqdm_asyncio\n\nclass LocalLogger:\n """"""A simple local logger that saves metrics to JSON files for later sync to wandb.""""""\n \n def __init__(self, log_dir: str, run_id: str, run_name: str, project: str, config: dict = None, tags: list = None):\n self.log_dir = os.path.join(log_dir, run_id)\n os.makedirs(self.log_dir, exist_ok=True)\n self.run_id = run_id\n self.run_name = run_name\n self.project = project\n self.config = config or {}\n self.tags = tags or []\n self.metrics_file = os.path.join(self.log_dir, ""metrics.jsonl"")\n \n # Save run metadata\n metadata_file = os.path.join(self.log_dir, ""metadata.json"")\n with open(metadata_file, ""w"") as f:\n json.dump({\n ""run_id"": run_id,\n ""run_name"": run_name,\n ""project"": project,\n ""config"": config,\n ""tags"": tags,\n ""created_at"": time.strftime(""%Y-%m-%dT%H:%M:%S"")\n }, f, indent=2)\n \n print(f""LocalLogger initialized. Logs will be saved to: {self.log_dir}"")\n \n def log(self, metrics: dict):\n """"""Append metrics to the JSONL file.""""""\n metrics_with_timestamp = {\n ""timestamp"": time.strftime(""%Y-%m-%dT%H:%M:%S""),\n **metrics\n }\n with open(self.metrics_file, ""a"") as f:\n f.write(json.dumps(metrics_with_timestamp) + ""\n"")\n print(f""Logged metrics to {self.metrics_file}: eval_step={metrics.get('eval_step', 'N/A')}"")\n \n def finish(self):\n """"""Called when logging is complete.""""""\n print(f""LocalLogger finished. All logs saved to: {self.log_dir}"")\n\n\n# ----------------------------\n# Argument definitions\n# ----------------------------\n@dataclass\nclass Args:\n # Eval-related\n wandb_project: str = ""llm-coding-agent""\n wandb_name: str = ""validation_set_eval""\n wandb_eval_type: str = ""next_action_validation_set""\n wandb_tags: list[str] = field(default_factory=lambda: [""val_mini"", ""judge_eval""])\n wandb_id: str | None = None\n wandb_group: str = ""debug""\n \n # Single-file mode (backward compatible)\n generations_file: str = """"\n evaluations_file: str = """"\n eval_step: int = 0\n \n # Batch mode: comma-separated lists of files and steps\n # When these are provided, they take precedence over single-file args\n generations_files: str = """" # Comma-separated list of generation files\n evaluations_files: str = """" # Comma-separated list of evaluation output files\n eval_steps: str = """" # Comma-separated list of eval steps (integers)\n \n limit: int = -1\n system_prompt_file: str = ""data/prompts/judge_system_prompt_v2.md""\n judge_name: str = ""default""\n judge_prompt_file: str = ""data/prompts/judge_prompt_v2.md""\n judge_prompt_file_with_context: str = ""data/prompts/judge_prompt_v2_with_context.md""\n\n # Local logging for offline mode\n use_local_logger: bool = False\n local_log_dir: str = ""data/eval/local_logs""\n\n # Server-related (sglang)\n judge_model_path: str = ""Qwen/Qwen3-Coder-30B-A3B-Instruct""\n server_host: str = ""0.0.0.0""\n server_port: int = 30000\n context_length: int = 40960\n problem_length: int = 40960\n api_key: str = ""EMPTY"" # sglang's OpenAI-compatible server ignores this value\n mem_fraction_static: float = 0.95\n tp_size: int = 1\n\n # Client-related\n temperature: float = 0.7\n top_p: float = 0.8\n 
presence_penalty: float = 1.5\n top_k: int = 20\n min_p: float = 0.0\n enable_thinking: bool = True\n\n # HTTP / client config\n concurrency: int = 16\n max_connections: int = 256\n keepalive: int = 60\n max_attempts: int = 6\n timeout: float = 30.0\n\n # Control whether to launch server from this script\n launch_server: bool = True\n # Extra args passed to `sglang.launch_server` if needed\n extra_server_args: Optional[List[str]] = None\n \n def get_eval_jobs(self) -> List[tuple[str, str, int]]:\n """"""\n Returns a list of (generations_file, evaluations_file, eval_step) tuples.\n If batch mode args are provided, uses those. Otherwise falls back to single-file mode.\n """"""\n if self.generations_files and self.evaluations_files and self.eval_steps:\n # Batch mode\n gen_files = [f.strip() for f in self.generations_files.split("","") if f.strip()]\n eval_files = [f.strip() for f in self.evaluations_files.split("","") if f.strip()]\n steps = [int(s.strip()) for s in self.eval_steps.split("","") if s.strip()]\n \n if not (len(gen_files) == len(eval_files) == len(steps)):\n raise ValueError(\n f""Batch mode requires equal-length lists for generations_files ({len(gen_files)}), ""\n f""evaluations_files ({len(eval_files)}), and eval_steps ({len(steps)})""\n )\n \n return list(zip(gen_files, eval_files, steps))\n elif self.generations_file and self.evaluations_file:\n # Single-file mode (backward compatible)\n return [(self.generations_file, self.evaluations_file, self.eval_step)]\n else:\n raise ValueError(\n ""Either provide single-file args (generations_file, evaluations_file) ""\n ""or batch args (generations_files, evaluations_files, eval_steps)""\n )\n\n\n# ----------------------------\n# Dataset helpers\n# ----------------------------\ndef load_dataset(filepath):\n with open(filepath, ""r"") as f:\n return json.loads(f.read())\n\n\ndef estimate_token_count(messages: List[Dict[str, str]]) -> int:\n """"""\n Rough estimate of token count for a list of messages.\n Assumes ~3 characters per token as a conservative estimate.\n """"""\n total_chars = sum(len(msg.get(""content"", """")) for msg in messages)\n return total_chars // 3\n\n\ndef filter_tasks_by_context_length(\n test_cases: List[Dict[str, Any]],\n system_prompt: str,\n prompt_template: str,\n max_context_length: int = 40960,\n problem_length: int = 40960,\n buffer_tokens: int = 512, # Reserve space for response\n include_context: bool = False,\n) -> tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:\n """"""\n Filter out test cases whose context would exceed the model's context length.\n Returns (valid_cases, skipped_cases)\n """"""\n valid_cases = []\n skipped_cases = []\n\n for tc in test_cases:\n # Estimate tokens for system prompt + context\n messages = [{""role"": ""system"", ""content"": system_prompt}]\n if include_context:\n messages.extend(tc[""context""])\n messages.append({""role"": ""user"", ""content"": prompt_template})\n estimated_tokens = estimate_token_count(messages)\n\n length = estimated_tokens + buffer_tokens\n if length <= max_context_length and length <= problem_length:\n valid_cases.append(tc)\n else:\n print(\n f""Skipping {tc['task_id']}: estimated {estimated_tokens} tokens (limit: {max_context_length}, problem_length: {problem_length})""\n )\n skipped_cases.append(\n {\n ""task_id"": tc[""task_id""],\n ""estimated_tokens"": estimated_tokens,\n ""reason"": ""context_too_long"",\n }\n )\n\n return valid_cases, skipped_cases\n\n\n# ----------------------------\n# Eval logic\n# 
----------------------------\nasync def evaluate_generated_command(\n client: AsyncOpenAI,\n sem: asyncio.Semaphore,\n test_case: Dict[str, Any],\n args: Args,\n system_prompt: str,\n prompt_template: str,\n include_context: bool,\n) -> Dict[str, Any]:\n """"""\n Handles a single evaluation task with concurrency control and retries.\n """"""\n async with sem:\n delay = 0.25\n\n if test_case.get(""error"", None) is not None:\n print(f""Returning failure object for task {test_case['task_id']} due to error"")\n return {\n ""task_id"": test_case[""task_id""],\n ""error"": test_case[""error""],\n ""is_correct"": 0,\n ""average_score"": 0.0,\n }\n\n samples = test_case.get(""samples"", [])\n if not samples:\n print(f""Returning failure object for task {test_case['task_id']} due to no samples"")\n return {\n ""task_id"": test_case[""task_id""],\n ""error"": ""No samples"",\n ""is_correct"": 0,\n ""average_score"": 0.0,\n }\n\n sample_results = []\n for sample in samples:\n for attempt in range(args.max_attempts):\n try:\n format_dict = {\n ""expected"": test_case[""expected_command""],\n ""generated"": sample[""generated_command""],\n }\n if include_context:\n format_dict[""context""] = json.dumps(test_case[""context""], indent=2)\n prompt = prompt_template.format(**format_dict)\n\n messages = [\n {\n ""role"": ""system"",\n ""content"": system_prompt,\n },\n {""role"": ""user"", ""content"": prompt},\n ]\n\n resp = await client.chat.completions.create(\n model=args.judge_name,\n messages=messages,\n temperature=args.temperature,\n top_p=args.top_p,\n presence_penalty=args.presence_penalty,\n response_format={""type"": ""json_object""},\n extra_body={\n ""top_k"": args.top_k,\n },\n )\n\n thinking_trace = getattr(resp.choices[0].message, ""reasoning_content"", """")\n result = json.loads(resp.choices[0].message.content)\n equivalent = result.get(""equivalent"", 0)\n\n sample_results.append(\n {\n ""generated_command"": sample[""generated_command""],\n ""thinking_trace"": thinking_trace,\n ""evaluation_results"": result,\n ""equivalent"": equivalent,\n ""exact_match"": sample[""exact_match""],\n }\n )\n break\n\n except BadRequestError as e:\n print(\n f""Returning failure object for task {test_case['task_id']} due to BadRequestError: {e}""\n )\n sample_results.append(\n {\n ""task_id"": test_case[""task_id""],\n ""error"": str(e),\n ""equivalent"": 0,\n ""exact_match"": 0,\n }\n )\n break\n\n except Exception as e:\n print(f""Error on task {test_case['task_id']}: {e}"")\n if attempt == args.max_attempts - 1:\n print(f""Returning failure object for task {test_case['task_id']}"")\n sample_results.append(\n {\n ""task_id"": test_case[""task_id""],\n ""error"": str(e),\n ""equivalent"": 0,\n }\n )\n await asyncio.sleep(delay)\n delay *= 2\n\n # Compute avg@n and pass@n\n num_judge_matches = sum(s.get(""equivalent"", 0) for s in sample_results)\n judge_avg_at_n = num_judge_matches / len(sample_results)\n judge_pass_at_n = int(num_judge_matches > 0)\n num_exact_matches = test_case.get(""num_exact_matches"", 0)\n\n return {\n ""task_id"": test_case[""task_id""],\n ""context"": test_case[""context""],\n ""expected_command"": test_case[""expected_command""],\n ""sample_evaluations"": sample_results,\n ""num_samples"": len(sample_results),\n ""num_judge_matches"": num_judge_matches,\n ""judge_avg_at_n"": judge_avg_at_n,\n ""judge_pass_at_n"": judge_pass_at_n,\n ""num_exact_matches"": num_exact_matches,\n }\n\n\nasync def run_single_eval(\n args: Args,\n generations_file: str,\n evaluations_file: str,\n 
eval_step: int,\n client: AsyncOpenAI,\n sem: asyncio.Semaphore,\n system_prompt: str,\n prompt_template: str,\n include_context: bool,\n logger: Optional[LocalLogger] = None,\n) -> Dict[str, Any]:\n """"""\n Evaluate a single generations file and write results to evaluations file.\n Uses shared client and semaphore for efficiency in batch mode.\n Returns the evaluation scores dictionary.\n """"""\n print(f""\n{'='*60}"")\n print(f""Evaluating: {generations_file}"")\n print(f""Output: {evaluations_file}"")\n print(f""Step: {eval_step}"")\n print(f""{'='*60}"")\n \n loaded_data = load_dataset(generations_file)\n test_cases = loaded_data[""generation_results""]\n\n config_generations = loaded_data[""config_generations""]\n config_evaluations = args.__dict__\n metadata = {\n ""config_generations"": config_generations,\n ""config_evaluations"": config_evaluations,\n }\n\n if args.limit > 0:\n test_cases = test_cases[: args.limit]\n\n # Filter out tasks with context that's too long\n test_cases, skipped_cases = filter_tasks_by_context_length(\n test_cases,\n system_prompt=system_prompt,\n prompt_template=prompt_template,\n max_context_length=args.context_length,\n problem_length=args.problem_length,\n buffer_tokens=512,\n include_context=include_context,\n )\n\n print(f""\nFiltered dataset:"")\n print(f"" Valid test cases: {len(test_cases)}"")\n print(f"" Skipped (too long): {len(skipped_cases)}"")\n print()\n\n # Clean output\n if os.path.exists(evaluations_file):\n os.remove(evaluations_file)\n\n tasks = [\n evaluate_generated_command(\n client, sem, tc, args, system_prompt, prompt_template, include_context\n )\n for tc in test_cases\n ]\n\n print(f""Running {len(test_cases)} test cases with concurrency={args.concurrency} ..."")\n results: List[Dict[str, Any]] = []\n\n # progress bar over async tasks\n for coro in tqdm_asyncio.as_completed(tasks, total=len(tasks)):\n results.append(await coro)\n\n # sort the results by task_id\n results.sort(key=lambda x: x[""task_id""])\n\n os.makedirs(os.path.dirname(evaluations_file), exist_ok=True)\n total_judge_avg_at_n = sum(r.get(""judge_avg_at_n"", 0) for r in results) / len(results)\n total_judge_pass_at_n = sum(r.get(""judge_pass_at_n"", 0) for r in results)\n\n total_exact_match_avg_at_n = loaded_data[""generation_scores""][""total_exact_match_avg_at_n""]\n total_exact_match_pass_at_n = loaded_data[""generation_scores""][""total_exact_match_pass_at_n""]\n\n # Prepare metrics to log\n metrics_to_log = {\n ""eval_step"": eval_step,\n f""{args.wandb_eval_type}/total_test_cases"": len(test_cases),\n f""{args.wandb_eval_type}/num_samples_per_task"": loaded_data[""config_generations""][\n ""num_samples""\n ],\n f""{args.wandb_eval_type}/total_judge_avg_at_n"": total_judge_avg_at_n,\n f""{args.wandb_eval_type}/total_judge_pass_at_n"": total_judge_pass_at_n,\n f""{args.wandb_eval_type}/total_exact_match_avg_at_n"": total_exact_match_avg_at_n,\n f""{args.wandb_eval_type}/total_exact_match_pass_at_n"": total_exact_match_pass_at_n,\n }\n \n # Log metrics using appropriate logger\n if args.use_local_logger:\n logger.log(metrics_to_log)\n else:\n wandb.log(metrics_to_log)\n\n with open(evaluations_file, ""w"") as f:\n json.dump(\n {\n ""metadata"": metadata,\n ""evaluation_scores"": {\n ""total_test_cases"": len(test_cases),\n ""num_samples_per_task"": loaded_data[""config_generations""][""num_samples""],\n ""total_judge_avg_at_n"": total_judge_avg_at_n,\n ""total_judge_pass_at_n"": total_judge_pass_at_n,\n ""total_exact_match_avg_at_n"": 
total_exact_match_avg_at_n,\n ""total_exact_match_pass_at_n"": total_exact_match_pass_at_n,\n ""max_attempts"": args.max_attempts,\n },\n ""generation_results"": results,\n },\n f,\n indent=2,\n )\n\n print(""\n"" + ""="" * 50)\n print(f""--- Evaluation Complete (step {eval_step}) ---"")\n print(""="" * 50)\n print(f""Total Test Cases: {len(test_cases)}"")\n print(f""Total Judge Pass At N: {total_judge_pass_at_n}"")\n print(f""Total Judge Avg At N: {total_judge_avg_at_n * 100:.2f}%"")\n print(f""Total Exact Match Pass At N: {total_exact_match_pass_at_n}"")\n print(f""Total Exact Match Avg At N: {total_exact_match_avg_at_n * 100:.2f}%"")\n print(f""Evaluations output file: {evaluations_file}"")\n\n return {\n ""eval_step"": eval_step,\n ""generations_file"": generations_file,\n ""evaluations_file"": evaluations_file,\n ""total_test_cases"": len(test_cases),\n ""total_judge_avg_at_n"": total_judge_avg_at_n,\n ""total_judge_pass_at_n"": total_judge_pass_at_n,\n ""total_exact_match_avg_at_n"": total_exact_match_avg_at_n,\n ""total_exact_match_pass_at_n"": total_exact_match_pass_at_n,\n }\n\n\nasync def run_batch_eval(args: Args, base_url: str):\n """"""\n Run evaluation on multiple generation files with a single model load.\n This avoids the overhead of loading/unloading the judge model for each checkpoint.\n """"""\n eval_jobs = args.get_eval_jobs()\n \n print(f""\n{'#'*60}"")\n print(f""# BATCH EVALUATION MODE"")\n print(f""# Processing {len(eval_jobs)} evaluation job(s)"")\n print(f""{'#'*60}\n"")\n \n for i, (gen_file, eval_file, step) in enumerate(eval_jobs):\n print(f"" [{i+1}/{len(eval_jobs)}] Step {step}: {gen_file}"")\n print()\n \n # Load prompts once (shared across all evaluations)\n with open(args.system_prompt_file, ""r"") as f:\n system_prompt = f.read()\n\n include_context = bool(args.judge_prompt_file_with_context)\n judge_prompt_file = args.judge_prompt_file_with_context or args.judge_prompt_file\n\n with open(judge_prompt_file, ""r"") as f:\n prompt_template = f.read()\n\n # Initialize logger (local or wandb) - shared across all evaluations\n logger = None\n if args.use_local_logger:\n run_id = args.wandb_id or args.wandb_name\n logger = LocalLogger(\n log_dir=args.local_log_dir,\n run_id=run_id,\n run_name=args.wandb_name,\n project=args.wandb_project,\n config={""batch_mode"": True, ""num_jobs"": len(eval_jobs)},\n tags=args.wandb_tags,\n )\n else:\n wandb_init_kwargs = {\n ""project"": args.wandb_project,\n ""name"": args.wandb_name,\n ""tags"": args.wandb_tags,\n ""group"": args.wandb_group,\n ""config"": {""batch_mode"": True, ""num_jobs"": len(eval_jobs)},\n }\n\n if args.wandb_id:\n wandb_dir = os.path.join(os.getcwd(), ""eval_logs"", args.wandb_id)\n os.makedirs(wandb_dir, exist_ok=True)\n os.environ[""WANDB_DIR""] = wandb_dir\n os.environ[""WANDB_RESUME""] = ""allow""\n \n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n ""dir"": wandb_dir,\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n # Reuse a single HTTP/2 client with a large pool (shared across all evaluations)\n http = httpx.AsyncClient(\n http2=True,\n timeout=httpx.Timeout(args.timeout, connect=10.0, read=args.timeout),\n limits=httpx.Limits(\n max_connections=args.max_connections,\n max_keepalive_connections=args.max_connections,\n keepalive_expiry=args.keepalive,\n ),\n headers={""Connection"": ""keep-alive""},\n )\n client = AsyncOpenAI(\n base_url=base_url,\n api_key=args.api_key,\n http_client=http,\n )\n sem = asyncio.Semaphore(args.concurrency)\n\n # Process each 
evaluation job sequentially\n all_results = []\n for i, (gen_file, eval_file, step) in enumerate(eval_jobs):\n print(f""\n[{i+1}/{len(eval_jobs)}] Processing step {step}..."")\n \n result = await run_single_eval(\n args=args,\n generations_file=gen_file,\n evaluations_file=eval_file,\n eval_step=step,\n client=client,\n sem=sem,\n system_prompt=system_prompt,\n prompt_template=prompt_template,\n include_context=include_context,\n logger=logger,\n )\n all_results.append(result)\n\n await http.aclose()\n \n # Finish logging\n if args.use_local_logger:\n logger.finish()\n else:\n wandb.finish()\n\n # Print summary\n print(""\n"" + ""#"" * 60)\n print(""# BATCH EVALUATION SUMMARY"")\n print(""#"" * 60)\n for r in all_results:\n print(f"" Step {r['eval_step']:>5}: Judge Avg@N = {r['total_judge_avg_at_n']*100:5.2f}%, ""\n f""Pass@N = {r['total_judge_pass_at_n']}, ""\n f""Exact Avg@N = {r['total_exact_match_avg_at_n']*100:5.2f}%"")\n print(""#"" * 60)\n\n\n# ----------------------------\n# Server launch + waiting\n# ----------------------------\nasync def wait_for_server(base_url: str, timeout: float = 600.0) -> None:\n """"""\n Poll the server's OpenAI-compatible endpoint until it responds or timeout.\n We'll try a lightweight call to /models.\n """"""\n print(f""Waiting for server at {base_url} ..."")\n deadline = asyncio.get_event_loop().time() + timeout\n\n async with httpx.AsyncClient() as client:\n while True:\n now = asyncio.get_event_loop().time()\n if now > deadline:\n raise RuntimeError(\n f""Server at {base_url} did not become ready within {timeout} seconds.""\n )\n try:\n resp = await client.get(f""{base_url}/v1/models"", timeout=5.0)\n if resp.status_code == 200:\n print(""Server is up."")\n return\n else:\n print(f""Server not ready yet (status {resp.status_code}); retrying..."")\n except Exception as e:\n print(f""Server not ready yet ({e}); retrying..."")\n await asyncio.sleep(10.0)\n\n\ndef launch_sglang_server(args: Args) -> subprocess.Popen:\n """"""\n Launch sglang server as a subprocess.\n You should have `module load CUDA/12.8` and `source .venv/bin/activate`\n done in your shell before running this script.\n """"""\n cmd = [\n sys.executable,\n ""-m"",\n ""sglang.launch_server"",\n ""--model-path"",\n args.judge_model_path,\n ""--host"",\n args.server_host,\n ""--port"",\n str(args.server_port),\n ""--context-length"",\n str(args.context_length),\n ""--mem-fraction-static"",\n str(args.mem_fraction_static),\n ""--tp-size"",\n str(args.tp_size),\n ]\n\n if args.extra_server_args:\n cmd.extend(args.extra_server_args)\n\n print(""Launching sglang server:"")\n print("" "" + "" "".join(cmd))\n\n env = os.environ.copy()\n proc = subprocess.Popen(\n cmd,\n env=env,\n stdout=sys.stdout,\n stderr=sys.stderr,\n )\n return proc\n\n\n# ----------------------------\n# Main\n# ----------------------------\nasync def amain(args: Args):\n base_url = f""http://{args.server_host}:{args.server_port}/v1""\n print(f""Using server at {base_url}"")\n\n server_proc: Optional[subprocess.Popen] = None\n try:\n if args.launch_server:\n server_proc = launch_sglang_server(args)\n await wait_for_server(f""http://{args.server_host}:{args.server_port}"")\n\n await run_batch_eval(args, base_url=base_url)\n\n finally:\n if server_proc is not None:\n print(""Shutting down sglang server ..."")\n server_proc.terminate()\n try:\n server_proc.wait(timeout=30)\n except subprocess.TimeoutExpired:\n print(""Server did not exit in time; killing."")\n server_proc.kill()\n\n\nif __name__ == ""__main__"":\n args = 
tyro.cli(Args)\n asyncio.run(amain(args))\n print(""Done"")\n",python,tab
2,237,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:51:27 PM [info] Activating crowd-code\n2:51:27 PM [info] Recording started\n2:51:27 PM [info] Initializing git provider using file system watchers...\n2:51:27 PM [info] Git repository found\n2:51:27 PM [info] Git provider initialized successfully\n2:51:27 PM [info] Initial git state: [object Object]\n",Log,tab
3,1773,"tab-model-eval/src/evaluation/sglang_eval.py",0,0,"",python,tab
4,142831,"TERMINAL",0,0,"git diff",,terminal_command
5,142985,"TERMINAL",0,0,"]633;C[?1h=\r[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n:[K",,terminal_output
6,144565,"TERMINAL",0,0,"...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n:[K",,terminal_output
7,144746,"TERMINAL",0,0,"...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py 
b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n[31m-[m[m\r\n[31m- # always update weight first so that sglang has the loaded weights from training.[m[m\r\n[31m- actor_model.update_weights()[m[m\r\n[31m-[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n[31m-[m[m\r\n[31m- # always update weight first so that sglang has the loaded weights from training.[m[m\r\n[31m- 
actor_model.update_weights()[m[m\r\n[31m-[m[m\r\n[31m- if args.check_weight_update_equal:[m[m\r\n[31m- ray.get(rollout_manager.check_weights.remote(action=""compare""))[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n[31m-[m[m\r\n[31m- # always update weight first so that sglang has the loaded weights from training.[m[m\r\n[31m- actor_model.update_weights()[m[m\r\n[31m-[m[m\r\n[31m- if args.check_weight_update_equal:[m[m\r\n[31m- ray.get(rollout_manager.check_weights.remote(action=""compare""))[m[m\r\n[31m-[m[m\r\n:[K...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and 
critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n[31m-[m[m\r\n[31m- # always update weight first so that sglang has the loaded weights from training.[m[m\r\n[31m- actor_model.update_weights()[m[m\r\n[31m-[m[m\r\n[31m- if args.check_weight_update_equal:[m[m\r\n[31m- ray.get(rollout_manager.check_weights.remote(action=""compare""))[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n:[K",,terminal_output
8,144862,"TERMINAL",0,0,"...skipping...\r\n[1mdiff --git a/train.py b/train.py[m[m\r\n[1mdeleted file mode 100644[m[m\r\n[1mindex 9fb480e..0000000[m[m\r\n[1m--- a/train.py[m[m\r\n[1m+++ /dev/null[m[m\r\n[36m@@ -1,106 +0,0 @@[m[m\r\n[31m-import ray[m[m\r\n[31m-from sglang.srt.constants import GPU_MEMORY_TYPE_KV_CACHE, GPU_MEMORY_TYPE_WEIGHTS[m[m\r\n[31m-[m[m\r\n[31m-try:[m[m\r\n[31m- from sglang.srt.constants import GPU_MEMORY_TYPE_CUDA_GRAPH[m[m\r\n[31m-except ImportError:[m[m\r\n[31m- GPU_MEMORY_TYPE_CUDA_GRAPH = None[m[m\r\n[31m-[m[m\r\n[31m-from miles.ray.placement_group import create_placement_groups, create_rollout_manager, create_training_models[m[m\r\n[31m-from miles.utils.arguments import parse_args[m[m\r\n[31m-from miles.utils.logging_utils import configure_logger[m[m\r\n[31m-from miles.utils.misc import should_run_periodic_action[m[m\r\n[31m-from miles.utils.tracking_utils import init_tracking[m[m\r\n[31m-[m[m\r\n[31m-[m[m\r\n[31m-def train(args):[m[m\r\n[31m- configure_logger()[m[m\r\n[31m- # allocate the GPUs[m[m\r\n[31m- pgs = create_placement_groups(args)[m[m\r\n[31m- init_tracking(args)[m[m\r\n[31m-[m[m\r\n[31m- # create the rollout manager, with sglang engines inside.[m[m\r\n[31m- # need to initialize rollout manager first to calculate num_rollout[m[m\r\n[31m- rollout_manager, num_rollout_per_epoch = create_rollout_manager(args, pgs[""rollout""])[m[m\r\n[31m-[m[m\r\n[31m- # create the actor and critic models[m[m\r\n[31m- actor_model, critic_model = create_training_models(args, pgs, rollout_manager)[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_WEIGHTS]))[m[m\r\n[31m-[m[m\r\n[31m- # always update weight first so that sglang has the loaded weights from training.[m[m\r\n[31m- actor_model.update_weights()[m[m\r\n[31m-[m[m\r\n[31m- if args.check_weight_update_equal:[m[m\r\n[31m- ray.get(rollout_manager.check_weights.remote(action=""compare""))[m[m\r\n[31m-[m[m\r\n[31m- if args.offload_rollout:[m[m\r\n[31m- if GPU_MEMORY_TYPE_CUDA_GRAPH is not None:[m[m\r\n:[K",,terminal_output
9,145349,"TERMINAL",0,0,"\r[K[31m- ray.get(rollout_manager.onload.remote(tags=[GPU_MEMORY_TYPE_CUDA_GRAPH]))[m[m\r\n:[K",,terminal_output
10,146004,"TERMINAL",0,0,"\r[K[HM[1mdiff --git a/train.py b/train.py[m[m\r\n[46;1H\r[K:[K",,terminal_output
11,146352,"TERMINAL",0,0,"\r[K\r[K:[K",,terminal_output
12,146548,"TERMINAL",0,0,"\r[K\r[K:[K",,terminal_output
13,146884,"TERMINAL",0,0,"\r[K\r[K:[K",,terminal_output
14,148417,"TERMINAL",0,0,"\r[K[?1l>]0;mahajan1@jwlogin21:~/projects/envcomp/miles",,terminal_output
15,154778,"TERMINAL",0,0,"cd tab-model-eval/",,terminal_command
16,154781,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin21:~/projects/envcomp/miles/tab-model-eval",,terminal_output
17,156747,"TERMINAL",0,0,"git diff",,terminal_command
18,156780,"TERMINAL",0,0,"]633;C[?1h=\r[1mdiff --git a/src/evaluation/sglang_generations.py b/src/evaluation/sglang_generations.py[m[m\r\n[1mindex 554e563..822a89b 100644[m[m\r\n[1m--- a/src/evaluation/sglang_generations.py[m[m\r\n[1m+++ b/src/evaluation/sglang_generations.py[m[m\r\n[36m@@ -39,10 +39,11 @@[m [mclass Args:[m[m\r\n # Model-related[m[m\r\n temperature: float = 0.7[m[m\r\n top_p: float = 0.8[m[m\r\n[31m- presence_penalty: float = 1.5[m[m\r\n[32m+[m[32m presence_penalty: float = 0.0[m[m\r\n top_k: int = 20[m[m\r\n min_p: float = 0.0[m[m\r\n num_samples: int = 5[m[m\r\n[32m+[m[32m max_new_tokens: int = 5000[m[m\r\n [m[m\r\n # HTTP / client config[m[m\r\n concurrency: int = 16[m[m\r\n[36m@@ -144,6 +145,7 @@[m [masync def generate_next_command([m[m\r\n top_p=args.top_p,[m[m\r\n presence_penalty=args.presence_penalty,[m[m\r\n n=args.num_samples,[m[m\r\n[32m+[m[32m max_tokens=args.max_new_tokens,[m[m\r\n extra_body={[m[m\r\n ""top_k"": args.top_k,[m[m\r\n },[m[m\r\n[1mdiff --git a/src/evaluation/sync_local_logs_to_wandb.py b/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[1mindex 50f9f10..c765579 100644[m[m\r\n[1m--- a/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[1m+++ b/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[36m@@ -84,7 +84,7 @@[m [mdef sync_single_run(log_dir: str, dry_run: bool = False) -> bool:[m[m\r\n run = wandb.init([m[m\r\n project=metadata[""project""],[m[m\r\n name=metadata[""run_name""],[m[m\r\n[31m- id=metadata[""run_id""],[m[m\r\n[32m+[m[32m id=f""evaluate_{metadata[""run_id""]}"",[m[m\r\n config=metadata.get(""config"", {}),[m[m\r\n tags=metadata.get(""tags"", []),[m[m\r\n resume=""allow"", # Resume if exists, create if not[m[m\r\n\r[K[?1l>]0;mahajan1@jwlogin21:~/projects/envcomp/miles/tab-model-eval",,terminal_output
19,173057,"TERMINAL",0,0,"git diff",,terminal_command
20,173060,"TERMINAL",0,0,"]633;C[?1h=\r[1mdiff --git a/src/evaluation/sglang_generations.py b/src/evaluation/sglang_generations.py[m[m\r\n[1mindex 554e563..822a89b 100644[m[m\r\n[1m--- a/src/evaluation/sglang_generations.py[m[m\r\n[1m+++ b/src/evaluation/sglang_generations.py[m[m\r\n[36m@@ -39,10 +39,11 @@[m [mclass Args:[m[m\r\n # Model-related[m[m\r\n temperature: float = 0.7[m[m\r\n top_p: float = 0.8[m[m\r\n[31m- presence_penalty: float = 1.5[m[m\r\n[32m+[m[32m presence_penalty: float = 0.0[m[m\r\n top_k: int = 20[m[m\r\n min_p: float = 0.0[m[m\r\n num_samples: int = 5[m[m\r\n[32m+[m[32m max_new_tokens: int = 5000[m[m\r\n [m[m\r\n # HTTP / client config[m[m\r\n concurrency: int = 16[m[m\r\n[36m@@ -144,6 +145,7 @@[m [masync def generate_next_command([m[m\r\n top_p=args.top_p,[m[m\r\n presence_penalty=args.presence_penalty,[m[m\r\n n=args.num_samples,[m[m\r\n[32m+[m[32m max_tokens=args.max_new_tokens,[m[m\r\n extra_body={[m[m\r\n ""top_k"": args.top_k,[m[m\r\n },[m[m\r\n[1mdiff --git a/src/evaluation/sync_local_logs_to_wandb.py b/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[1mindex 50f9f10..c765579 100644[m[m\r\n[1m--- a/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[1m+++ b/src/evaluation/sync_local_logs_to_wandb.py[m[m\r\n[36m@@ -84,7 +84,7 @@[m [mdef sync_single_run(log_dir: str, dry_run: bool = False) -> bool:[m[m\r\n run = wandb.init([m[m\r\n project=metadata[""project""],[m[m\r\n name=metadata[""run_name""],[m[m\r\n[31m- id=metadata[""run_id""],[m[m\r\n[32m+[m[32m id=f""evaluate_{metadata[""run_id""]}"",[m[m\r\n config=metadata.get(""config"", {}),[m[m\r\n tags=metadata.get(""tags"", []),[m[m\r\n resume=""allow"", # Resume if exists, create if not[m[m\r\n\r[K[?1l>]0;mahajan1@jwlogin21:~/projects/envcomp/miles/tab-model-eval",,terminal_output
21,594981,"tab-model-eval/src/evaluation/sglang_eval.py",13453,0,"",python,selection_mouse
22,594997,"tab-model-eval/src/evaluation/sglang_eval.py",13452,0,"",python,selection_command
23,1090903,"tab-model-eval/src/evaluation/sglang_eval.py",13559,0,"",python,selection_mouse
24,1217328,"tab-model-eval/src/evaluation/sglang_eval.py",13780,0,"",python,selection_mouse
|
| 26 |
+
25,1217335,"tab-model-eval/src/evaluation/sglang_eval.py",13779,0,"",python,selection_command
|
| 27 |
+
26,1217859,"tab-model-eval/src/evaluation/sglang_eval.py",13781,0,"",python,selection_mouse
|
| 28 |
+
27,1219273,"tab-model-eval/src/evaluation/sglang_eval.py",13559,0,"",python,selection_mouse
|
| 29 |
+
28,1220768,"tab-model-eval/src/evaluation/sglang_eval.py",13458,0,"",python,selection_mouse
|
| 30 |
+
29,1220789,"tab-model-eval/src/evaluation/sglang_eval.py",13457,0,"",python,selection_command
|
| 31 |
+
30,1222704,"tab-model-eval/src/evaluation/sglang_eval.py",13851,0,"",python,selection_mouse
|
| 32 |
+
31,1224544,"tab-model-eval/src/evaluation/sglang_eval.py",14232,0,"",python,selection_mouse
|
| 33 |
+
32,1226368,"tab-model-eval/src/evaluation/sglang_eval.py",14388,0,"",python,selection_mouse
|
| 34 |
+
33,1227193,"tab-model-eval/src/evaluation/sglang_eval.py",14387,0,"",python,selection_mouse
|
| 35 |
+
34,1227197,"tab-model-eval/src/evaluation/sglang_eval.py",14386,0,"",python,selection_command
|
| 36 |
+
35,1227966,"tab-model-eval/src/evaluation/sglang_eval.py",14232,0,"",python,selection_mouse
|
| 37 |
+
36,1228911,"tab-model-eval/src/evaluation/sglang_eval.py",13851,0,"",python,selection_mouse
|
| 38 |
+
37,1230211,"tab-model-eval/src/evaluation/sglang_eval.py",13780,0,"",python,selection_mouse
|
| 39 |
+
38,1230232,"tab-model-eval/src/evaluation/sglang_eval.py",13779,0,"",python,selection_command
|
| 40 |
+
39,1230687,"tab-model-eval/src/evaluation/sglang_eval.py",13781,0,"",python,selection_mouse
|
| 41 |
+
40,1231859,"tab-model-eval/src/evaluation/sglang_eval.py",13559,0,"",python,selection_mouse
|
| 42 |
+
41,1285609,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",0,0,"#!/usr/bin/env python3\n""""""\nSync local evaluation logs to wandb.\n\nThis script reads local logs created by sglang_eval.py when using --use_local_logger\nand uploads them to wandb as a single run with all data points.\n\nUsage:\n python sync_local_logs_to_wandb.py --log_dir data/eval/local_logs/eval_<RUN_ID>\n \nOr to sync all runs in a directory:\n python sync_local_logs_to_wandb.py --log_dir data/eval/local_logs --sync_all\n""""""\n\nimport argparse\nimport json\nimport os\nimport sys\nfrom pathlib import Path\n\nimport wandb\n\n\ndef load_metadata(log_dir: str) -> dict:\n """"""Load run metadata from the log directory.""""""\n metadata_file = os.path.join(log_dir, ""metadata.json"")\n if not os.path.exists(metadata_file):\n raise FileNotFoundError(f""Metadata file not found: {metadata_file}"")\n \n with open(metadata_file, ""r"") as f:\n return json.load(f)\n\n\ndef load_metrics(log_dir: str) -> list:\n """"""Load all metrics from the JSONL file.""""""\n metrics_file = os.path.join(log_dir, ""metrics.jsonl"")\n if not os.path.exists(metrics_file):\n raise FileNotFoundError(f""Metrics file not found: {metrics_file}"")\n \n metrics = []\n with open(metrics_file, ""r"") as f:\n for line in f:\n line = line.strip()\n if line:\n metrics.append(json.loads(line))\n \n return metrics\n\n\ndef sync_single_run(log_dir: str, dry_run: bool = False) -> bool:\n """"""\n Sync a single run's local logs to wandb.\n \n Returns True if successful, False otherwise.\n """"""\n print(f""\n{'='*60}"")\n print(f""Syncing: {log_dir}"")\n print(f""{'='*60}"")\n \n try:\n # Load metadata and metrics\n metadata = load_metadata(log_dir)\n metrics = load_metrics(log_dir)\n \n if not metrics:\n print(f"" Warning: No metrics found in {log_dir}"")\n return False\n \n # Sort metrics by eval_step\n metrics.sort(key=lambda x: x.get(""eval_step"", 0))\n \n print(f"" Run ID: {metadata['run_id']}"")\n print(f"" Run Name: {metadata['run_name']}"")\n print(f"" Project: {metadata['project']}"")\n print(f"" Tags: {metadata.get('tags', [])}"")\n print(f"" Data points: {len(metrics)}"")\n print(f"" Steps: {[m.get('eval_step', 'N/A') for m in metrics]}"")\n \n if dry_run:\n print("" [DRY RUN] Would upload to wandb"")\n return True\n \n # Initialize wandb run\n run = wandb.init(\n project=metadata[""project""],\n name=metadata[""run_name""],\n id=f""evaluate_{metadata[""run_id""]}"",\n config=metadata.get(""config"", {}),\n tags=metadata.get(""tags"", []),\n resume=""allow"", # Resume if exists, create if not\n )\n \n # Log each metric with its step\n for metric_entry in metrics:\n # Remove timestamp for wandb logging\n entry = {k: v for k, v in metric_entry.items() if k != ""timestamp""}\n wandb.log(entry)\n \n wandb.finish()\n print(f"" ✓ Successfully synced to wandb!"")\n return True\n \n except Exception as e:\n print(f"" ✗ Error syncing {log_dir}: {e}"")\n return False\n\n\ndef find_run_dirs(base_dir: str) -> list:\n """"""Find all run directories in the base directory.""""""\n run_dirs = []\n \n for item in os.listdir(base_dir):\n item_path = os.path.join(base_dir, item)\n if os.path.isdir(item_path):\n # Check if it's a valid run directory (has metadata.json)\n metadata_file = os.path.join(item_path, ""metadata.json"")\n if os.path.exists(metadata_file):\n run_dirs.append(item_path)\n \n return sorted(run_dirs)\n\n\ndef main():\n parser = argparse.ArgumentParser(\n description=""Sync local evaluation logs to wandb"",\n 
formatter_class=argparse.RawDescriptionHelpFormatter,\n epilog=""""""\nExamples:\n # Sync a single run\n python sync_local_logs_to_wandb.py --log_dir data/eval/local_logs/eval_13032805\n \n # Sync all runs in a directory\n python sync_local_logs_to_wandb.py --log_dir data/eval/local_logs --sync_all\n \n # Dry run to see what would be synced\n python sync_local_logs_to_wandb.py --log_dir data/eval/local_logs --sync_all --dry_run\n""""""\n )\n \n parser.add_argument(\n ""--log_dir"",\n type=str,\n required=True,\n help=""Path to the log directory (single run) or parent directory (with --sync_all)""\n )\n parser.add_argument(\n ""--sync_all"",\n action=""store_true"",\n help=""Sync all runs found in the log_dir""\n )\n parser.add_argument(\n ""--dry_run"",\n action=""store_true"",\n help=""Show what would be synced without actually uploading""\n )\n \n args = parser.parse_args()\n \n if not os.path.exists(args.log_dir):\n print(f""Error: Log directory not found: {args.log_dir}"")\n sys.exit(1)\n \n if args.sync_all:\n # Sync all runs in the directory\n run_dirs = find_run_dirs(args.log_dir)\n \n if not run_dirs:\n print(f""No valid run directories found in {args.log_dir}"")\n sys.exit(1)\n \n print(f""Found {len(run_dirs)} run(s) to sync:"")\n for d in run_dirs:\n print(f"" - {os.path.basename(d)}"")\n \n success_count = 0\n for run_dir in run_dirs:\n if sync_single_run(run_dir, dry_run=args.dry_run):\n success_count += 1\n \n print(f""\n{'='*60}"")\n print(f""Sync complete: {success_count}/{len(run_dirs)} runs synced successfully"")\n print(f""{'='*60}"")\n \n else:\n # Sync single run\n # Check if log_dir is the run directory or contains metadata.json\n if os.path.exists(os.path.join(args.log_dir, ""metadata.json"")):\n sync_single_run(args.log_dir, dry_run=args.dry_run)\n else:\n print(f""Error: {args.log_dir} does not appear to be a valid run directory"")\n print("" (missing metadata.json)"")\n print(""\nHint: Use --sync_all to sync all runs in a parent directory"")\n sys.exit(1)\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab
|
| 43 |
+
42,1285723,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2122,0,"",python,selection_mouse
|
| 44 |
+
43,1285723,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2121,0,"",python,selection_command
|
| 45 |
+
44,1287340,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2172,0,"",python,selection_command
|
| 46 |
+
45,1287668,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2222,0,"",python,selection_command
|
| 47 |
+
46,1287669,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2273,0,"",python,selection_command
|
| 48 |
+
47,1287845,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2305,0,"",python,selection_command
|
| 49 |
+
48,1288024,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2325,0,"",python,selection_command
|
| 50 |
+
49,1288200,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2376,0,"",python,selection_command
|
| 51 |
+
50,1288227,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2404,0,"",python,selection_command
|
| 52 |
+
51,1288380,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2413,0,"",python,selection_command
|
| 53 |
+
52,1288508,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2444,0,"",python,selection_command
|
| 54 |
+
53,1288705,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2470,0,"",python,selection_command
|
| 55 |
+
54,1288886,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2511,0,"",python,selection_command
|
| 56 |
+
55,1288959,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2550,0,"",python,selection_command
|
| 57 |
+
56,1289067,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2599,0,"",python,selection_command
|
| 58 |
+
57,1289281,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2646,0,"",python,selection_command
|
| 59 |
+
58,1289664,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2599,0,"",python,selection_command
|
| 60 |
+
59,1290744,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2618,0,"",python,selection_command
|
| 61 |
+
60,1290929,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2619,0,"",python,selection_command
|
| 62 |
+
61,1292519,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2570,0,"",python,selection_command
|
| 63 |
+
62,1294253,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2552,49,"",python,content
|
| 64 |
+
63,1294259,"tab-model-eval/src/evaluation/sync_local_logs_to_wandb.py",2564,0,"",python,selection_command
|
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-e69fee36-85ea-4c2a-bf7e-90b6490333df1767532068697-2026_01_04-14.08.44.100/source.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-f4bf9883-3801-446a-98c6-413295d94c701767091439743-2025_12_30-11.44.46.167/source.csv
ADDED
|
@@ -0,0 +1,360 @@
|
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
2,266,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"11:44:45 AM [info] Activating crowd-code\n11:44:46 AM [info] Recording started\n11:44:46 AM [info] Initializing git provider using file system watchers...\n11:44:46 AM [info] Git repository found\n11:44:46 AM [info] Git provider initialized successfully\n11:44:46 AM [info] Initial git state: [object Object]\n",Log,tab
|
| 3 |
+
3,32786,"TERMINAL",0,0,"watch -n1 squeue --me",,terminal_command
|
| 4 |
+
4,32830,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 5 |
+
5,33102,"TERMINAL",0,0,"[?1049h[22;0;0t[1;44r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;115Hjwlogin23.juwels: Tue Dec 30 11:45:18 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;11H13032808 booster qwen_32b mahajan1 PD\t0:00 64 (Resources)[5;11H13032806 booster qwen_32b mahajan1 R 8:12:30 64 jwb[0200-0204,0213-0217,0219-0224,0226-0236,0245-0264,0266-0268,0277-0285,0287-0291][44;156H",,terminal_output
|
| 6 |
+
6,34350,"TERMINAL",0,0,"[1;150H20[5;60H1[44;156H",,terminal_output
|
| 7 |
+
7,35610,"TERMINAL",0,0,"[1;151H1[5;60H2[44;156H",,terminal_output
|
| 8 |
+
8,36722,"TERMINAL",0,0,"[44;1H[?1049l[23;0;0t\r[?1l>]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 9 |
+
9,37647,"TERMINAL",0,0,"squeue --me -o ""%.10i %.16P %.60j %.8u %.8T %.10M %.9l %.6D %R""",,terminal_command
|
| 10 |
+
10,37695,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 11 |
+
11,37896,"TERMINAL",0,0," JOBID PARTITION NAME USER STATE TIME TIME_LIMI NODES NODELIST(REASON)\r\n 13032808 booster qwen_32b_no_lora_bs_32 mahajan1 PENDING 0:00 1-00:00:00 64 (Resources)\r\n 13032806 booster qwen_32b_lora_bs_32 mahajan1 RUNNING 8:12:35 1-00:00:00 64 jwb[0200-0204,0213-0217,0219-0224,0226-0236,0245-0264,0266-0268,0277-0285,0287-0291]\r\n]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 12 |
+
12,58404,"TERMINAL",0,0,"bash",,terminal_focus
|
| 13 |
+
13,58830,"TERMINAL",0,0,"bash",,terminal_focus
|
| 14 |
+
14,59615,"TERMINAL",0,0,"scancel 13032808",,terminal_command
|
| 15 |
+
15,59662,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 16 |
+
16,59854,"TERMINAL",0,0,"]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 17 |
+
17,68172,"TERMINAL",0,0,"sacct --format=""JobID%15,JobName%30,Partition%16,AllocCPUS%3,State%12,Elapsed%10,Timelimit%10"" --starttime $(date -d ""last week"" +%Y-%m-%d)",,terminal_command
|
| 18 |
+
18,68218,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 19 |
+
19,68474,"TERMINAL",0,0," JobID JobName Partition All State Elapsed Timelimit \r\n--------------- ------------------------------ ---------------- --- ------------ ---------- ---------- \r\n 13024059.0 sh 16 FAILED 00:00:22 \r\n 13024059.1 sh 16 FAILED 00:01:49 \r\n 13024059.2 sh 16 FAILED 00:03:59 \r\n 13024095.0 torchrun 16 FAILED 00:00:15 \r\n 13024095.2 sh 16 COMPLETED 00:00:03 \r\n 13024095.4 sh 16 FAILED 00:06:09 \r\n 13024095.7 sh 16 FAILED 00:00:35 \r\n 13024209.0 sh 16 FAILED 00:03:47 \r\n 13024229 qwen_1.7b_lora_bs_8 booster 0 FAILED 00:00:00 1-00:00:00 \r\n 13024250 qwen_1.7b_lora_bs_16 booster 192 FAILED 00:15:49 1-00:00:00 \r\n 13024250.0 sh 20 FAILED 00:15:27 \r\n 13024251 qwen_1.7b_lora_bs_32 booster 192 FAILED 00:08:02 1-00:00:00 \r\n 13024251.0 sh 20 FAILED 00:07:41 \r\n 13024252 qwen_1.7b_lora_bs_8 booster 192 FAILED 00:15:38 1-00:00:00 \r\n 13024252.0 sh 20 FAILED 00:15:17 \r\n 13024253 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 06:02:31 1-00:00:00 \r\n 13024253.0 sh 20 COMPLETED 06:02:09 \r\n 13024254 qwen_1.7b_lora_bs_32 booster 192 FAILED 00:06:09 1-00:00:00 \r\n 13024254.0 sh 20 FAILED 00:05:48 \r\n 13024255 qwen_1.7b_lora_bs_8 booster 192 FAILED 00:01:48 1-00:00:00 \r\n 13024255.0 sh 20 FAILED 00:01:27 \r\n 13024256 qwen_4b_lora_bs_16 booster 384 FAILED 00:15:39 1-00:00:00 \r\n 13024256.0 sh 40 FAILED 00:15:17 \r\n 13024257 qwen_4b_lora_bs_32 booster 384 FAILED 00:06:48 1-00:00:00 \r\n 13024257.0 sh 40 FAILED 00:06:26 \r\n 13024258 qwen_4b_lora_bs_8 booster 384 FAILED 00:03:43 1-00:00:00 \r\n 13024258.0 sh 40 FAILED 00:03:21 \r\n 13024259 qwen_4b_lora_bs_16 booster 384 FAILED 00:05:27 1-00:00:00 \r\n 13024259.0 sh 40 FAILED 00:05:05 \r\n 13024260 qwen_4b_lora_bs_32 booster 384 FAILED 00:06:27 1-00:00:00 \r\n 13024260.0 sh 40 FAILED 00:06:06 \r\n 13024261 qwen_4b_lora_bs_8 booster 384 FAILED 00:04:01 1-00:00:00 \r\n 13024261.0 sh 40 FAILED 00:03:39 \r\n 13025339 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 04:13:45 1-00:00:00 \r\n 13025339.0 sh 20 COMPLETED 04:13:21 \r\n 13025340 qwen_1.7b_lora_bs_32 booster 192 FAILED 00:07:09 1-00:00:00 \r\n 13025340.0 sh 20 FAILED 00:06:47 \r\n 13025341 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 04:18:27 1-00:00:00 \r\n 13025341.0 sh 20 COMPLETED 04:18:04 \r\n 13025342 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 05:58:27 1-00:00:00 \r\n 13025342.0 sh 20 COMPLETED 05:58:04 \r\n 13025343 qwen_1.7b_lora_bs_32 booster 192 FAILED 00:05:17 1-00:00:00 \r\n 13025343.0 sh 20 FAILED 00:04:55 \r\n 13025344 qwen_1.7b_lora_bs_8 booster 192 FAILED 00:00:49 1-00:00:00 \r\n 13025344.0 sh 20 FAILED 00:00:27 \r\n 13025345 qwen_4b_lora_bs_16 booster 384 FAILED 00:32:33 1-00:00:00 \r\n 13025345.0 sh 40 FAILED 00:32:11 \r\n 13025346 qwen_4b_lora_bs_32 booster 384 FAILED 00:05:49 1-00:00:00 \r\n 13025346.0 sh 40 FAILED 00:05:26 \r\n 13025347 qwen_4b_lora_bs_8 booster 384 FAILED 00:02:41 1-00:00:00 \r\n 13025347.0 sh 40 FAILED 00:02:19 \r\n 13025348 qwen_4b_lora_bs_16 booster 384 FAILED 00:04:21 1-00:00:00 \r\n 13025348.0 sh 40 FAILED 00:03:59 \r\n 13025349 qwen_4b_lora_bs_32 booster 384 FAILED 00:05:28 1-00:00:00 \r\n 13025349.0 sh 40 FAILED 00:05:05 \r\n 13025350 qwen_4b_lora_bs_8 booster 384 FAILED 00:02:59 1-00:00:00 \r\n 13025350.0 sh 40 FAILED 00:02:37 \r\n 13025365 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 04:20:44 1-00:00:00 \r\n 13025365.0 sh 20 COMPLETED 04:20:20 \r\n 13030348.0 sh 16 FAILED 00:06:31 \r\n 13030348.3 sh 16 FAILED 00:03:42 \r\n 13030386 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 03:52:07 1-00:00:00 \r\n 13030386.0 sh 20 COMPLETED 
03:51:44 \r\n 13030387 qwen_1.7b_lora_bs_32 booster 384 COMPLETED 04:40:04 1-00:00:00 \r\n 13030387.0 sh 40 COMPLETED 04:39:40 \r\n 13030388 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 04:01:53 1-00:00:00 \r\n 13030388.0 sh 20 COMPLETED 04:01:31 \r\n 13030389 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 05:40:57 1-00:00:00 \r\n 13030389.0 sh 20 COMPLETED 05:40:33 \r\n 13030390 qwen_1.7b_lora_bs_32 booster 384 COMPLETED 03:42:05 1-00:00:00 \r\n 13030390.0 sh 40 COMPLETED 03:41:41 \r\n 13030391 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 05:55:16 1-00:00:00 \r\n 13030391.0 sh 20 COMPLETED 05:54:52 \r\n 13030392 qwen_4b_lora_bs_16 booster 384 COMPLETED 04:41:52 1-00:00:00 \r\n 13030392.0 sh 40 COMPLETED 04:41:31 \r\n 13030393 qwen_4b_lora_bs_32 booster 768 COMPLETED 03:11:49 1-00:00:00 \r\n 13030393.0 sh 80 COMPLETED 03:11:27 \r\n 13030394 qwen_4b_lora_bs_8 booster 192 COMPLETED 07:12:58 1-00:00:00 \r\n 13030394.0 sh 20 COMPLETED 07:12:37 \r\n 13030395 qwen_4b_lora_bs_16 booster 384 COMPLETED 07:03:11 1-00:00:00 \r\n 13030395.0 sh 40 COMPLETED 07:02:50 \r\n 13030396 qwen_4b_lora_bs_32 booster 768 COMPLETED 04:35:42 1-00:00:00 \r\n 13030396.0 sh 80 COMPLETED 04:35:20 \r\n 13030397 qwen_4b_lora_bs_8 booster 192 COMPLETED 14:08:39 1-00:00:00 \r\n 13030397.0 sh 20 COMPLETED 14:08:18 \r\n 13030398 qwen_8b_lora_bs_16 booster 384 COMPLETED 07:15:28 1-00:00:00 \r\n 13030398.0 sh 40 COMPLETED 07:15:06 \r\n 13030399 qwen_8b_lora_bs_32 booster 768 COMPLETED 04:40:58 1-00:00:00 \r\n 13030399.0 sh 80 COMPLETED 04:40:36 \r\n 13030400 qwen_8b_lora_bs_8 booster 192 COMPLETED 11:59:57 1-00:00:00 \r\n 13030400.0 sh 20 COMPLETED 11:59:36 \r\n 13030401 qwen_8b_lora_bs_16 booster 384 FAILED 00:06:33 1-00:00:00 \r\n 13030401.0 sh 40 FAILED 00:06:12 \r\n 13030402 qwen_8b_lora_bs_32 booster 768 FAILED 00:05:45 1-00:00:00 \r\n 13030402.0 sh 80 FAILED 00:05:23 \r\n 13030403 qwen_8b_lora_bs_8 booster 192 FAILED 00:06:14 1-00:00:00 \r\n 13030403.0 sh 20 FAILED 00:05:52 \r\n 13031068.0 sh 16 FAILED 00:04:00 \r\n 13031068.1 sh 16 FAILED 00:02:57 \r\n 13031068.2 sh 16 FAILED 00:02:54 \r\n 13031093.1 sh 32 FAILED 00:00:52 \r\n 13031093.2 sh 32 FAILED 00:00:51 \r\n 13031093.3 sh 32 FAILED 00:00:49 \r\n 13031093.4 sh 32 FAILED 00:00:48 \r\n 13031854.0 sh 40 FAILED 00:52:06 \r\n 13031856.0 sh 160 FAILED 00:17:59 \r\n 13031902.4 sh 32 FAILED 00:04:16 \r\n 13031978 qwen_8b_no_lora_bs_8 booster 384 FAILED 01:09:06 1-00:00:00 \r\n 13031978.0 sh 40 FAILED 01:08:44 \r\n 13031979 qwen_8b_no_lora_bs_16 booster 768 FAILED 12:08:49 1-00:00:00 \r\n 13031979.0 sh 80 FAILED 12:08:24 \r\n 13031981 qwen_8b_no_lora_bs_32 booster 1536 FAILED 07:43:12 1-00:00:00 \r\n 13031981.0 sh 160 FAILED 07:42:47 \r\n 13031982 qwen_32b_lora_bs_8 booster 768 FAILED 03:27:04 1-00:00:00 \r\n 13031982.0 sh 80 FAILED 03:26:39 \r\n 13032780 qwen_8b_no_lora_bs_8 booster 384 FAILED 17:33:36 1-00:00:00 \r\n 13032780.0 sh 40 FAILED 17:33:12 \r\n 13032800 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 03:54:13 1-00:00:00 \r\n 13032800.0 sh 20 COMPLETED 03:53:47 \r\n 13032801 qwen_1.7b_lora_bs_32 booster 384 COMPLETED 04:40:04 1-00:00:00 \r\n 13032801.0 sh 40 COMPLETED 04:39:41 \r\n 13032802 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 04:04:53 1-00:00:00 \r\n 13032802.0 sh 20 COMPLETED 04:04:28 \r\n 13032803 qwen_1.7b_lora_bs_16 booster 192 COMPLETED 05:39:56 1-00:00:00 \r\n 13032803.0 sh 20 COMPLETED 05:39:30 \r\n 13032804 qwen_1.7b_lora_bs_32 booster 384 COMPLETED 03:43:42 1-00:00:00 \r\n 13032804.0 sh 40 COMPLETED 03:43:18 \r\n 13032805 qwen_1.7b_lora_bs_8 booster 192 COMPLETED 
05:56:43 1-00:00:00 \r\n 13032805.0 sh 20 COMPLETED 05:56:19 \r\n 13032806 qwen_32b_lora_bs_32 booster 6144 RUNNING 08:13:05 1-00:00:00 \r\n 13032806.0 sh 640 RUNNING 08:12:42 \r\n 13032807 qwen_32b_lora_bs_8 booster 768 TIMEOUT 1-00:00:30 1-00:00:00 \r\n 13032809 qwen_4b_lora_bs_16 booster 384 COMPLETED 04:47:21 1-00:00:00 \r\n 13032809.0 sh 40 COMPLETED 04:46:57 \r\n 13032810 qwen_4b_lora_bs_32 booster 768 COMPLETED 03:11:21 1-00:00:00 \r\n 13032810.0 sh 80 COMPLETED 03:10:55 \r\n 13032811 qwen_4b_lora_bs_8 booster 192 COMPLETED 07:12:50 1-00:00:00 \r\n 13032811.0 sh 20 COMPLETED 07:12:27 \r\n 13032812 qwen_4b_lora_bs_16 booster 384 COMPLETED 07:06:10 1-00:00:00 \r\n 13032812.0 sh 40 COMPLETED 07:05:47 \r\n 13032813 qwen_4b_lora_bs_32 booster 768 COMPLETED 04:43:02 1-00:00:00 \r\n 13032813.0 sh 80 COMPLETED 04:42:39 \r\n 13032814 qwen_4b_lora_bs_8 booster 192 COMPLETED 13:51:27 1-00:00:00 \r\n 13032814.0 sh 20 COMPLETED 13:51:03 \r\n 13032815 qwen_8b_lora_bs_16 booster 384 COMPLETED 07:21:14 1-00:00:00 \r\n 13032815.0 sh 40 COMPLETED 07:20:52 \r\n 13032816 qwen_8b_lora_bs_32 booster 768 COMPLETED 04:44:36 1-00:00:00 \r\n 13032816.0 sh 80 COMPLETED 04:44:13 \r\n 13032817 qwen_8b_lora_bs_8 booster 192 COMPLETED 12:05:55 1-00:00:00 \r\n 13032817.0 sh 20 COMPLETED 12:05:33 \r\n 13032818 qwen_8b_no_lora_bs_16 booster 768 COMPLETED 08:07:43 1-00:00:00 \r\n 13032818.0 sh 80 COMPLETED 08:07:20 \r\n 13032820 qwen_8b_no_lora_bs_32 booster 1536 COMPLETED 04:46:00 1-00:00:00 \r\n 13032820.0 sh 160 COMPLETED 04:45:35 \r\n 13032821 qwen_8b_no_lora_bs_8 booster 384 COMPLETED 17:30:39 1-00:00:00 \r\n 13032821.0 sh 40 COMPLETED 17:30:13 \r\n 13034357 qwen_8b_no_lora_bs_32 booster 6144 COMPLETED 07:14:13 1-00:00:00 \r\n 13034357.0 sh 640 COMPLETED 07:13:46 \r\n]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 20 |
+
20,636927,"TERMINAL",0,0,"git diff",,terminal_command
|
| 21 |
+
21,637017,"TERMINAL",0,0,"]633;C[?1h=\r[1mdiff --git a/miles/backends/fsdp_utils/actor.py b/miles/backends/fsdp_utils/actor.py[m[m\r\n[1mindex f500a88..57cc178 100644[m[m\r\n[1m--- a/miles/backends/fsdp_utils/actor.py[m[m\r\n[1m+++ b/miles/backends/fsdp_utils/actor.py[m[m\r\n[36m@@ -973,7 +973,10 @@[m [mdef get_logprob_and_entropy_with_cp([m[m\r\n )[m[m\r\n log_probs_full = torch.log_softmax(shifted_logits, dim=-1)[m[m\r\n probs = torch.softmax(shifted_logits, dim=-1)[m[m\r\n[31m- entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[32m+[m[32m # mentropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[32m+[m[32m p_log_p = probs * log_probs_full[m[m\r\n[32m+[m[32m p_log_p = torch.nan_to_num(p_log_p, nan=0.0)[m[41m [m[m\r\n[32m+[m[32m entropy = -p_log_p.sum(dim=-1)[m[m\r\n return local_log_probs, entropy[m[m\r\n [m[m\r\n chunk_size = logits.shape[0][m[m\r\n[36m@@ -1003,7 +1006,10 @@[m [mdef get_logprob_and_entropy_with_cp([m[m\r\n shifted_logits = logits[:-1, :] if cp_rank == cp_size - 1 else logits[m[m\r\n log_probs_full = torch.log_softmax(shifted_logits, dim=-1)[m[m\r\n probs = torch.softmax(shifted_logits, dim=-1)[m[m\r\n[31m- entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[32m+[m[32m # entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[32m+[m[32m p_log_p = probs * log_probs_full[m[m\r\n[32m+[m[32m p_log_p = torch.nan_to_num(p_log_p, nan=0.0)[m[41m [m[m\r\n[32m+[m[32m entropy = -p_log_p.sum(dim=-1)[m[m\r\n [m[m\r\n # Pad entropy for the last rank[m[m\r\n if cp_rank == cp_size - 1:[m[m\r\n[1mdiff --git a/miles/backends/fsdp_utils/checkpoint.py b/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[1mindex 3846bd9..758ccec 100644[m[m\r\n[1m--- a/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[1m+++ b/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[36m@@ -25,16 +25,17 @@[m [mclass ModelState(Stateful):[m[m\r\n self.keys_filter = keys_filter[m[m\r\n [m[m\r\n def state_dict(self):[m[m\r\n[31m- model_state_dict, _ = get_state_dict(self.model, optimizers=[])[m[m\r\n[32m+[m[32m options = StateDictOptions(full_state_dict=False, cpu_offload=True)[m[m\r\n[32m+[m[32m model_state_dict, _ = get_state_dict(self.model, optimizers=[], options=options)[m[m\r\n if self.keys_filter:[m[m\r\n model_state_dict = {k: v for k, v in model_state_dict.items() if self.keys_filter(k)}[m[m\r\n return {""model"": model_state_dict}[m[m\r\n [m[m\r\n:[K",,terminal_output
|
| 22 |
+
22,667321,"TERMINAL",0,0,"\r[K def load_state_dict(self, state_dict):[m[m\r\n:[K",,terminal_output
|
| 23 |
+
23,667610,"TERMINAL",0,0,"\r[K[31m- options = None[m[m\r\n:[K",,terminal_output
|
| 24 |
+
24,667810,"TERMINAL",0,0,"\r[K[32m+[m[32m options = StateDictOptions(cpu_offload=True)[m[m\r\n:[K",,terminal_output
|
| 25 |
+
25,667947,"TERMINAL",0,0,"\r[K if self.keys_filter:[m[m\r\n:[K",,terminal_output
|
| 26 |
+
26,668099,"TERMINAL",0,0,"\r[K # For filtered loading (e.g., LoRA), use strict=False to allow partial loading[m[m\r\n:[K",,terminal_output
|
| 27 |
+
27,668262,"TERMINAL",0,0,"\r[K[31m- options = StateDictOptions(strict=False)[m[m\r\n:[K",,terminal_output
|
| 28 |
+
28,669549,"TERMINAL",0,0,"\r[K[32m+[m[32m options.strict = False[m[m\r\n:[K",,terminal_output
|
| 29 |
+
29,671741,"TERMINAL",0,0,"\r[K/\r[K[1;1H probs = torch.softmax(shifted_logits, dim=-1)[m[m\r\n[2;1H[31m- entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[3;1H[32m+[m[32m # mentropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[4;1H[32m+[m[32m p_log_p = probs * log_probs_full[m[m\r\n[5;1H[32m+[m[32m p_log_p = torch.nan_to_num(p_log_p, nan=0.0)[m[41m [m[m\r\n[6;1H[32m+[m[32m entropy = -p_log_p.sum(dim=-1)[m[m\r\n[7;1H return local_log_probs, entropy[m[m\r\n[8;1H [m[m\r\n[9;1H chunk_size = logits.shape[0][m[m\r\n[10;1H[36m@@ -1003,7 +1006,10 @@[m [mdef get_logprob_and_entropy_with_cp([m[m\r\n[11;1H shifted_logits = logits[:-1, :] if cp_rank == cp_size - 1 else logits[m[m\r\n[12;1H log_probs_full = torch.log_softmax(shifted_logits, dim=-1)[m[m\r\n[13;1H probs = torch.softmax(shifted_logits, dim=-1)[m[m\r\n[14;1H[31m- entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[15;1H[32m+[m[32m # entropy = -(probs * log_probs_full).sum(dim=-1)[m[m\r\n[16;1H[32m+[m[32m p_log_p = probs * log_probs_full[m[m\r\n[17;1H[32m+[m[32m p_log_p = torch.nan_to_num(p_log_p, nan=0.0)[m[41m [m[m\r\n[18;1H[32m+[m[32m entropy = -p_log_p.sum(dim=-1)[m[m\r\n[19;1H [m[m\r\n[20;1H # Pad entropy for the last rank[m[m\r\n[21;1H if cp_rank == cp_size - 1:[m[m\r\n[22;1H[1mdiff --git a/miles/backends/fsdp_utils/checkpoint.py b/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[23;1H[1mindex 3846bd9..758ccec 100644[m[m\r\n[24;1H[1m--- a/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[25;1H[1m+++ b/miles/backends/fsdp_utils/checkpoint.py[m[m\r\n[26;1H[36m@@ -25,16 +25,17 @@[m [mclass ModelState(Stateful):[m[m\r\n[27;1H self.keys_filter = keys_filter[m[m\r\n[28;1H [m[m\r\n[29;1H def state_dict(self):[m[m\r\n[30;1H[31m- model_state_dict, _ = get_state_dict(self.model, optimizers=[])[m[m\r\n[31;1H[32m+[m[32m options = State[7mD[27m[32mictOptions(full_state_dict=False, cpu_offload=True)[m[m\r\n[32;1H[32m+[m[32m model_state_dict, _ = get_state_dict(self.model, optimizers=[], options=options)[m[m\r\n[33;1H if self.keys_filter:[m[m\r\n[34;1H model_state_dict = {k: v for k, v in model_state_dict.items() if self.keys_filter(k)}[m[m\r\n[35;1H return {""model"": model_state_dict}[m[m\r\n[36;1H [m[m\r\n[37;1H def load_state_dict(self, state_dict):[m[m\r\n[38;1H[31m- options = None[m[m\r\n[39;1H[32m+[m[32m options = State[7mD[27m[32mictOptions(cpu_offload=True)[m[m\r\n[40;1H if self.keys_filter:[m[m\r\n[41;1H # For filtered loading (e.g., LoRA), use strict=False to allow partial loading[m[m\r\n[42;1H[31m- options = State[7mD[27m[31mictOptions(strict=False)[m[m\r\n[43;1H[32m+[m[32m options.strict = False[m[m\r\n[44;1H set_state_dict([m[m\r\n self.model, optimizers=[], [m[m\r\n model_state_dict=state_dict[""model""], [m[m\r\n[36m@@ -52,16 +53,17 @@[m [mclass OptimizerState(Stateful):[m[m\r\n self.keys_filter = keys_filter[m[m\r\n [m[m\r\n def state_dict(self):[m[m\r\n[31m- _, optimizer_state_dict = get_state_dict(self.model, optimizers=self.optimizer)[m[m\r\n[32m+[m[32m options = State[7mD[27m[32mictOptions(full_state_dict=False, cpu_offload=True)[m[m\r\n[32m+[m[32m _, optimizer_state_dict = get_state_dict(self.model, optimizers=self.optimizer, options=options)[m[m\r\n if self.keys_filter:[m[m\r\n optimizer_state_dict = {k: v for k, v in optimizer_state_dict.items() if self.keys_filter(k)}[m[m\r\n return {""optim"": optimizer_state_dict}[m[m\r\n [m[m\r\n def load_state_dict(self, state_dict):[m[m\r\n[31m- options = None[m[m\r\n[32m+[m[32m options = State[7mD[27m[32mictOptions(cpu_offload=True)[m[m\r\n if self.keys_filter:[m[m\r\n # For 
filtered loading (e.g., LoRA), use strict=False to allow partial loading[m[m\r\n[31m- options = State[7mD[27m[31mictOptions(strict=False)[m[m\r\n[32m+[m[32m options.strict = False[m[m\r\n set_state_dict([m[m\r\n self.model, optimizers=self.optimizer, [m[m\r\n model_state_dict=None, [m[m\r\n[36m@@ -137,12 +139,14 @@[m [mdef load(actor: Any) -> dict[str, Any] | None:[m[m\r\n keys_filter = lambda k: ""lora_"" in k[m[m\r\n logger.info(""[FS[7mD[27mP] LoRA mode: loading only LoRA weights from checkpoint"")[m[m\r\n [m[m\r\n[32m+[m[32m dp_group = getattr(actor, ""dp_group"", None)[m[m\r\n[32m+[m[m\r\n:[K",,terminal_output
|
| 30 |
+
30,671917,"TERMINAL",0,0,"\r[K/\r[K # Load model weights (always)[m[m\r\n model_state = ModelState(actor.model, keys_filter=keys_filter)[m[m\r\n state_dict = {""model_state"": model_state}[m[m\r\n [m[m\r\n try:[m[m\r\n[31m- dcp.load(state_dict=state_dict, checkpoint_id=str(model_dir))[m[m\r\n[32m+[m[32m dcp.load(state_dict=state_dict, checkpoint_id=str(model_dir), process_group=dp_group)[m[m\r\n logger.info(f""[FS[7mD[27mP] Loaded model from {model_dir}"")[m[m\r\n:[K",,terminal_output
|
| 31 |
+
31,672882,"TERMINAL",0,0,"\r[K except Exception as e:[m[m\r\n:[K",,terminal_output
|
| 32 |
+
32,673068,"TERMINAL",0,0,"\r[K logger.error(f""[FS[7mD[27mP] Failed to load model from {model_dir}: {e}"")[m[m\r\n:[K",,terminal_output
|
| 33 |
+
33,673274,"TERMINAL",0,0,"\r[K[36m@@ -154,7 +158,7 @@[m [mdef load(actor: Any) -> dict[str, Any] | None:[m[m\r\n:[K",,terminal_output
|
| 34 |
+
34,673736,"TERMINAL",0,0,"\r[K optimizer_state = OptimizerState(actor.model, actor.optimizer, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 35 |
+
35,673888,"TERMINAL",0,0,"\r[K optim_state_dict = {""optim_state"": optimizer_state}[m[m\r\n:[K",,terminal_output
|
| 36 |
+
36,674054,"TERMINAL",0,0,"\r[K[HM[32m+[m[32m options.strict = False[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 37 |
+
37,674195,"TERMINAL",0,0,"\r[K[HM[31m- options = State[7mD[27m[31mictOptions(strict=False)[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 38 |
+
38,674369,"TERMINAL",0,0,"\r[K[HM # For filtered loading (e.g., LoRA), use strict=False to allow partial loading[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 39 |
+
39,674526,"TERMINAL",0,0,"\r[K[HM if self.keys_filter:[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 40 |
+
40,674685,"TERMINAL",0,0,"\r[K[HM[32m+[m[32m options = State[7mD[27m[32mictOptions(cpu_offload=True)[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 41 |
+
41,674867,"TERMINAL",0,0,"\r[K[HM[31m- options = None[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 42 |
+
42,675016,"TERMINAL",0,0,"\r[K[HM def load_state_dict(self, state_dict):[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 43 |
+
43,675255,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K\r[K[31m- dcp.load(state_dict=state_dict, checkpoint_id=str(model_dir))[m[m\r\n:[K",,terminal_output
|
| 44 |
+
44,675356,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.load(state_dict=state_dict, checkpoint_id=str(model_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 45 |
+
45,675536,"TERMINAL",0,0,"\r[K logger.info(f""[FS[7mD[27mP] Loaded model from {model_dir}"")[m[m\r\n:[K",,terminal_output
|
| 46 |
+
46,675686,"TERMINAL",0,0,"\r[K except Exception as e:[m[m\r\n:[K",,terminal_output
|
| 47 |
+
47,675855,"TERMINAL",0,0,"\r[K logger.error(f""[FS[7mD[27mP] Failed to load model from {model_dir}: {e}"")[m[m\r\n:[K",,terminal_output
|
| 48 |
+
48,676331,"TERMINAL",0,0,"\r[K[36m@@ -154,7 +158,7 @@[m [mdef load(actor: Any) -> dict[str, Any] | None:[m[m\r\n:[K",,terminal_output
|
| 49 |
+
49,676333,"TERMINAL",0,0,"\r[K optimizer_state = OptimizerState(actor.model, actor.optimizer, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 50 |
+
50,676456,"TERMINAL",0,0,"\r[K optim_state_dict = {""optim_state"": optimizer_state}[m[m\r\n:[K",,terminal_output
|
| 51 |
+
51,676528,"TERMINAL",0,0,"\r[K try:[m[m\r\n:[K",,terminal_output
|
| 52 |
+
52,678986,"TERMINAL",0,0,"\r[K[31m- dcp.load(state_dict=optim_state_dict, checkpoint_id=str(optimizer_dir))[m[m\r\n:[K",,terminal_output
|
| 53 |
+
53,679154,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.load(state_dict=optim_state_dict, checkpoint_id=str(optimizer_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 54 |
+
54,679330,"TERMINAL",0,0,"\r[K logger.info(f""[FS[7mD[27mP] Loaded optimizer from {optimizer_dir}"")[m[m\r\n:[K",,terminal_output
|
| 55 |
+
55,680566,"TERMINAL",0,0,"\r[K except Exception as e:[m[m\r\n:[K",,terminal_output
|
| 56 |
+
56,680751,"TERMINAL",0,0,"\r[K logger.warning(f""[FS[7mD[27mP] Failed to load optimizer from {optimizer_dir}: {e}"")[m[m\r\n:[K",,terminal_output
|
| 57 |
+
57,680959,"TERMINAL",0,0,"\r[K[36m@@ -167,7 +171,7 @@[m [mdef load(actor: Any) -> dict[str, Any] | None:[m[m\r\n:[K",,terminal_output
|
| 58 |
+
58,681132,"TERMINAL",0,0,"\r[K lr_scheduler_state = LRSchedulerState(actor.lr_scheduler)[m[m\r\n:[K",,terminal_output
|
| 59 |
+
59,681510,"TERMINAL",0,0,"\r[K lr_scheduler_state_dict = {""lr_scheduler_state"": lr_scheduler_state}[m[m\r\n:[K",,terminal_output
|
| 60 |
+
60,681691,"TERMINAL",0,0,"\r[K try:[m[m\r\n:[K",,terminal_output
|
| 61 |
+
61,681898,"TERMINAL",0,0,"\r[K[31m- dcp.load(state_dict=lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir))[m[m\r\n:[K",,terminal_output
|
| 62 |
+
62,683126,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.load(state_dict=lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 63 |
+
63,683584,"TERMINAL",0,0,"\r[K logger.info(f""[FS[7mD[27mP] Loaded LR scheduler from {lr_scheduler_dir}"")[m[m\r\n:[K",,terminal_output
|
| 64 |
+
64,684050,"TERMINAL",0,0,"\r[K except Exception as e:[m[m\r\n:[K",,terminal_output
|
| 65 |
+
65,689555,"TERMINAL",0,0,"\r[K logger.warning(f""[FS[7mD[27mP] Failed to load LR scheduler from {lr_scheduler_dir}: {e}"")[m[m\r\n:[K",,terminal_output
|
| 66 |
+
66,689744,"TERMINAL",0,0,"\r[K[36m@@ -238,22 +242,29 @@[m [mdef save(actor: Any, iteration: int, keys_filter=None) -> None:[m[m\r\n:[K",,terminal_output
|
| 67 |
+
67,689842,"TERMINAL",0,0,"\r[K lr_scheduler_dir.mkdir(parents=True, exist_ok=True)[m[m\r\n:[K",,terminal_output
|
| 68 |
+
68,689991,"TERMINAL",0,0,"\r[K dist.barrier()[m[m\r\n:[K",,terminal_output
|
| 69 |
+
69,690170,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 70 |
+
70,690374,"TERMINAL",0,0,"\r[K[32m+[m[32m cp_size = getattr(actor, ""cp_size"", 1)[m[m\r\n:[K",,terminal_output
|
| 71 |
+
71,690578,"TERMINAL",0,0,"\r[K[32m+[m[32m cp_rank = getattr(actor, ""cp_rank"", 0)[m[m\r\n:[K",,terminal_output
|
| 72 |
+
72,690764,"TERMINAL",0,0,"\r[K[32m+[m[32m dp_group = getattr(actor, ""dp_group"", None)[m[m\r\n:[K",,terminal_output
|
| 73 |
+
73,690910,"TERMINAL",0,0,"\r[K[32m+[m[m\r\n:[K",,terminal_output
|
| 74 |
+
74,691091,"TERMINAL",0,0,"\r[K # Save model weights[m[m\r\n:[K",,terminal_output
|
| 75 |
+
75,691283,"TERMINAL",0,0,"\r[K[31m- model_state = ModelState(actor.model, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 76 |
+
76,691440,"TERMINAL",0,0,"\r[K[31m- state_dict = {""model_state"": model_state}[m[m\r\n:[K",,terminal_output
|
| 77 |
+
77,691614,"TERMINAL",0,0,"\r[K[31m- dcp.save(state_dict, checkpoint_id=str(model_dir))[m[m\r\n:[K",,terminal_output
|
| 78 |
+
78,691932,"TERMINAL",0,0,"\r[K[32m+[m[32m if cp_rank == 0:[m[m\r\n:[K",,terminal_output
|
| 79 |
+
79,692156,"TERMINAL",0,0,"\r[K[32m+[m[32m model_state = ModelState(actor.model, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 80 |
+
80,692216,"TERMINAL",0,0,"\r[K[32m+[m[32m state_dict = {""model_state"": model_state}[m[m\r\n:[K",,terminal_output
|
| 81 |
+
81,692405,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.save(state_dict, checkpoint_id=str(model_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 82 |
+
82,692546,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 83 |
+
83,692692,"TERMINAL",0,0,"\r[K # Save optimizer state[m[m\r\n:[K",,terminal_output
|
| 84 |
+
84,692864,"TERMINAL",0,0,"\r[K if hasattr(actor, ""optimizer"") and actor.optimizer is not None:[m[m\r\n:[K",,terminal_output
|
| 85 |
+
85,692992,"TERMINAL",0,0,"\r[K[31m- optimizer_state = OptimizerState(actor.model, actor.optimizer, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 86 |
+
86,693163,"TERMINAL",0,0,"\r[K[31m- optim_state_dict = {""optim_state"": optimizer_state}[m[m\r\n:[K",,terminal_output
|
| 87 |
+
87,693318,"TERMINAL",0,0,"\r[K[31m- dcp.save(optim_state_dict, checkpoint_id=str(optimizer_dir))[m[m\r\n:[K",,terminal_output
|
| 88 |
+
88,693476,"TERMINAL",0,0,"\r[K[32m+[m[32m if cp_rank == 0:[m[m\r\n:[K",,terminal_output
|
| 89 |
+
89,693632,"TERMINAL",0,0,"\r[K[32m+[m[32m optimizer_state = OptimizerState(actor.model, actor.optimizer, keys_filter=keys_filter)[m[m\r\n:[K",,terminal_output
|
| 90 |
+
90,693796,"TERMINAL",0,0,"\r[K[32m+[m[32m optim_state_dict = {""optim_state"": optimizer_state}[m[m\r\n:[K",,terminal_output
|
| 91 |
+
91,693948,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.save(optim_state_dict, checkpoint_id=str(optimizer_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 92 |
+
92,694169,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 93 |
+
93,694342,"TERMINAL",0,0,"\r[K # Save LR scheduler state[m[m\r\n:[K",,terminal_output
|
| 94 |
+
94,694546,"TERMINAL",0,0,"\r[K if hasattr(actor, ""lr_scheduler"") and actor.lr_scheduler is not None:[m[m\r\n:[K",,terminal_output
|
| 95 |
+
95,694704,"TERMINAL",0,0,"\r[K[31m- lr_scheduler_state = LRSchedulerState(actor.lr_scheduler)[m[m\r\n:[K",,terminal_output
|
| 96 |
+
96,694886,"TERMINAL",0,0,"\r[K[31m- lr_scheduler_state_dict = {""lr_scheduler_state"": lr_scheduler_state}[m[m\r\n:[K",,terminal_output
|
| 97 |
+
97,695066,"TERMINAL",0,0,"\r[K[31m- dcp.save(lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir))[m[m\r\n:[K",,terminal_output
|
| 98 |
+
98,695258,"TERMINAL",0,0,"\r[K[32m+[m[32m if cp_rank == 0:[m[m\r\n:[K",,terminal_output
|
| 99 |
+
99,695477,"TERMINAL",0,0,"\r[K[32m+[m[32m lr_scheduler_state = LRSchedulerState(actor.lr_scheduler)[m[m\r\n:[K",,terminal_output
|
| 100 |
+
100,696626,"TERMINAL",0,0,"\r[K[32m+[m[32m lr_scheduler_state_dict = {""lr_scheduler_state"": lr_scheduler_state}[m[m\r\n:[K",,terminal_output
|
| 101 |
+
101,696810,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.save(lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 102 |
+
102,697057,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 103 |
+
103,697176,"TERMINAL",0,0,"\r[K if dist.get_rank() == 0:[m[m\r\n:[K",,terminal_output
|
| 104 |
+
104,697392,"TERMINAL",0,0,"\r[K rng_state = {""torch"": torch.get_rng_state()}[m[m\r\n:[K",,terminal_output
|
| 105 |
+
105,697841,"TERMINAL",0,0,"\r[K[HM[31m- dcp.load(state_dict=lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir))[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 106 |
+
106,698011,"TERMINAL",0,0,"\r[K[HM try:[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 107 |
+
107,698192,"TERMINAL",0,0,"\r[K[HM lr_scheduler_state_dict = {""lr_scheduler_state"": lr_scheduler_state}[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 108 |
+
108,698360,"TERMINAL",0,0,"\r[K[HM lr_scheduler_state = LRSchedulerState(actor.lr_scheduler)[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 109 |
+
109,698514,"TERMINAL",0,0,"\r[K[HM[36m@@ -167,7 +171,7 @@[m [mdef load(actor: Any) -> dict[str, Any] | None:[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 110 |
+
110,698770,"TERMINAL",0,0,"\r[K[HM logger.warning(f""[FS[7mD[27mP] Failed to load optimizer from {optimizer_dir}: {e}"")[m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 111 |
+
111,700540,"TERMINAL",0,0,"\r[K[32m+[m[32m lr_scheduler_state = LRSchedulerState(actor.lr_scheduler)[m[m\r\n:[K",,terminal_output
|
| 112 |
+
112,700714,"TERMINAL",0,0,"\r[K[32m+[m[32m lr_scheduler_state_dict = {""lr_scheduler_state"": lr_scheduler_state}[m[m\r\n:[K",,terminal_output
|
| 113 |
+
113,700880,"TERMINAL",0,0,"\r[K[32m+[m[32m dcp.save(lr_scheduler_state_dict, checkpoint_id=str(lr_scheduler_dir), process_group=dp_group)[m[m\r\n:[K",,terminal_output
|
| 114 |
+
114,701043,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 115 |
+
115,701235,"TERMINAL",0,0,"\r[K if dist.get_rank() == 0:[m[m\r\n:[K",,terminal_output
|
| 116 |
+
116,701443,"TERMINAL",0,0,"\r[K rng_state = {""torch"": torch.get_rng_state()}[m[m\r\n:[K",,terminal_output
|
| 117 |
+
117,701578,"TERMINAL",0,0,"\r[K[1mdiff --git a/train_sft.py b/train_sft.py[m[m\r\n:[K",,terminal_output
|
| 118 |
+
118,701927,"TERMINAL",0,0,"\r[K[1mindex e494e0d..1f9422a 100644[m[m\r\n:[K",,terminal_output
|
| 119 |
+
119,702105,"TERMINAL",0,0,"\r[K[1m--- a/train_sft.py[m[m\r\n:[K",,terminal_output
|
| 120 |
+
120,702288,"TERMINAL",0,0,"\r[K[1m+++ b/train_sft.py[m[m\r\n:[K",,terminal_output
|
| 121 |
+
121,702489,"TERMINAL",0,0,"\r[K[36m@@ -536,7 +536,7 @@[m [mclass SFTTrainer:[m[m\r\n:[K",,terminal_output
|
| 122 |
+
122,702811,"TERMINAL",0,0,"\r[K """"""Execute one training step.""""""[m[m\r\n:[K",,terminal_output
|
| 123 |
+
123,702969,"TERMINAL",0,0,"\r[K # Prepare model inputs[m[m\r\n:[K\r[K model_args = self._get_model_inputs_args(packed_batch)[m[m\r\n:[K",,terminal_output
|
| 124 |
+
124,703176,"TERMINAL",0,0,"\r[K[31m- logits = self.model(**model_args).logits.squeeze(0).float()[m[m\r\n:[K",,terminal_output
|
| 125 |
+
125,703331,"TERMINAL",0,0,"\r[K[32m+[m[32m logits = self.model(**model_args).logits.squeeze(0)[m[m\r\n:[K",,terminal_output
|
| 126 |
+
126,703528,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 127 |
+
127,703651,"TERMINAL",0,0,"\r[K # Compute log probs and entropy (unified for both CP and non-CP modes)[m[m\r\n:[K",,terminal_output
|
| 128 |
+
128,703831,"TERMINAL",0,0,"\r[K log_probs, entropy_result = get_logprob_and_entropy_with_cp([m[m\r\n:[K",,terminal_output
|
| 129 |
+
129,704083,"TERMINAL",0,0,"\r[K[36m@@ -662,7 +662,7 @@[m [mclass SFTTrainer:[m[m\r\n:[K",,terminal_output
|
| 130 |
+
130,704199,"TERMINAL",0,0,"\r[K def _val_step(self, packed_batch):[m[m\r\n:[K",,terminal_output
|
| 131 |
+
131,704477,"TERMINAL",0,0,"\r[K model_args = self._get_model_inputs_args(packed_batch)[m[m\r\n:[K",,terminal_output
|
| 132 |
+
132,704756,"TERMINAL",0,0,"\r[K with torch.no_grad():[m[m\r\n:[K",,terminal_output
|
| 133 |
+
133,704855,"TERMINAL",0,0,"\r[K[31m- logits = self.model(**model_args).logits.squeeze(0).float()[m[m\r\n:[K",,terminal_output
|
| 134 |
+
134,705001,"TERMINAL",0,0,"\r[K[32m+[m[32m logits = self.model(**model_args).logits.squeeze(0)[m[m\r\n:[K",,terminal_output
|
| 135 |
+
135,705140,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 136 |
+
136,705631,"TERMINAL",0,0,"\r[K # Compute log probs and entropy (unified for both CP and non-CP modes)[m[m\r\n:[K\r[K log_probs, entropy_result = get_logprob_and_entropy_with_cp([m[m\r\n:[K\r[K[36m@@ -718,6 +718,8 @@[m [mclass SFTTrainer:[m[m\r\n:[K",,terminal_output
|
| 137 |
+
137,705803,"TERMINAL",0,0,"\r[K if should_run_periodic_action([m[m\r\n:[K",,terminal_output
|
| 138 |
+
138,705960,"TERMINAL",0,0,"\r[K rollout_id, self.args.save_interval, self.num_rollout_per_epoch[m[m\r\n:[K",,terminal_output
|
| 139 |
+
139,706149,"TERMINAL",0,0,"\r[K ):[m[m\r\n:[K",,terminal_output
|
| 140 |
+
140,706304,"TERMINAL",0,0,"\r[K[32m+[m[32m torch.cuda.empty_cache() # <--- A[7mDD[27m[32m THIS LINE[m[m\r\n:[K",,terminal_output
|
| 141 |
+
141,706464,"TERMINAL",0,0,"\r[K[32m+[m[32m torch.distributed.barrier() # <--- A[7mDD[27m[32m THIS LINE (ensures all ranks pause here)[m[m\r\n:[K",,terminal_output
|
| 142 |
+
142,706632,"TERMINAL",0,0,"\r[K self.save_model(rollout_id)[m[m\r\n:[K",,terminal_output
|
| 143 |
+
143,706823,"TERMINAL",0,0,"\r[K [m[m\r\n:[K",,terminal_output
|
| 144 |
+
144,707000,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 145 |
+
145,707156,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 146 |
+
146,707327,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 147 |
+
147,707491,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 148 |
+
148,707911,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 149 |
+
149,708124,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 150 |
+
150,708223,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 151 |
+
151,708411,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 152 |
+
152,708580,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 153 |
+
153,708763,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 154 |
+
154,708944,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 155 |
+
155,709113,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 156 |
+
156,709281,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 157 |
+
157,709456,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 158 |
+
158,709627,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 159 |
+
159,709793,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 160 |
+
160,709955,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 161 |
+
161,710142,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 162 |
+
162,710310,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 163 |
+
163,710474,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 164 |
+
164,710662,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 165 |
+
165,710847,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 166 |
+
166,711034,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 167 |
+
167,711180,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 168 |
+
168,711340,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 169 |
+
169,711526,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 170 |
+
170,711703,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 171 |
+
171,711880,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 172 |
+
172,712094,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 173 |
+
173,712287,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 174 |
+
174,712502,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 175 |
+
175,712686,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 176 |
+
176,712936,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 177 |
+
177,713827,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 178 |
+
178,714201,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 179 |
+
179,714621,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 180 |
+
180,714822,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 181 |
+
181,715054,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 182 |
+
182,715224,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 183 |
+
183,715416,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 184 |
+
184,715542,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 185 |
+
185,715703,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 186 |
+
186,715845,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 187 |
+
187,715987,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 188 |
+
188,716103,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 189 |
+
189,716246,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 190 |
+
190,716320,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 191 |
+
191,1085087,"TERMINAL",0,0,"\r[K[HM [m[m\r\n[44;1H\r[K:[K",,terminal_output
|
| 192 |
+
192,1085463,"TERMINAL",0,0,"\r[K # Calculate val loss periodically[m[m\r\n[7m(END)[27m[K",,terminal_output
|
| 193 |
+
193,1085642,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 194 |
+
194,1090638,"TERMINAL",0,0,"\r[K[?1l>]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 195 |
+
195,1101278,"TERMINAL",0,0,"cd ${SCRATCH}",,terminal_command
|
| 196 |
+
196,1101284,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp",,terminal_output
|
| 197 |
+
197,1101627,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 198 |
+
198,1101648,"TERMINAL",0,0,"]633;C[0m[01;34midm[0m [01;34mlogs[0m [01;34mmihir[0m [01;34mnguyen31[0m [01;34myll[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp",,terminal_output
|
| 199 |
+
199,1115895,"TERMINAL",0,0,"cd mihir/huggingface/shared_data/",,terminal_command
|
| 200 |
+
200,1115899,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 201 |
+
201,1117660,"TERMINAL",0,0,"l",,terminal_command
|
| 202 |
+
202,1117667,"TERMINAL",0,0,"]633;Cbash: l: command not found\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 203 |
+
203,1118314,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 204 |
+
204,1118319,"TERMINAL",0,0,"]633;C[0m[01;34m13024253[0m [01;34m13025345[0m [01;34m13030388[0m [01;34m13030392[0m [01;34m13030396[0m [01;34m13030400[0m [01;34m13031982[0m [01;34m13032802[0m [01;34m13032806[0m [01;34m13032811[0m [01;34m13032815[0m [01;34m13032820[0m\r\n[01;34m13025339[0m [01;34m13025365[0m [01;34m13030389[0m [01;34m13030393[0m [01;34m13030397[0m [01;34m13031978[0m [01;34m13032780[0m [01;34m13032803[0m [01;34m13032807[0m [01;34m13032812[0m [01;34m13032816[0m [01;34m13032821[0m\r\n[01;34m13025341[0m [01;34m13030386[0m [01;34m13030390[0m [01;34m13030394[0m [01;34m13030398[0m [01;34m13031979[0m [01;34m13032800[0m [01;34m13032804[0m [01;34m13032809[0m [01;34m13032813[0m [01;34m13032817[0m [01;34m13034357[0m\r\n[01;34m13025342[0m [01;34m13030387[0m [01;34m13030391[0m [01;34m13030395[0m [01;34m13030399[0m [01;34m13031981[0m [01;34m13032801[0m [01;34m13032805[0m [01;34m13032810[0m [01;34m13032814[0m [01;34m13032818[0m [01;34mqwen3-600M-fsdp-1116-noref[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 205 |
+
205,1123484,"TERMINAL",0,0,"cd 13034357",,terminal_command
|
| 206 |
+
206,1124592,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 207 |
+
207,1124605,"TERMINAL",0,0,"]633;C[0m[01;34mcheckpoints[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357",,terminal_output
|
| 208 |
+
208,1126601,"TERMINAL",0,0,"cd checkpoints/",,terminal_command
|
| 209 |
+
209,1126898,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 210 |
+
210,1126969,"TERMINAL",0,0,"]633;C[0m[01;34miter_0001000[0m [01;34miter_0002000[0m [01;34miter_0003000[0m [01;34miter_0004000[0m [01;34miter_0005000[0m latest_checkpointed_iteration.txt [01;34mrollout[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357/checkpoints",,terminal_output
|
| 211 |
+
211,1130166,"TERMINAL",0,0,"ls --color=auto iter_0001000/",,terminal_command
|
| 212 |
+
212,1130169,"TERMINAL",0,0,"]633;C[0m[01;34mlr_scheduler[0m meta.json [01;34mmodel[0m [01;34moptimizer[0m rng.pt\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357/checkpoints",,terminal_output
|
| 213 |
+
213,1306846,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 214 |
+
214,1306849,"TERMINAL",0,0,"]633;C[0m[01;34miter_0001000[0m [01;34miter_0002000[0m [01;34miter_0003000[0m [01;34miter_0004000[0m [01;34miter_0005000[0m latest_checkpointed_iteration.txt [01;34mrollout[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357/checkpoints",,terminal_output
|
| 215 |
+
215,1309990,"TERMINAL",0,0,"du -h .",,terminal_command
|
| 216 |
+
216,1310043,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 217 |
+
217,1310226,"TERMINAL",0,0,"40K\t./iter_0003000/lr_scheduler\r\n",,terminal_output
|
| 218 |
+
218,1310370,"TERMINAL",0,0,"62G\t./iter_0003000/optimizer\r\n",,terminal_output
|
| 219 |
+
219,1310513,"TERMINAL",0,0,"31G\t./iter_0003000/model\r\n92G\t./iter_0003000\r\n",,terminal_output
|
| 220 |
+
220,1310776,"TERMINAL",0,0,"40K\t./iter_0004000/lr_scheduler\r\n62G\t./iter_0004000/optimizer\r\n31G\t./iter_0004000/model\r\n92G\t./iter_0004000\r\n3.0K\t./rollout\r\n",,terminal_output
|
| 221 |
+
221,1310892,"TERMINAL",0,0,"40K\t./iter_0005000/lr_scheduler\r\n62G\t./iter_0005000/optimizer\r\n",,terminal_output
|
| 222 |
+
222,1310944,"TERMINAL",0,0,"31G\t./iter_0005000/model\r\n92G\t./iter_0005000\r\n",,terminal_output
|
| 223 |
+
223,1311035,"TERMINAL",0,0,"40K\t./iter_0001000/lr_scheduler\r\n",,terminal_output
|
| 224 |
+
224,1311105,"TERMINAL",0,0,"62G\t./iter_0001000/optimizer\r\n",,terminal_output
|
| 225 |
+
225,1311234,"TERMINAL",0,0,"31G\t./iter_0001000/model\r\n92G\t./iter_0001000\r\n40K\t./iter_0002000/lr_scheduler\r\n62G\t./iter_0002000/optimizer\r\n31G\t./iter_0002000/model\r\n92G\t./iter_0002000\r\n459G\t.\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357/checkpoints",,terminal_output
|
| 226 |
+
226,1316248,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 227 |
+
227,1316250,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357",,terminal_output
|
| 228 |
+
228,1316616,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 229 |
+
229,1316624,"TERMINAL",0,0,"]633;C[0m[01;34mcheckpoints[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13034357",,terminal_output
|
| 230 |
+
230,1317548,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 231 |
+
231,1317790,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 232 |
+
232,1317811,"TERMINAL",0,0,"]633;C[0m[01;34m13024253[0m [01;34m13025345[0m [01;34m13030388[0m [01;34m13030392[0m [01;34m13030396[0m [01;34m13030400[0m [01;34m13031982[0m [01;34m13032802[0m [01;34m13032806[0m [01;34m13032811[0m [01;34m13032815[0m [01;34m13032820[0m\r\n[01;34m13025339[0m [01;34m13025365[0m [01;34m13030389[0m [01;34m13030393[0m [01;34m13030397[0m [01;34m13031978[0m [01;34m13032780[0m [01;34m13032803[0m [01;34m13032807[0m [01;34m13032812[0m [01;34m13032816[0m [01;34m13032821[0m\r\n[01;34m13025341[0m [01;34m13030386[0m [01;34m13030390[0m [01;34m13030394[0m [01;34m13030398[0m [01;34m13031979[0m [01;34m13032800[0m [01;34m13032804[0m [01;34m13032809[0m [01;34m13032813[0m [01;34m13032817[0m [01;34m13034357[0m\r\n[01;34m13025342[0m [01;34m13030387[0m [01;34m13030391[0m [01;34m13030395[0m [01;34m13030399[0m [01;34m13031981[0m [01;34m13032801[0m [01;34m13032805[0m [01;34m13032810[0m [01;34m13032814[0m [01;34m13032818[0m [01;34mqwen3-600M-fsdp-1116-noref[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 233 |
+
233,1350656,"TERMINAL",0,0,"cd 13032805",,terminal_command
|
| 234 |
+
234,1351095,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 235 |
+
235,1351111,"TERMINAL",0,0,"]633;C[0m[01;34mcheckpoints[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13032805",,terminal_output
|
| 236 |
+
236,1353850,"TERMINAL",0,0,"du -h .",,terminal_command
|
| 237 |
+
237,1353898,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 238 |
+
238,1354011,"TERMINAL",0,0,"36K\t./checkpoints/iter_0014000/lr_scheduler\r\n",,terminal_output
|
| 239 |
+
239,1354098,"TERMINAL",0,0,"13G\t./checkpoints/iter_0014000/optimizer\r\n",,terminal_output
|
| 240 |
+
240,1354277,"TERMINAL",0,0,"7.6G\t./checkpoints/iter_0014000/model\r\n21G\t./checkpoints/iter_0014000\r\n36K\t./checkpoints/iter_0015000/lr_scheduler\r\n13G\t./checkpoints/iter_0015000/optimizer\r\n7.6G\t./checkpoints/iter_0015000/model\r\n21G\t./checkpoints/iter_0015000\r\n",,terminal_output
|
| 241 |
+
241,1354394,"TERMINAL",0,0,"36K\t./checkpoints/iter_0007000/lr_scheduler\r\n",,terminal_output
|
| 242 |
+
242,1354469,"TERMINAL",0,0,"13G\t./checkpoints/iter_0007000/optimizer\r\n7.6G\t./checkpoints/iter_0007000/model\r\n21G\t./checkpoints/iter_0007000\r\n",,terminal_output
|
| 243 |
+
243,1354534,"TERMINAL",0,0,"36K\t./checkpoints/iter_0003000/lr_scheduler\r\n",,terminal_output
|
| 244 |
+
244,1354590,"TERMINAL",0,0,"13G\t./checkpoints/iter_0003000/optimizer\r\n",,terminal_output
|
| 245 |
+
245,1354684,"TERMINAL",0,0,"7.6G\t./checkpoints/iter_0003000/model\r\n21G\t./checkpoints/iter_0003000\r\n",,terminal_output
|
| 246 |
+
246,1354740,"TERMINAL",0,0,"36K\t./checkpoints/iter_0011000/lr_scheduler\r\n",,terminal_output
|
| 247 |
+
247,1355757,"TERMINAL",0,0,"13G\t./checkpoints/iter_0011000/optimizer\r\n7.6G\t./checkpoints/iter_0011000/model\r\n21G\t./checkpoints/iter_0011000\r\n36K\t./checkpoints/iter_0013000/lr_scheduler\r\n13G\t./checkpoints/iter_0013000/optimizer\r\n7.6G\t./checkpoints/iter_0013000/model\r\n21G\t./checkpoints/iter_0013000\r\n36K\t./checkpoints/iter_0017000/lr_scheduler\r\n13G\t./checkpoints/iter_0017000/optimizer\r\n7.6G\t./checkpoints/iter_0017000/model\r\n21G\t./checkpoints/iter_0017000\r\n36K\t./checkpoints/iter_0012000/lr_scheduler\r\n13G\t./checkpoints/iter_0012000/optimizer\r\n7.6G\t./checkpoints/iter_0012000/model\r\n21G\t./checkpoints/iter_0012000\r\n36K\t./checkpoints/iter_0016000/lr_scheduler\r\n13G\t./checkpoints/iter_0016000/optimizer\r\n7.6G\t./checkpoints/iter_0016000/model\r\n21G\t./checkpoints/iter_0016000\r\n36K\t./checkpoints/iter_0008000/lr_scheduler\r\n13G\t./checkpoints/iter_0008000/optimizer\r\n7.6G\t./checkpoints/iter_0008000/model\r\n21G\t./checkpoints/iter_0008000\r\n36K\t./checkpoints/iter_0020000/lr_scheduler\r\n13G\t./checkpoints/iter_0020000/optimizer\r\n7.6G\t./checkpoints/iter_0020000/model\r\n21G\t./checkpoints/iter_0020000\r\n36K\t./checkpoints/iter_0010000/lr_scheduler\r\n13G\t./checkpoints/iter_0010000/optimizer\r\n7.6G\t./checkpoints/iter_0010000/model\r\n21G\t./checkpoints/iter_0010000\r\n36K\t./checkpoints/iter_0018000/lr_scheduler\r\n13G\t./checkpoints/iter_0018000/optimizer\r\n7.6G\t./checkpoints/iter_0018000/model\r\n21G\t./checkpoints/iter_0018000\r\n36K\t./checkpoints/iter_0004000/lr_scheduler\r\n13G\t./checkpoints/iter_0004000/optimizer\r\n7.6G\t./checkpoints/iter_0004000/model\r\n21G\t./checkpoints/iter_0004000\r\n36K\t./checkpoints/iter_0009000/lr_scheduler\r\n13G\t./checkpoints/iter_0009000/optimizer\r\n7.6G\t./checkpoints/iter_0009000/model\r\n21G\t./checkpoints/iter_0009000\r\n11K\t./checkpoints/rollout\r\n",,terminal_output
|
| 248 |
+
248,1355997,"TERMINAL",0,0,"36K\t./checkpoints/iter_0005000/lr_scheduler\r\n13G\t./checkpoints/iter_0005000/optimizer\r\n7.6G\t./checkpoints/iter_0005000/model\r\n21G\t./checkpoints/iter_0005000\r\n36K\t./checkpoints/iter_0019000/lr_scheduler\r\n13G\t./checkpoints/iter_0019000/optimizer\r\n7.6G\t./checkpoints/iter_0019000/model\r\n21G\t./checkpoints/iter_0019000\r\n",,terminal_output
|
| 249 |
+
249,1356176,"TERMINAL",0,0,"36K\t./checkpoints/iter_0001000/lr_scheduler\r\n13G\t./checkpoints/iter_0001000/optimizer\r\n7.6G\t./checkpoints/iter_0001000/model\r\n21G\t./checkpoints/iter_0001000\r\n36K\t./checkpoints/iter_0002000/lr_scheduler\r\n13G\t./checkpoints/iter_0002000/optimizer\r\n7.6G\t./checkpoints/iter_0002000/model\r\n21G\t./checkpoints/iter_0002000\r\n36K\t./checkpoints/iter_0006000/lr_scheduler\r\n13G\t./checkpoints/iter_0006000/optimizer\r\n7.6G\t./checkpoints/iter_0006000/model\r\n21G\t./checkpoints/iter_0006000\r\n409G\t./checkpoints\r\n409G\t.\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13032805",,terminal_output
|
| 250 |
+
250,1379857,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 251 |
+
251,1379863,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 252 |
+
252,1380382,"TERMINAL",0,0,"sl",,terminal_command
|
| 253 |
+
253,1380389,"TERMINAL",0,0,"]633;Cbash: sl: command not found\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 254 |
+
254,1381246,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 255 |
+
255,1381264,"TERMINAL",0,0,"]633;C[0m[01;34m13024253[0m [01;34m13025345[0m [01;34m13030388[0m [01;34m13030392[0m [01;34m13030396[0m [01;34m13030400[0m [01;34m13031982[0m [01;34m13032802[0m [01;34m13032806[0m [01;34m13032811[0m [01;34m13032815[0m [01;34m13032820[0m\r\n[01;34m13025339[0m [01;34m13025365[0m [01;34m13030389[0m [01;34m13030393[0m [01;34m13030397[0m [01;34m13031978[0m [01;34m13032780[0m [01;34m13032803[0m [01;34m13032807[0m [01;34m13032812[0m [01;34m13032816[0m [01;34m13032821[0m\r\n[01;34m13025341[0m [01;34m13030386[0m [01;34m13030390[0m [01;34m13030394[0m [01;34m13030398[0m [01;34m13031979[0m [01;34m13032800[0m [01;34m13032804[0m [01;34m13032809[0m [01;34m13032813[0m [01;34m13032817[0m [01;34m13034357[0m\r\n[01;34m13025342[0m [01;34m13030387[0m [01;34m13030391[0m [01;34m13030395[0m [01;34m13030399[0m [01;34m13031981[0m [01;34m13032801[0m [01;34m13032805[0m [01;34m13032810[0m [01;34m13032814[0m [01;34m13032818[0m [01;34mqwen3-600M-fsdp-1116-noref[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data",,terminal_output
|
| 256 |
+
256,1383247,"TERMINAL",0,0,"cd 13032801",,terminal_command
|
| 257 |
+
257,1383654,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 258 |
+
258,1383660,"TERMINAL",0,0,"]633;C[0m[01;34mcheckpoints[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13032801",,terminal_output
|
| 259 |
+
259,1386050,"TERMINAL",0,0,"du -h .",,terminal_command
|
| 260 |
+
260,1386098,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 261 |
+
261,1386235,"TERMINAL",0,0,"40K\t./checkpoints/iter_0004500/lr_scheduler\r\n",,terminal_output
|
| 262 |
+
262,1386350,"TERMINAL",0,0,"17K\t./checkpoints/iter_0004500/optimizer\r\n",,terminal_output
|
| 263 |
+
263,1386414,"TERMINAL",0,0,"15M\t./checkpoints/iter_0004500/model\r\n26M\t./checkpoints/iter_0004500\r\n",,terminal_output
|
| 264 |
+
264,1386558,"TERMINAL",0,0,"40K\t./checkpoints/iter_0003000/lr_scheduler\r\n",,terminal_output
|
| 265 |
+
265,1386652,"TERMINAL",0,0,"17K\t./checkpoints/iter_0003000/optimizer\r\n",,terminal_output
|
| 266 |
+
266,1386746,"TERMINAL",0,0,"15M\t./checkpoints/iter_0003000/model\r\n26M\t./checkpoints/iter_0003000\r\n",,terminal_output
|
| 267 |
+
267,1386851,"TERMINAL",0,0,"40K\t./checkpoints/iter_0001500/lr_scheduler\r\n",,terminal_output
|
| 268 |
+
268,1387056,"TERMINAL",0,0,"17K\t./checkpoints/iter_0001500/optimizer\r\n",,terminal_output
|
| 269 |
+
269,1387059,"TERMINAL",0,0,"15M\t./checkpoints/iter_0001500/model\r\n26M\t./checkpoints/iter_0001500\r\n",,terminal_output
|
| 270 |
+
270,1387119,"TERMINAL",0,0,"40K\t./checkpoints/iter_0000500/lr_scheduler\r\n",,terminal_output
|
| 271 |
+
271,1387212,"TERMINAL",0,0,"17K\t./checkpoints/iter_0000500/optimizer\r\n",,terminal_output
|
| 272 |
+
272,1387275,"TERMINAL",0,0,"15M\t./checkpoints/iter_0000500/model\r\n26M\t./checkpoints/iter_0000500\r\n",,terminal_output
|
| 273 |
+
273,1387343,"TERMINAL",0,0,"40K\t./checkpoints/iter_0004000/lr_scheduler\r\n",,terminal_output
|
| 274 |
+
274,1387418,"TERMINAL",0,0,"17K\t./checkpoints/iter_0004000/optimizer\r\n",,terminal_output
|
| 275 |
+
275,1387565,"TERMINAL",0,0,"15M\t./checkpoints/iter_0004000/model\r\n26M\t./checkpoints/iter_0004000\r\n5.5K\t./checkpoints/rollout\r\n40K\t./checkpoints/iter_0003500/lr_scheduler\r\n17K\t./checkpoints/iter_0003500/optimizer\r\n15M\t./checkpoints/iter_0003500/model\r\n26M\t./checkpoints/iter_0003500\r\n",,terminal_output
|
| 276 |
+
276,1387623,"TERMINAL",0,0,"40K\t./checkpoints/iter_0005000/lr_scheduler\r\n",,terminal_output
|
| 277 |
+
277,1387686,"TERMINAL",0,0,"17K\t./checkpoints/iter_0005000/optimizer\r\n",,terminal_output
|
| 278 |
+
278,1388042,"TERMINAL",0,0,"15M\t./checkpoints/iter_0005000/model\r\n26M\t./checkpoints/iter_0005000\r\n40K\t./checkpoints/iter_0002500/lr_scheduler\r\n17K\t./checkpoints/iter_0002500/optimizer\r\n15M\t./checkpoints/iter_0002500/model\r\n26M\t./checkpoints/iter_0002500\r\n40K\t./checkpoints/iter_0001000/lr_scheduler\r\n17K\t./checkpoints/iter_0001000/optimizer\r\n15M\t./checkpoints/iter_0001000/model\r\n26M\t./checkpoints/iter_0001000\r\n40K\t./checkpoints/iter_0002000/lr_scheduler\r\n17K\t./checkpoints/iter_0002000/optimizer\r\n15M\t./checkpoints/iter_0002000/model\r\n26M\t./checkpoints/iter_0002000\r\n259M\t./checkpoints\r\n259M\t.\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13032801",,terminal_output
|
| 279 |
+
279,1408798,"TERMINAL",0,0,"cd ../13032804",,terminal_command
|
| 280 |
+
280,1411088,"TERMINAL",0,0,"du -h .",,terminal_command
|
| 281 |
+
281,1411139,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 282 |
+
282,1411322,"TERMINAL",0,0,"40K\t./checkpoints/iter_0003000/lr_scheduler\r\n",,terminal_output
|
| 283 |
+
283,1411415,"TERMINAL",0,0,"13G\t./checkpoints/iter_0003000/optimizer\r\n",,terminal_output
|
| 284 |
+
284,1411482,"TERMINAL",0,0,"7.6G\t./checkpoints/iter_0003000/model\r\n21G\t./checkpoints/iter_0003000\r\n",,terminal_output
|
| 285 |
+
285,1411583,"TERMINAL",0,0,"40K\t./checkpoints/iter_0004000/lr_scheduler\r\n13G\t./checkpoints/iter_0004000/optimizer\r\n",,terminal_output
|
| 286 |
+
286,1411644,"TERMINAL",0,0,"7.6G\t./checkpoints/iter_0004000/model\r\n21G\t./checkpoints/iter_0004000\r\n",,terminal_output
|
| 287 |
+
287,1411707,"TERMINAL",0,0,"3.0K\t./checkpoints/rollout\r\n",,terminal_output
|
| 288 |
+
288,1411830,"TERMINAL",0,0,"40K\t./checkpoints/iter_0005000/lr_scheduler\r\n13G\t./checkpoints/iter_0005000/optimizer\r\n7.6G\t./checkpoints/iter_0005000/model\r\n21G\t./checkpoints/iter_0005000\r\n",,terminal_output
|
| 289 |
+
289,1411994,"TERMINAL",0,0,"40K\t./checkpoints/iter_0001000/lr_scheduler\r\n13G\t./checkpoints/iter_0001000/optimizer\r\n",,terminal_output
|
| 290 |
+
290,1412114,"TERMINAL",0,0,"7.6G\t./checkpoints/iter_0001000/model\r\n21G\t./checkpoints/iter_0001000\r\n40K\t./checkpoints/iter_0002000/lr_scheduler\r\n13G\t./checkpoints/iter_0002000/optimizer\r\n7.6G\t./checkpoints/iter_0002000/model\r\n21G\t./checkpoints/iter_0002000\r\n103G\t./checkpoints\r\n103G\t.\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/mihir/huggingface/shared_data/13032804",,terminal_output
|
| 291 |
+
291,1704506,"TERMINAL",0,0,"cd ${SCRATCH}",,terminal_command
|
| 292 |
+
292,1704512,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp",,terminal_output
|
| 293 |
+
293,1707885,"TERMINAL",0,0,"cd /p/home/jusers/mahajan1/juwels/projects/mahajan1/miles",,terminal_command
|
| 294 |
+
294,1707894,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 295 |
+
295,1711713,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command
|
| 296 |
+
296,1711752,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 297 |
+
297,1712931,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 298 |
+
298,1712934,"TERMINAL",0,0,"]633;Cbuild_conda.sh core.jwb0234.juwels.4036136 core.jwb0246.juwels.1957296 core.jwb0248.juwels.1760203 [0m[01;34mdocs[0m [01;34mslurm[0m\r\ncore.jwb0234.juwels.4019759 core.jwb0234.juwels.4036137 core.jwb0246.juwels.1957297 core.jwb0248.juwels.1760204 [01;34mexamples[0m [01;34mtests[0m\r\ncore.jwb0234.juwels.4019760 core.jwb0234.juwels.4036138 core.jwb0246.juwels.1957298 core.jwb0248.juwels.1760205 [01;34mimgs[0m tmp.txt\r\ncore.jwb0234.juwels.4019761 core.jwb0234.juwels.4036139 core.jwb0246.juwels.1957299 core.jwb0248.juwels.1760206 LICENSE [01;34mtools[0m\r\ncore.jwb0234.juwels.4019762 core.jwb0246.juwels.1953481 core.jwb0246.juwels.1969807 core.jwb0248.juwels.1762730 [01;34mmiles[0m train_async.py\r\ncore.jwb0234.juwels.4021039 core.jwb0246.juwels.1953482 core.jwb0246.juwels.1969808 core.jwb0248.juwels.1762732 [01;34mmiles.egg-info[0m train.py\r\ncore.jwb0234.juwels.4021040 core.jwb0246.juwels.1953483 core.jwb0246.juwels.1969809 core.jwb0248.juwels.1762733 [01;34mmiles_plugins[0m train_sft.py\r\ncore.jwb0234.juwels.4021041 core.jwb0246.juwels.1953484 core.jwb0246.juwels.1969810 core.jwb0248.juwels.1775280 pyproject.toml [01;34mwandb[0m\r\ncore.jwb0234.juwels.4021042 core.jwb0246.juwels.1954767 core.jwb0248.juwels.1758912 core.jwb0248.juwels.1775281 README.md\r\ncore.jwb0234.juwels.4023546 core.jwb0246.juwels.1954768 core.jwb0248.juwels.1758913 core.jwb0248.juwels.1775282 requirements.txt\r\ncore.jwb0234.juwels.4023547 core.jwb0246.juwels.1954769 core.jwb0248.juwels.1758914 core.jwb0248.juwels.1775283 [01;34mscripts[0m\r\ncore.jwb0234.juwels.4023548 core.jwb0246.juwels.1954770 core.jwb0248.juwels.1758915 [01;34mdocker[0m setup.py\r\n]0;mahajan1@jwlogin23:~/projects/mahajan1/miles",,terminal_output
|
| 299 |
+
299,1717970,"TERMINAL",0,0,"cd /p/scratch/envcomp/logs",,terminal_command
|
| 300 |
+
300,1717991,"TERMINAL",0,0,"]633;C]0;mahajan1@jwlogin23:/p/scratch/envcomp/logs",,terminal_output
|
| 301 |
+
301,1718311,"TERMINAL",0,0,"ls --color=auto",,terminal_command
|
| 302 |
+
302,1718317,"TERMINAL",0,0,"]633;C[0m[01;34mANTA_CLAUS[0m\r\n]0;mahajan1@jwlogin23:/p/scratch/envcomp/logs",,terminal_output
|
| 303 |
+
303,1719731,"TERMINAL",0,0,"cd ANTA_CLAUS/wandb",,terminal_command
|
| 304 |
+
304,1726328,"TERMINAL",0,0,"wandb sync offline-run-202512*",,terminal_command
|
| 305 |
+
305,1726368,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 306 |
+
306,1733834,"TERMINAL",0,0,"Find logs at: /tmp/debug-cli.mahajan1.log\r\n",,terminal_output
|
| 307 |
+
307,1735300,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/srhvpy1x ... ",,terminal_output
|
| 308 |
+
308,1775668,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 309 |
+
309,1776254,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/s3vt6j03 ... ",,terminal_output
|
| 310 |
+
310,1854902,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 311 |
+
311,1855347,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/hdeu863c ... ",,terminal_output
|
| 312 |
+
312,2607375,"TERMINAL",0,0,"done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/txpn35pr ... ",,terminal_output
|
| 313 |
+
313,2607523,"TERMINAL",0,0,"done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/2tmrcqyr ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/cynfzb05 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/gbiw7ser ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/vwqvlawr ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/qy3yond7 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/naygks5j ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/oiw5w79y ... ",,terminal_output
|
| 314 |
+
314,2607756,"TERMINAL",0,0,"done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/srqg6je9 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/0nh03pm4 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/7zbgvp5x ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/lq3zgm03 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/lu9cvgax ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/p2t8lqnz ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/r04esu69 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/ae6935ok ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/8pfgcefs ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/jl7irwel ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/bmylq4k5 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/ik1q9zz5 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/7qsqbv0l ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/4riovnzx ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/r5gobf6z ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/h4yo7jku ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/p1w5hpc9 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/8eru4ijj ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/05geexep ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/97n8prwm ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/995dj0ls ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/r8kd7td9 ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/b3ubrvgn ... done.\r\nSyncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/h7d0nu82 ... \r\n\r\n\r\n",,terminal_output
|
| 315 |
+
315,2615353,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 316 |
+
316,2616042,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/dxnhrjgl ... ",,terminal_output
|
| 317 |
+
317,2688171,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 318 |
+
318,2688814,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/8uqk41di ... ",,terminal_output
|
| 319 |
+
319,2713210,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 320 |
+
320,2713832,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/qycikhf6 ... ",,terminal_output
|
| 321 |
+
321,2738549,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 322 |
+
322,2739164,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/my1c4y3r ... ",,terminal_output
|
| 323 |
+
323,2780388,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 324 |
+
324,2780999,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/xfl44nex ... ",,terminal_output
|
| 325 |
+
325,2852143,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 326 |
+
326,2852868,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/1izuyptv ... ",,terminal_output
|
| 327 |
+
327,2876023,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 328 |
+
328,2876796,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/z4ouqxad ... ",,terminal_output
|
| 329 |
+
329,2916322,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 330 |
+
330,2917040,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/rcrggu5s ... ",,terminal_output
|
| 331 |
+
331,2987540,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 332 |
+
332,2988288,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/dgnm2rjh ... ",,terminal_output
|
| 333 |
+
333,3062857,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 334 |
+
334,3063479,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/sovqvfna ... ",,terminal_output
|
| 335 |
+
335,3088467,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 336 |
+
336,3089066,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/a4qul0re ... ",,terminal_output
|
| 337 |
+
337,3127410,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 338 |
+
338,3128025,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/cxqahaj0 ... ",,terminal_output
|
| 339 |
+
339,3151842,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 340 |
+
340,3152439,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/ernte0x4 ... ",,terminal_output
|
| 341 |
+
341,3175417,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 342 |
+
342,3175951,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/5t3dv9s2 ... ",,terminal_output
|
| 343 |
+
343,3214658,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 344 |
+
344,3215432,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/iismd3s9 ... ",,terminal_output
|
| 345 |
+
345,3253920,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 346 |
+
346,3254578,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/22c5kj9q ... ",,terminal_output
|
| 347 |
+
347,3294181,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 348 |
+
348,3294793,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/54umjcrz ... ",,terminal_output
|
| 349 |
+
349,3365370,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 350 |
+
350,3366016,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/axchcl5p ... ",,terminal_output
|
| 351 |
+
351,3439595,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 352 |
+
352,3439949,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/zeltecf3 ... ",,terminal_output
|
| 353 |
+
353,3513487,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 354 |
+
354,3514079,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/xtx69n1v ... ",,terminal_output
|
| 355 |
+
355,3552448,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 356 |
+
356,3553097,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/fb070njd ... ",,terminal_output
|
| 357 |
+
357,3576478,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 358 |
+
358,3577199,"TERMINAL",0,0,"Syncing: https://wandb.ai/instant-uv/crowd-pilot-miles/runs/w5ikcawf ... ",,terminal_output
|
| 359 |
+
359,3595221,"TERMINAL",0,0,"done.\r\n",,terminal_output
|
| 360 |
+
360,3595805,"TERMINAL",0,0,"]0;mahajan1@jwlogin23:/p/scratch/envcomp/logs/ANTA_CLAUS/wandb",,terminal_output
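The session above pushes every offline run matching `offline-run-202512*` through `wandb sync`. A minimal sketch of the same step driven from Python, syncing one run directory at a time so a single failure does not abort the whole batch (the directory is the one used in the session):

```python
import glob
import subprocess

# Sync each offline W&B run directory individually, mirroring the
# `wandb sync offline-run-202512*` command recorded above.
wandb_dir = "/p/scratch/envcomp/logs/ANTA_CLAUS/wandb"
for run_dir in sorted(glob.glob(f"{wandb_dir}/offline-run-202512*")):
    subprocess.run(["wandb", "sync", run_dir], check=False)
```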
|
58dff52cba2a091453cfbef6169091e684254819f0b9f334dbecea6a130284bc/crowd-code-f9c548ba-e7ae-418f-bcd9-3b3e771f5fa01767372765713-2026_01_02-17.53.20.698/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-08cafcbe-d0e5-4505-ac95-8b9050d84d731759228460178-2025_09_30-12.34.56.10/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-10196e97-c322-40bf-836a-16ee811908931758807420822-2025_09_25-15.37.22.442/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1e70ca6b-f2dc-4f0c-81bb-b7d403b4df271752242192153-2025_07_11-15.56.51.266/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1f51b8ea-81c1-4db7-8702-1416f8c1c0cc1751376377945-2025_07_01-15.27.44.831/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-242fa472-b5db-492d-8b66-f482468772b21757500459062-2025_09_10-12.34.42.52/source.csv
ADDED
|
@@ -0,0 +1,4 @@
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
1,7,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nimport optax\nimport orbax\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n name: str = """"\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n wandb_id: str = """"\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""dropout_rng""]}\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n rng = jax.random.PRNGKey(args.seed)\n if args.log:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n 
latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_resolution, args.image_resolution, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n init_params = restore_genie_components(\n init_params, args.tokenizer_checkpoint, args.lam_checkpoint\n )\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n # --- TRAIN LOOP ---\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n step = 0\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _mask_rng = jax.random.split(rng, 3)\n inputs = dict(\n videos=videos,\n action=jnp.zeros((args.batch_size, args.seq_len), dtype=jnp.float32),\n dropout_rng=_rng,\n mask_rng=_mask_rng,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[15])),\n recon=wandb.Image(np.asarray(recon_seq[15])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab
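The `dynamics_loss_fn` recorded above averages the token cross-entropy only over masked positions. A standalone sketch of that reduction, with dummy shapes chosen purely for illustration:

```python
import jax
import jax.numpy as jnp
import optax

# Masked token loss as in dynamics_loss_fn: average cross-entropy and
# accuracy over masked positions only. Shapes here are illustrative.
rng = jax.random.PRNGKey(0)
logits = jax.random.normal(rng, (2, 16, 1024))   # (batch, tokens, vocab)
targets = jnp.zeros((2, 16), dtype=jnp.int32)    # ground-truth token ids
mask = jax.random.bernoulli(rng, 0.5, (2, 16)).astype(jnp.float32)

ce = optax.softmax_cross_entropy_with_integer_labels(logits, targets)
masked_ce = (mask * ce).sum() / mask.sum()
masked_acc = (mask * (logits.argmax(-1) == targets)).sum() / mask.sum()
print(masked_ce, masked_acc)
```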
|
| 3 |
+
2,479,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:34:42 PM [info] Activating crowd-code\n12:34:42 PM [info] Recording started\n12:34:42 PM [info] Initializing git provider using file system watchers...\n",Log,tab
|
| 4 |
+
3,1114,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"12:34:42 PM [info] Git repository found\n12:34:42 PM [info] Git provider initialized successfully\n12:34:42 PM [info] Initial git state: [object Object]\n",Log,content
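Every `source.csv` in this dataset starts with the header shown above (`Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type`). A sketch of reading one with Python's `csv` module, which handles the quoted multi-line `Text` payloads; the path is hypothetical, and `Time` appears to be milliseconds since the recording started:

```python
import csv

# Iterate over one crowd-code recording; DictReader copes with the quoted,
# multi-line Text fields that hold whole source files and terminal output.
with open("source.csv", newline="") as f:
    for row in csv.DictReader(f):
        # Type distinguishes e.g. tab, content, terminal_command,
        # terminal_output, selection_mouse events seen in this dump.
        print(row["Sequence"], row["Time"], row["File"], row["Type"])
```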
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-27fd9e5e-b562-49ba-9321-8ed11ebad94f1756718814603-2025_09_01-11.27.16.489/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2b01d9ad-2c11-4b6b-bc1b-f335a6c7dd4a1750840473876-2025_06_25-10.34.49.55/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2cce3a90-32a5-4d8b-8cb0-10445a2ee7a71754054463184-2025_08_01-15.21.32.127/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2e25d757-859b-4fde-ba77-792b0eb397df1759579674644-2025_10_04-14.09.15.386/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2f4869ae-f0d3-4e60-80d2-8655e52f1ea31751064760332-2025_06_28-00.52.51.957/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3ba207b9-2f18-4919-a6bb-bebae1f850441758203079280-2025_09_18-15.45.09.509/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3ebbac58-2f0e-41b5-a15d-9a2b6b0c20ab1758725119572-2025_09_24-16.46.25.34/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-411c0b26-1d5f-4194-8163-38afd5728d3d1756886238975-2025_09_03-09.59.08.217/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-43fbfe2e-f9a4-4bb4-acf4-2bdbf37810851757006149083-2025_09_04-19.16.32.851/source.csv
ADDED
The diff for this file is too large to render. See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-4b6051ce-1cfc-4dca-874b-0d0d7270d33f1753454394749-2025_07_25-16.42.21.379/source.csv
ADDED
|
@@ -0,0 +1,125 @@
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
2,1366,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"4:42:21 PM [info] Activating crowd-code\n4:42:21 PM [info] Recording started\n4:42:21 PM [info] Initializing git provider using file system watchers...\n4:42:21 PM [info] Git repository found\n4:42:21 PM [info] Git provider initialized successfully\n4:42:21 PM [info] Initial git state: [object Object]\n",Log,tab
|
| 3 |
+
3,2902,"TERMINAL",0,0,"bash",,terminal_focus
|
| 4 |
+
4,33144,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-8-node-$slurm_job_id \\n --tags dynamics maskprob-fix 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab
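The sbatch script above asks Slurm for a SIGUSR1 five minutes before timeout (`#SBATCH --signal=b:usr1@300`) and traps it in bash to requeue the job. A sketch of the same pattern handled inside the Python training process instead (hypothetical; the recorded script does this from the shell):

```python
import os
import signal
import subprocess

def requeue(signum, frame):
    """Requeue this Slurm job when the pre-timeout SIGUSR1 arrives."""
    # optional: trigger checkpoint saving here, as the script's comment suggests
    job_id = os.environ.get("SLURM_JOB_ID")
    if job_id:
        subprocess.run(["scontrol", "requeue", job_id], check=False)
    raise SystemExit(0)

signal.signal(signal.SIGUSR1, requeue)
```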
|
| 5 |
+
5,51851,"TERMINAL",0,0,"bash",,terminal_focus
|
| 6 |
+
6,56221,"TERMINAL",0,0,"fsacct_week",,terminal_command
|
| 7 |
+
7,56266,"TERMINAL",0,0,"]633;E;2025-07-25 16:43:16 fsacct_week;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output
|
| 8 |
+
8,56266,"TERMINAL",0,0," JobID JobName Partition All State Elapsed Timelimit \r\n--------------- ------------------------------ ---------------- --- ------------ ---------- ---------- \r\n 3358457 train_dyn_yolorun_new_arch accelerated 48 FAILED 00:00:28 2-00:00:00 \r\n 3359334 wrap accelerated 6 TIMEOUT 10:00:29 10:00:00 \r\n 3359338 wrap accelerated 6 TIMEOUT 10:00:16 10:00:00 \r\n 3359343 train_dyn_new_arch-bugfixed-s+ accelerated 48 COMPLETED 23:19:14 2-00:00:00 \r\n 3359349 train_dyn_new_arch-bugfixed-t+ accelerated 48 COMPLETED 1-01:00:55 2-00:00:00 \r\n 3365873 train_dynamics_overfit_sample+ accelerated 6 COMPLETED 01:26:52 2-00:00:00 \r\n 3365876 train_dynamics_overfit_sample+ accelerated 6 COMPLETED 01:40:07 2-00:00:00 \r\n 3366883 train_dynamics_overfit_sample+ accelerated 6 COMPLETED 01:33:31 2-00:00:00 \r\n 3371238 train_dynamics_maskprob_fix_2+ accelerated 48 RUNNING 1-16:09:01 2-00:00:00 \r\n 3372629 train_dynamics_maskprob_fix_8+ accelerated 192 COMPLETED 1-02:29:22 2-00:00:00 \r\n 3372631 train_dynamics_maskprob_fix_2+ accelerated 48 COMPLETED 1-01:17:59 2-00:00:00 \r\n 3372931 train_dyn_causal_180M dev_accelerated 6 FAILED 00:00:33 00:10:00 \r\n 3372932 train_dyn_causal_255M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372934 train_dyn_causal_356M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372936 train_dyn_causal_500M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372969 train_dyn_causal_180M dev_accelerated 6 FAILED 00:02:11 00:10:00 \r\n 3372970 train_dyn_causal_255M dev_accelerated 6 FAILED 00:02:24 00:10:00 \r\n 3372971 train_dyn_causal_356M dev_accelerated 6 FAILED 00:02:08 00:10:00 \r\n 3372972 train_dyn_causal_500M dev_accelerated 6 FAILED 00:02:09 00:10:00 \r\n 3373107 train_dyn_causal_180M dev_accelerated 6 COMPLETED 00:06:15 00:10:00 \r\n 3373108 train_dyn_causal_255M dev_accelerated 6 COMPLETED 00:07:14 00:10:00 \r\n 3373109 train_dyn_causal_356M dev_accelerated 6 FAILED 00:04:17 00:10:00 \r\n 3373110 train_dyn_causal_500M dev_accelerated 6 FAILED 00:04:59 00:10:00 \r\n 3373400 wrap accelerated 6 COMPLETED 00:04:34 02:00:00 \r\n 3373404 wrap accelerated 6 COMPLETED 00:04:38 02:00:00 \r\n 3373407 train_dynamics_causal_2_node accelerated 48 RUNNING 05:58:48 2-00:00:00 \r\n 3373408 train_dynamics_causal_8_node accelerated 192 RUNNING 05:58:48 2-00:00:00 \r\n 3373409 wrap accelerated 6 COMPLETED 00:41:24 02:00:00 \r\n 3373410 wrap accelerated 6 COMPLETED 00:42:56 02:00:00 \r\n 3371237 train_dynamics_maskprob_fix_8+ accelerated 192 RUNNING 00:45:54 2-00:00:00 \r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
|
| 9 |
+
9,489274,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab
|
| 10 |
+
10,492538,"TERMINAL",0,0,"bash",,terminal_focus
|
| 11 |
+
11,496388,"TERMINAL",0,0,"cd $ws_dir",,terminal_command
|
| 12 |
+
12,496810,"TERMINAL",0,0,"ls",,terminal_command
|
| 13 |
+
13,496859,"TERMINAL",0,0,"]633;E;2025-07-25 16:50:38 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
|
| 14 |
+
14,496990,"TERMINAL",0,0,"[0m[01;34mcheckpoints[0m count_items.sh [01;34mdata[0m [01;34mdata_new[0m [01;34mhuggingface[0m [01;34mlogs[0m possibly_corrupt_files_in_this_workspace.txt [01;34mscripts[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output
|
| 15 |
+
15,501267,"TERMINAL",0,0,"cd checkpoints/",,terminal_command
|
| 16 |
+
16,501293,"TERMINAL",0,0,"]633;E;2025-07-25 16:50:42 cd checkpoints/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output
|
| 17 |
+
17,501578,"TERMINAL",0,0,"ls",,terminal_command
|
| 18 |
+
18,501628,"TERMINAL",0,0,"]633;E;2025-07-25 16:50:42 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
|
| 19 |
+
19,501871,"TERMINAL",0,0,"[0m[01;34m0000[0m [01;34m3291405[0m [01;34m3292335[0m [01;34m3296571[0m [01;34m3297606[0m [01;34m3299069[0m [01;34m3301026[0m [01;34m3310436[0m [01;34m3316022[0m [01;34mlam_ckpt_dir[0m [01;34mtrain_dyn_causal_500M[0m\r\n[01;34m3290283[0m [01;34m3292213[0m [01;34m3292336[0m [01;34m3296573[0m [01;34m3297671[0m [01;34m3299258[0m [01;34m3301027[0m [01;34m3310437[0m [01;34mbig-runs[0m [01;34mlam_main_test[0m [01;34mtrain_dyn_new_arch-bugfixed-spatial-shift[0m\r\n[01;34m3290284[0m [01;34m3292221[0m [01;34m3292337[0m [01;34m3296574[0m [01;34m3297693[0m [01;34m3299259[0m [01;34m3301029[0m [01;34m3311671[0m [01;34mcausal[0m [01;34mmaskgit-maskprob-fix[0m [01;34mtrain_dyn_new_arch-bugfixed-temporal-shift[0m\r\n[01;34m3290295[0m [01;34m3292258[0m [01;34m3292338[0m [01;34m3296575[0m [01;34m3297706[0m [01;34m3299272[0m [01;34m3301030[0m [01;34m3311672[0m [01;34mcheckpoints_alfred[0m [01;34mtokenizer[0m [01;34mtrain_dyn_yolorun_new_arch[0m\r\n[01;34m3290296[0m [01;34m3292328[0m [01;34m3292339[0m [01;34m3297569[0m [01;34m3297727[0m [01;34m3299579[0m [01;34m3301031[0m [01;34m3313562[0m [01;34mcoinrun[0m [01;34mtokenizer_ckpt_dir[0m [01;34mtrain_lam_minecraft_overfit_sample[0m\r\n[01;34m3290366[0m [01;34m3292329[0m [01;34m3294600[0m [01;34m3297575[0m [01;34m3299016[0m [01;34m3300233[0m [01;34m3306801[0m [01;34m3313563[0m [01;34mdebug[0m [01;34mtrain_dynamics_lr_schedule_const[0m [01;34mtrain_tokenizer_batch_size_scaling_16_node[0m\r\n[01;34m3290367[0m [01;34m3292330[0m [01;34m3294601[0m [01;34m3297576[0m [01;34m3299062[0m [01;34m3300290[0m [01;34m3307618[0m [01;34m3313564[0m [01;34mdyn[0m [01;34mtrain_dynamics_lr_schedule_cos[0m [01;34mtrain_tokenizer_minecraft_overfit_sample[0m\r\n[01;34m3290391[0m [01;34m3292331[0m [01;34m3294602[0m [01;34m3297577[0m [01;34m3299063[0m [01;34m3300658[0m [01;34m3307619[0m [01;34m3313565[0m [01;34mdynamics_ckpt_dir[0m [01;34mtrain_dynamics_lr_schedule_wsd[0m [01;34mwrap[0m\r\n[01;34m3290392[0m [01;34m3292332[0m [01;34m3294603[0m [01;34m3297578[0m [01;34m3299065[0m [01;34m3300663[0m [01;34m3309662[0m [01;34m3313570[0m [01;34minteractive[0m [01;34mtrain_dyn_causal_180M[0m\r\n[01;34m3290439[0m [01;34m3292333[0m [01;34m3296502[0m [01;34m3297582[0m [01;34m3299066[0m [01;34m3300672[0m [01;34m3309663[0m [01;34m3313571[0m [01;34mlam[0m [01;34mtrain_dyn_causal_255M[0m\r\n[01;34m3290440[0m [01;34m3292334[0m [01;34m3296540[0m [01;34m3297586[0m [01;34m3299068[0m [01;34m3301025[0m [01;34m3309699[0m [01;34m3313572[0m [01;34mlam-1-action[0m [01;34mtrain_dyn_causal_356M[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output
|
| 20 |
+
20,503515,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab
|
| 21 |
+
21,506478,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1559,0,"",shellscript,selection_mouse
|
| 22 |
+
22,506611,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1557,8,"maskprob",shellscript,selection_mouse
|
| 23 |
+
23,511718,"TERMINAL",0,0,"cd maskgit-maskprob-fix/",,terminal_command
|
| 24 |
+
24,512173,"TERMINAL",0,0,"ls",,terminal_command
|
| 25 |
+
25,512216,"TERMINAL",0,0,"]633;E;2025-07-25 16:50:53 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
|
| 26 |
+
26,512275,"TERMINAL",0,0,"[0m[01;34minteractive[0m [01;34mtrain_dynamics_maskprob_fix_2_node[0m [01;34mtrain_dynamics_maskprob_fix_2_node_80M[0m [01;34mtrain_dynamics_maskprob_fix_8_node[0m [01;34mtrain_dynamics_maskprob_fix_8_node_80M[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;0",,terminal_output
|
| 27 |
+
27,520223,"TERMINAL",0,0,"cd train_dynamics_maskprob_fix_",,terminal_command
|
| 28 |
+
28,520228,"TERMINAL",0,0,"]633;E;2025-07-25 16:51:01 cd train_dynamics_maskprob_fix_;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cbash: cd: train_dynamics_maskprob_fix_: No such file or directory\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;1",,terminal_output
|
| 29 |
+
29,522776,"TERMINAL",0,0,"cd train_dynamics_maskprob_fix_8_node",,terminal_command
|
| 30 |
+
30,522802,"TERMINAL",0,0,"]633;E;2025-07-25 16:51:04 cd train_dynamics_maskprob_fix_8_node;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node]633;D;0",,terminal_output
|
| 31 |
+
31,523278,"TERMINAL",0,0,"ls",,terminal_command
|
| 32 |
+
32,523301,"TERMINAL",0,0,"]633;E;2025-07-25 16:51:04 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m3370788[0m [01;34m3371237[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node]633;D;0",,terminal_output
|
| 33 |
+
33,536976,"TERMINAL",0,0,"cd 3371237/",,terminal_command
|
| 34 |
+
34,537010,"TERMINAL",0,0,"]633;E;2025-07-25 16:51:18 cd 3371237/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237]633;D;0",,terminal_output
|
| 35 |
+
35,537454,"TERMINAL",0,0,"ls",,terminal_command
|
| 36 |
+
36,537509,"TERMINAL",0,0,"]633;E;2025-07-25 16:51:18 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m016000[0m [01;34m017000[0m [01;34m018000[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237]633;D;0",,terminal_output
|
| 37 |
+
37,557600,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab
|
| 38 |
+
38,560516,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2060,0,"",shellscript,selection_mouse
|
| 39 |
+
39,560673,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2059,4,"1000",shellscript,selection_mouse
|
| 40 |
+
40,561723,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2147,0,"",shellscript,selection_mouse
|
| 41 |
+
41,562486,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2144,3,"fix",shellscript,selection_mouse
|
| 42 |
+
42,563079,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2110,0,"",shellscript,selection_mouse
|
| 43 |
+
43,563243,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2108,4,"1000",shellscript,selection_mouse
|
| 44 |
+
44,579775,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:]\n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, 
recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": 
args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = 
(jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab
|
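The metrics block in the recorded script above measures codebook utilization with `jnp.unique_counts`. A minimal sketch of that pattern in isolation, with a hypothetical `codebook_usage` helper and `num_codes` size; the `size`/`fill_value` arguments keep the op shape-stable under `jit`, exactly as in the script:

import jax
import jax.numpy as jnp

def codebook_usage(indices: jax.Array, num_codes: int) -> jax.Array:
    # Count occurrences of each code index; `size` pads the result to a
    # static length so the op is jit-compatible, and unused slots get a
    # count of 0.
    _, counts = jnp.unique_counts(jnp.ravel(indices), size=num_codes, fill_value=0)
    # Fraction of codebook entries hit at least once.
    return (counts != 0).mean()

# Example: codes {1, 4, 7} out of 8 -> usage 0.375.
usage = jax.jit(codebook_usage, static_argnums=1)(jnp.array([[1, 1, 4], [4, 7, 7]]), 8)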
| 45 |
+
45,588014,"train_dynamics.py",2371,0,"",python,selection_mouse
|
| 46 |
+
46,588162,"train_dynamics.py",2367,5,"20000",python,selection_mouse
|
| 47 |
+
47,598870,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 48 |
+
48,599269,"TERMINAL",0,0,"ls",,terminal_command
|
| 49 |
+
49,600516,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 50 |
+
50,600835,"TERMINAL",0,0,"ls",,terminal_command
|
| 51 |
+
51,600876,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:22 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34minteractive[0m [01;34mtrain_dynamics_maskprob_fix_2_node[0m [01;34mtrain_dynamics_maskprob_fix_2_node_80M[0m [01;34mtrain_dynamics_maskprob_fix_8_node[0m [01;34mtrain_dynamics_maskprob_fix_8_node_80M[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;0",,terminal_output
|
| 52 |
+
52,610052,"TERMINAL",0,0,"cd train_dynamics_maskprob_fix_2_node",,terminal_command
|
| 53 |
+
53,610076,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:31 cd train_dynamics_maskprob_fix_2_node;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_2_node]633;D;0",,terminal_output
|
| 54 |
+
54,611337,"TERMINAL",0,0,"ls",,terminal_command
|
| 55 |
+
55,611384,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:32 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m3371238[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_2_node]633;D;0",,terminal_output
|
| 56 |
+
56,613099,"TERMINAL",0,0,"cd 3371238/",,terminal_command
|
| 57 |
+
57,613125,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:34 cd 3371238/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_2_node/3371238]633;D;0",,terminal_output
|
| 58 |
+
58,613491,"TERMINAL",0,0,"ls",,terminal_command
|
| 59 |
+
59,613535,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:34 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m020000[0m [01;34m031000[0m [01;34m032000[0m [01;34m033000[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_2_node/3371238]633;D;0",,terminal_output
|
| 60 |
+
60,625849,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 61 |
+
61,625875,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:47 cd ..;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_2_node]633;D;0",,terminal_output
|
| 62 |
+
62,626753,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 63 |
+
63,626781,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:48 cd ..;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;0",,terminal_output
|
| 64 |
+
64,628009,"TERMINAL",0,0,"cd ..",,terminal_command
|
| 65 |
+
65,628426,"TERMINAL",0,0,"ls",,terminal_command
|
| 66 |
+
66,628538,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:49 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m0000[0m [01;34m3291405[0m [01;34m3292335[0m [01;34m3296571[0m [01;34m3297606[0m [01;34m3299069[0m [01;34m3301026[0m [01;34m3310436[0m [01;34m3316022[0m [01;34mlam_ckpt_dir[0m [01;34mtrain_dyn_causal_500M[0m\r\n[01;34m3290283[0m [01;34m3292213[0m [01;34m3292336[0m [01;34m3296573[0m [01;34m3297671[0m [01;34m3299258[0m [01;34m3301027[0m [01;34m3310437[0m [01;34mbig-runs[0m [01;34mlam_main_test[0m [01;34mtrain_dyn_new_arch-bugfixed-spatial-shift[0m\r\n[01;34m3290284[0m [01;34m3292221[0m [01;34m3292337[0m [01;34m3296574[0m [01;34m3297693[0m [01;34m3299259[0m [01;34m3301029[0m [01;34m3311671[0m [01;34mcausal[0m [01;34mmaskgit-maskprob-fix[0m [01;34mtrain_dyn_new_arch-bugfixed-temporal-shift[0m\r\n[01;34m3290295[0m [01;34m3292258[0m [01;34m3292338[0m [01;34m3296575[0m [01;34m3297706[0m [01;34m3299272[0m [01;34m3301030[0m [01;34m3311672[0m [01;34mcheckpoints_alfred[0m [01;34mtokenizer[0m [01;34mtrain_dyn_yolorun_new_arch[0m\r\n[01;34m3290296[0m [01;34m3292328[0m [01;34m3292339[0m [01;34m3297569[0m [01;34m3297727[0m [01;34m3299579[0m [01;34m3301031[0m [01;34m3313562[0m [01;34mcoinrun[0m [01;34mtokenizer_ckpt_dir[0m [01;34mtrain_lam_minecraft_overfit_sample[0m\r\n[01;34m3290366[0m [01;34m3292329[0m [01;34m3294600[0m [01;34m3297575[0m [01;34m3299016[0m [01;34m3300233[0m [01;34m3306801[0m [01;34m3313563[0m [01;34mdebug[0m [01;34mtrain_dynamics_lr_schedule_const[0m [01;34mtrain_tokenizer_batch_size_scaling_16_node[0m\r\n[01;34m3290367[0m [01;34m3292330[0m [01;34m3294601[0m [01;34m3297576[0m [01;34m3299062[0m [01;34m3300290[0m [01;34m3307618[0m [01;34m3313564[0m [01;34mdyn[0m [01;34mtrain_dynamics_lr_schedule_cos[0m [01;34mtrain_tokenizer_minecraft_overfit_sample[0m\r\n[01;34m3290391[0m [01;34m3292331[0m [01;34m3294602[0m [01;34m3297577[0m [01;34m3299063[0m [01;34m3300658[0m [01;34m3307619[0m [01;34m3313565[0m [01;34mdynamics_ckpt_dir[0m [01;34mtrain_dynamics_lr_schedule_wsd[0m [01;34mwrap[0m\r\n[01;34m3290392[0m [01;34m3292332[0m [01;34m3294603[0m [01;34m3297578[0m [01;34m3299065[0m [01;34m3300663[0m [01;34m3309662[0m [01;34m3313570[0m [01;34minteractive[0m [01;34mtrain_dyn_causal_180M[0m\r\n[01;34m3290439[0m [01;34m3292333[0m [01;34m3296502[0m [01;34m3297582[0m [01;34m3299066[0m [01;34m3300672[0m [01;34m3309663[0m [01;34m3313571[0m [01;34mlam[0m [01;34mtrain_dyn_causal_255M[0m\r\n[01;34m3290440[0m [01;34m3292334[0m [01;34m3296540[0m [01;34m3297586[0m [01;34m3299068[0m [01;34m3301025[0m [01;34m3309699[0m [01;34m3313572[0m [01;34mlam-1-action[0m [01;34mtrain_dyn_causal_356M[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output
|
| 67 |
+
67,631264,"TERMINAL",0,0,"cd causal/l",,terminal_command
|
| 68 |
+
68,631312,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:52 cd causal/l;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cbash: cd: causal/l: No such file or directory\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;1",,terminal_output
|
| 69 |
+
69,632746,"TERMINAL",0,0,"cd causal/",,terminal_command
|
| 70 |
+
70,632787,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:53 cd causal/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal]633;D;0",,terminal_output
|
| 71 |
+
71,633679,"TERMINAL",0,0,"ls",,terminal_command
|
| 72 |
+
72,633695,"TERMINAL",0,0,"]633;E;2025-07-25 16:52:54 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34moverfit[0m [01;34moverfit-seed69-1[0m [01;34moverfit-seed69-1-no-noise[0m [01;34mtrain_dynamics_causal_2_node[0m [01;34mtrain_dynamics_causal_8_node[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal]633;D;0",,terminal_output
|
| 73 |
+
73,639358,"TERMINAL",0,0,"cd train_dynamics_causal_8_node/",,terminal_command
|
| 74 |
+
74,639389,"TERMINAL",0,0,"]633;E;2025-07-25 16:53:00 cd train_dynamics_causal_8_node/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_8_node]633;D;0",,terminal_output
|
| 75 |
+
75,639775,"TERMINAL",0,0,"ls",,terminal_command
|
| 76 |
+
76,639812,"TERMINAL",0,0,"]633;E;2025-07-25 16:53:01 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m3373408[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_8_node]633;D;0",,terminal_output
|
| 77 |
+
77,641099,"TERMINAL",0,0,"cd 3373408/",,terminal_command
|
| 78 |
+
78,641109,"TERMINAL",0,0,"]633;E;2025-07-25 16:53:02 cd 3373408/;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_8_node/3373408]633;D;0",,terminal_output
|
| 79 |
+
79,641668,"TERMINAL",0,0,"ls",,terminal_command
|
| 80 |
+
80,641717,"TERMINAL",0,0,"]633;E;2025-07-25 16:53:02 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[0m[01;34m003000[0m [01;34m004000[0m [01;34m005000[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_8_node/3373408]633;D;0",,terminal_output
|
| 81 |
+
81,968762,"TERMINAL",0,0,"git branch",,terminal_command
|
| 82 |
+
82,968798,"TERMINAL",0,0,"]633;E;2025-07-25 16:58:30 git branch;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cfatal: not a git repository (or any parent up to mount point /hkfs)\r\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_8_node/3373408]633;D;128",,terminal_output
|
| 83 |
+
83,971943,"TERMINAL",0,0,"bash",,terminal_focus
|
| 84 |
+
84,973457,"TERMINAL",0,0,"git branch",,terminal_command
|
| 85 |
+
85,973508,"TERMINAL",0,0,"]633;E;2025-07-25 16:58:34 git branch;2f980232-d92f-4231-927c-3ee17e3c6d04]633;C[?1h=\r",,terminal_output
|
| 86 |
+
86,973653,"TERMINAL",0,0," add-wandb-name-and-tags[m[m\r\n causal-st-transformer[m[m\r\n causal-transformer-dynamics-model[m[m\r\n convert-to-jax-array-in-iter[m[m\r\n correct-batched-sampling[m[m\r\n dev[m[m\r\n dont-let-tf-see-gpu[m[m\r\n feat/explicit-image-dims[m[m\r\n fix-sampling[m[m\r\n grad-norm-log-and-clip[m[m\r\n grain-dataloader[m[m\r\n logging-variants[m[m\r\n lr-schedules[m[m\r\n main[m[m\r\n maskgit-different-maskprob-per-sample[m[m\r\n:[K",,terminal_output
|
| 87 |
+
87,974834,"TERMINAL",0,0,"\r[K metrics-logging-for-dynamics-model[m[m\r\n:[K",,terminal_output
|
| 88 |
+
88,975036,"TERMINAL",0,0,"\r[K monkey-patch[m[m\r\n:[K",,terminal_output
|
| 89 |
+
89,975185,"TERMINAL",0,0,"\r[K* [32mnew-arch-sampling[m[m\r\n:[K",,terminal_output
|
| 90 |
+
90,975295,"TERMINAL",0,0,"\r[K preprocess_video[m[m\r\n:[K",,terminal_output
|
| 91 |
+
91,975531,"TERMINAL",0,0,"\r[K refactor-tmp[m[m\r\n:[K",,terminal_output
|
| 92 |
+
92,975700,"TERMINAL",0,0,"\r[K revised-dataloader[m[m\r\n:[K",,terminal_output
|
| 93 |
+
93,975801,"TERMINAL",0,0,"\r[K runner[m[m\r\n:[K",,terminal_output
|
| 94 |
+
94,975984,"TERMINAL",0,0,"\r[K runner-grain[m[m\r\n:[K",,terminal_output
|
| 95 |
+
95,976103,"TERMINAL",0,0,"\r[K sample-from-different-topologies[m[m\r\n:[K",,terminal_output
|
| 96 |
+
96,976214,"TERMINAL",0,0,"\r[K speedup-tfrecord-preprocessing[m[m\r\n:[K",,terminal_output
|
| 97 |
+
97,976418,"TERMINAL",0,0,"\r[K tmp[m[m\r\n:[K",,terminal_output
|
| 98 |
+
98,976534,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 99 |
+
99,976646,"TERMINAL",0,0,"\r[K\r[K[7m(END)[27m[K",,terminal_output
|
| 100 |
+
100,976827,"TERMINAL",0,0,"[?25l[16;6H[X[0m\r[K\r[K[7m(END)[27m[K[?25h",,terminal_output
|
| 101 |
+
101,976967,"TERMINAL",0,0,"[?25l[16;7H[X[16;6H[X[0m\r[K\r[K[7m(END)[27m[K[?25h",,terminal_output
|
| 102 |
+
102,977071,"TERMINAL",0,0,"\r[K[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
|
| 103 |
+
103,979880,"TERMINAL",0,0,"bash",,terminal_focus
|
| 104 |
+
104,1001754,"TERMINAL",0,0,"sacct",,terminal_command
|
| 105 |
+
105,1001800,"TERMINAL",0,0,"]633;E;2025-07-25 16:59:03 sacct;406cfb31-2341-454a-afa8-cae7781806b2]633;CJobID JobName Partition Account AllocCPUS State ExitCode \r\n------------ ---------- ---------- ---------- ---------- ---------- -------- \r\n3371238 train_dyn+ accelerat+ hk-projec+ 48 RUNNING 0:0 \r\n3371238.bat+ batch hk-projec+ 24 RUNNING 0:0 \r\n3371238.ext+ extern hk-projec+ 48 RUNNING 0:0 \r\n3371238.0 python hk-projec+ 40 RUNNING 0:0 \r\n3372629 train_dyn+ accelerat+ hk-projec+ 192 COMPLETED 0:0 \r\n3372629.bat+ batch hk-projec+ 24 COMPLETED 0:0 \r\n3372629.ext+ extern hk-projec+ 192 COMPLETED 0:0 \r\n3372629.0 python hk-projec+ 160 COMPLETED 0:0 \r\n3372631 train_dyn+ accelerat+ hk-projec+ 48 COMPLETED 0:0 \r\n3372631.bat+ batch hk-projec+ 24 COMPLETED 0:0 \r\n3372631.ext+ extern hk-projec+ 48 COMPLETED 0:0 \r\n3372631.0 python hk-projec+ 40 COMPLETED 0:0 \r\n3373407 train_dyn+ accelerat+ hk-projec+ 48 RUNNING 0:0 \r\n3373407.bat+ batch hk-projec+ 24 RUNNING 0:0 \r\n3373407.ext+ extern hk-projec+ 48 RUNNING 0:0 \r\n3373407.0 python hk-projec+ 40 RUNNING 0:0 \r\n3373408 train_dyn+ accelerat+ hk-projec+ 192 RUNNING 0:0 \r\n3373408.bat+ batch hk-projec+ 24 RUNNING 0:0 \r\n3373408.ext+ extern hk-projec+ 192 RUNNING 0:0 \r\n3373408.0 python hk-projec+ 160 RUNNING 0:0 \r\n3371237 train_dyn+ accelerat+ hk-projec+ 192 RUNNING 0:0 \r\n3371237.bat+ batch hk-projec+ 24 RUNNING 0:0 \r\n3371237.ext+ extern hk-projec+ 192 RUNNING 0:0 \r\n3371237.0 python hk-projec+ 160 RUNNING 0:0 \r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output
|
| 106 |
+
106,1011767,"TERMINAL",0,0,"alias",,terminal_command
|
| 107 |
+
107,1011813,"TERMINAL",0,0,"]633;E;2025-07-25 16:59:13 alias;406cfb31-2341-454a-afa8-cae7781806b2]633;Calias egrep='egrep --color=auto'\r\nalias fgrep='fgrep --color=auto'\r\nalias fqueue='watch -n 1 ""squeue -o \""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R\""""'\r\nalias fsacct_week='sacct --format=""JobID%15,JobName%30,Partition%16,AllocCPUS%3,State%12,Elapsed%10,Timelimit%10"" --starttime $(date -d ""last week"" +%Y-%m-%d) | grep -vE ""*.batch|*.extern|*.inter|bash|python|CANCELLED|echo""'\r\nalias grep='grep --color=auto'\r\nalias idle='sinfo_t_idle'\r\nalias idling='watch -n1 sinfo_t_idle'\r\nalias l.='ls -d .* --color=auto'\r\nalias ll='ls -l --color=auto'\r\nalias ls='ls --color=auto'\r\nalias mc='. /usr/libexec/mc/mc-wrapper.sh'\r\nalias queue='watch -n1 squeue --me'\r\nalias runner='cd /home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/'\r\nalias runner-2='cd /home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_2/'\r\nalias salloc_cpu='salloc --time=01:00:00 --partition=dev_cpuonly --nodes=1 --cpus-per-task=128'\r\nalias salloc_node='salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8'\r\nalias smi='watch -n1 nvidia-smi'\r\nalias sync-runner='sh /home/hk-project-p0023960/tum_cte0515/sync_runner.sh /home/hk-project-p0023960/tum_cte0515/Projects/jafar /home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/'\r\nalias sync-runner-2='sh /home/hk-project-p0023960/tum_cte0515/sync_runner.sh /home/hk-project-p0023960/tum_cte0515/Projects/jafar /home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_2/'\r\nalias xzegrep='xzegrep --color=auto'\r\nalias xzfgrep='xzfgrep --color=auto'\r\nalias xzgrep='xzgrep --color=auto'\r\nalias zegrep='zegrep --color=auto'\r\nalias zfgrep='zfgrep --color=auto'\r\nalias zgrep='zgrep --color=auto'\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
|
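The `queue` and `fqueue` aliases listed above both wrap `watch` around `squeue`. A hypothetical Python equivalent of `queue` (`watch -n1 squeue --me`), assuming a SLURM cluster where `squeue` is on PATH:

import subprocess
import time

while True:
    # `--me` restricts the listing to the current user's jobs, as in the alias.
    result = subprocess.run(["squeue", "--me"], capture_output=True, text=True)
    print(result.stdout, end="")
    time.sleep(1)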
| 108 |
+
108,1037050,"TERMINAL",0,0,"sacct --format=""JobID%15,JobName%30,Partition%16,AllocCPUS%3,State%12,Elapsed%10,Timelimit%10"" --starttime $(date -d ""last week"" +%Y-%m-%d) | grep -vE ""*.batch|*.extern|*.inter"" | grep ""accelerate""",,terminal_command
|
| 109 |
+
109,1037128,"TERMINAL",0,0,"]633;E;2025-07-25 16:59:38 sacct --format=""JobID%15,JobName%30,Partition%16,AllocCPUS%3,State%12,Elapsed%10,Timelimit%10"" --starttime $(date -d ""last week"" +%Y-%m-%d) | grep -vE ""*.batch|*.extern|*.inter"" | grep ""accelerate"";406cfb31-2341-454a-afa8-cae7781806b2]633;C 3358457 train_dyn_yolorun_new_arch [01;31m[Kaccelerate[m[Kd 48 FAILED 00:00:28 2-00:00:00 \r\n 3359333 wrap [01;31m[Kaccelerate[m[Kd 6 CANCELLED b+ 00:00:18 10:00:00 \r\n 3359334 wrap [01;31m[Kaccelerate[m[Kd 6 TIMEOUT 10:00:29 10:00:00 \r\n 3359338 wrap [01;31m[Kaccelerate[m[Kd 6 TIMEOUT 10:00:16 10:00:00 \r\n 3359343 train_dyn_new_arch-bugfixed-s+ [01;31m[Kaccelerate[m[Kd 48 COMPLETED 23:19:14 2-00:00:00 \r\n 3359349 train_dyn_new_arch-bugfixed-t+ [01;31m[Kaccelerate[m[Kd 48 COMPLETED 1-01:00:55 2-00:00:00 \r\n 3365872 train_dynamics_overfit_sample+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3365873 train_dynamics_overfit_sample+ [01;31m[Kaccelerate[m[Kd 6 COMPLETED 01:26:52 2-00:00:00 \r\n 3365876 train_dynamics_overfit_sample+ [01;31m[Kaccelerate[m[Kd 6 COMPLETED 01:40:07 2-00:00:00 \r\n 3366843 train_dynamics_overfit_sample+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3366883 train_dynamics_overfit_sample+ [01;31m[Kaccelerate[m[Kd 6 COMPLETED 01:33:31 2-00:00:00 \r\n 3370768 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3370769 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3370787 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 00:15:00 \r\n 3370788 train_dynamics_maskprob_fix_8+ dev_[01;31m[Kaccelerate[m[Kd 6 CANCELLED b+ 00:05:54 00:15:00 \r\n 3370822 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3371238 train_dynamics_maskprob_fix_2+ [01;31m[Kaccelerate[m[Kd 48 RUNNING 1-16:25:23 2-00:00:00 \r\n 3372629 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 192 COMPLETED 1-02:29:22 2-00:00:00 \r\n 3372631 train_dynamics_maskprob_fix_2+ [01;31m[Kaccelerate[m[Kd 48 COMPLETED 1-01:17:59 2-00:00:00 \r\n 3372929 train_dyn_causal dev_[01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 00:10:00 \r\n 3372931 train_dyn_causal_180M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:00:33 00:10:00 \r\n 3372932 train_dyn_causal_255M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:00:29 00:10:00 \r\n 3372934 train_dyn_causal_356M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:00:29 00:10:00 \r\n 3372936 train_dyn_causal_500M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:00:29 00:10:00 \r\n 3372963 train_dyn_causal_180M dev_[01;31m[Kaccelerate[m[Kd 6 CANCELLED b+ 00:00:29 00:10:00 \r\n 3372964 train_dyn_causal_255M dev_[01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 00:10:00 \r\n 3372965 train_dyn_causal_356M dev_[01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 00:10:00 \r\n 3372966 train_dyn_causal_500M dev_[01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 00:10:00 \r\n 3372969 train_dyn_causal_180M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:02:11 00:10:00 \r\n 3372970 train_dyn_causal_255M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:02:24 00:10:00 \r\n 3372971 train_dyn_causal_356M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:02:08 00:10:00 \r\n 3372972 train_dyn_causal_500M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:02:09 00:10:00 \r\n 3373107 train_dyn_causal_180M dev_[01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:06:15 00:10:00 \r\n 3373108 train_dyn_causal_255M dev_[01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:07:14 
00:10:00 \r\n 3373109 train_dyn_causal_356M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:04:17 00:10:00 \r\n 3373110 train_dyn_causal_500M dev_[01;31m[Kaccelerate[m[Kd 6 FAILED 00:04:59 00:10:00 \r\n 3373205 train_dynamics_causal_2_node [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3373207 train_dynamics_causal_8_node [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3373213 train_dynamics_causal_8_node [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3373276 train_dynamics_causal_2_node [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3373277 train_dynamics_causal_8_node [01;31m[Kaccelerate[m[Kd 0 CANCELLED b+ 00:00:00 2-00:00:00 \r\n 3373400 wrap [01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:04:34 02:00:00 \r\n 3373404 wrap [01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:04:38 02:00:00 \r\n 3373407 train_dynamics_causal_2_node [01;31m[Kaccelerate[m[Kd 48 RUNNING 06:15:10 2-00:00:00 \r\n 3373408 train_dynamics_causal_8_node [01;31m[Kaccelerate[m[Kd 192 RUNNING 06:15:10 2-00:00:00 \r\n 3373409 wrap [01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:41:24 02:00:00 \r\n 3373410 wrap [01;31m[Kaccelerate[m[Kd 6 COMPLETED 00:42:56 02:00:00 \r\n 3371237 train_dynamics_maskprob_fix_8+ [01;31m[Kaccelerate[m[Kd 192 RUNNING 01:02:16 2-00:00:00 \r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
|
| 110 |
+
110,5563097,"TERMINAL",0,0,"queue",,terminal_command
|
| 111 |
+
111,5563185,"TERMINAL",0,0,"]633;E;2025-07-25 18:15:03 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h[22;0;0t[1;29r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;134Hhkn1990.localdomain: Fri Jul 25 18:15:03 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3373407 accelerat train_dy tum_cte0 R 7:30:35\t 2 hkn[0501,0506][5;12H3373408 accelerat train_dy tum_cte0 R 7:30:35\t 8 hkn[0406,0409,0411,0413,0421,0423-0424,0525][6;12H3371237 accelerat train_dy tum_cte0 R 2:17:41\t 8 hkn[0410,0429,0520,0532,0607,0610,0810,0814][7;12H3371238 accelerat train_dy tum_cte0 R 1-17:40:48\t 2 hkn[0706,0710][29;178H",,terminal_output
|
| 112 |
+
112,5564311,"TERMINAL",0,0,"[1;173H4[4;60H6[5d6[6d2[7d9[29;178H",,terminal_output
|
| 113 |
+
113,5565250,"TERMINAL",0,0,"[1;173H5[4;60H7[5d7[6d3[7d50[29;178H",,terminal_output
|
| 114 |
+
114,5566469,"TERMINAL",0,0,"[1;173H6[4;60H9[5d9[6d5[7d2[29;178H",,terminal_output
|
| 115 |
+
115,5567659,"TERMINAL",0,0,"[1;173H8[4;59H40[5d40[6d6[7d3[29;178H",,terminal_output
|
| 116 |
+
116,5568855,"TERMINAL",0,0,"[1;173H9[4;60H1[5d1[6d7[7d4[29;178H",,terminal_output
|
| 117 |
+
117,5569632,"TERMINAL",0,0,"[1;172H10[4;60H2[5d2[6d8[7d5[29;178H",,terminal_output
|
| 118 |
+
118,5570209,"TERMINAL",0,0,"[1;173H1[4;60H3[5d3[6d9[7d6[29;178H",,terminal_output
|
| 119 |
+
119,5571324,"TERMINAL",0,0,"[1;173H2[4;60H4[5d4[6d50[7d7[29;178H",,terminal_output
|
| 120 |
+
120,5572586,"TERMINAL",0,0,"[1;173H3[4;60H5[5d5[6d1[7d8[29;178H",,terminal_output
|
| 121 |
+
121,5573472,"TERMINAL",0,0,"[1;173H4[4;60H6[5d6[6d2[7d9[29;178H",,terminal_output
|
| 122 |
+
122,5574531,"TERMINAL",0,0,"[1;173H5[4;60H7[5d7[6d3[7;57H1:00[29;178H",,terminal_output
|
| 123 |
+
123,5575451,"TERMINAL",0,0,"[1;173H6[4;60H8[5d8[6d4[7d1[29;178H",,terminal_output
|
| 124 |
+
124,5575776,"TERMINAL",0,0,"[29;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
|
| 125 |
+
125,5576793,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab
|
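The train loop recorded earlier in this session distributes work over a 1-D "data" mesh: the train state is replicated, while each global batch is sharded along its leading dimension and assembled from per-process slices. A minimal sketch of that pattern with hypothetical shapes (not the recorded script itself):

import jax
import numpy as np
from jax.experimental.mesh_utils import create_device_mesh
from jax.sharding import Mesh, NamedSharding, PartitionSpec

mesh = Mesh(create_device_mesh((jax.device_count(),)), axis_names=("data",))
replicated = NamedSharding(mesh, PartitionSpec())           # model/optimizer state
batch_sharded = NamedSharding(mesh, PartitionSpec("data"))  # split along batch dim

# Each process contributes its local slice; JAX assembles the global array,
# mirroring the jax.make_array_from_process_local_data call in the loop above.
local_batch = np.zeros((8, 16, 64, 64, 3), dtype=np.uint8)  # (B, T, H, W, C)
global_batch = jax.make_array_from_process_local_data(batch_sharded, local_batch)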
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-50d32311-0df8-4235-b597-7d69f06b72151752666637262-2025_07_16-13.50.55.636/source.csv
ADDED
|
@@ -0,0 +1,143 @@
|
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
2,209,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab
|
| 3 |
+
3,327,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:50:55 PM [info] Activating crowd-code\n1:50:55 PM [info] Recording started\n1:50:55 PM [info] Initializing git provider using file system watchers...\n1:50:55 PM [info] Git repository found\n1:50:55 PM [info] Git provider initialized successfully\n1:50:55 PM [info] Initial git state: [object Object]\n",Log,content
|
| 4 |
+
4,3004,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command
|
| 5 |
+
5,3050,"TERMINAL",0,0,"]633;E;2025-07-16 13:50:58 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;8d1bd821-a8b0-4413-aad3-038e7b28f0ba]633;C",,terminal_output
|
| 6 |
+
6,3072,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output
|
| 7 |
+
7,12393,"TERMINAL",0,0,"bash",,terminal_focus
|
| 8 |
+
8,14724,"TERMINAL",0,0,"queue",,terminal_command
|
| 9 |
+
9,14783,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:10 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;359Hhkn1991.localdomain: Wed Jul 16 13:51:10 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3348592 accelerat train_dy tum_cte0 R 13:50:48\t 2 hkn[0416,0421][5;12H3349982 accelerat interact tum_cte0 R\t9:47\t 2 hkn[0407,0511][6;12H3348397 accelerat train_dy tum_cte0 R 19:12:30\t 2 hkn[0810,0815][7;12H3348399 accelerat train_dy tum_cte0 R 19:12:30\t 2 hkn[0601,0603][8;12H3348400 accelerat train_dy tum_cte0 R 19:12:30\t 2 hkn[0604,0608][9;12H3350111 accelerat interact tum_cte0 R\t9:03\t 2 hkn[0415,0422][10;12H3345116 accelerat train_dy tum_cte0 R 1-19:56:15\t 2 hkn[0503,0506][67;403H",,terminal_output
|
| 10 |
+
10,15818,"TERMINAL",0,0,"[1;398H1[4;60H9[5d8[6d1[7d1[8d1[9d4[10d6[67;403H",,terminal_output
|
| 11 |
+
11,16869,"TERMINAL",0,0,"[1;398H2[4;59H50[5d9[6d2[7d2[8d2[9d5[10d7[67;403H",,terminal_output
|
| 12 |
+
12,17926,"TERMINAL",0,0,"[1;398H3[4;60H1[5d50[6d3[7d3[8d3[9d6[10d8[67;403H",,terminal_output
|
| 13 |
+
13,18971,"TERMINAL",0,0,"[1;398H4[4;60H2[5d1[6d4[7d4[8d4[9d7[10d9[67;403H",,terminal_output
|
| 14 |
+
14,20019,"TERMINAL",0,0,"[1;398H5[4;60H3[5d2[6d5[7d5[8d5[9d8[10d20[67;403H",,terminal_output
|
| 15 |
+
15,21074,"TERMINAL",0,0,"[1;398H6[4;60H4[5d3[6d6[7d6[8d6[9d9[10d1[67;403H",,terminal_output
|
| 16 |
+
16,22124,"TERMINAL",0,0,"[1;398H7[4;60H5[5d4[6d7[7d7[8d7[9d10[10d2[67;403H",,terminal_output
|
| 17 |
+
17,23156,"TERMINAL",0,0,"[1;398H8[4;60H6[5d5[6d8[7d8[8d8[9d1[10d3[67;403H",,terminal_output
|
| 18 |
+
18,24214,"TERMINAL",0,0,"[1;398H9[4;60H7[5d6[6d9[7d9[8d9[9d2[10d4[67;403H",,terminal_output
|
| 19 |
+
19,25251,"TERMINAL",0,0,"[1;397H20[4;60H8[5d7[6d40[7d40[8d40[9d3[10d5[67;403H",,terminal_output
|
| 20 |
+
20,25893,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 21 |
+
21,27896,"TERMINAL",0,0,"idling",,terminal_command
|
| 22 |
+
22,27951,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:23 idling;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: sinfo_t_idle[1;359Hhkn1991.localdomain: Wed Jul 16 13:51:23 2025[3;1HPartition dev_cpuonly[3;35H:\t 9 nodes idle\r[4dPartition cpuonly[4;35H:\t 3 nodes idle\r[5dPartition dev_accelerated[5;35H:\t 0 nodes idle\r[6dPartition accelerated[6;35H:\t 9 nodes idle\r[7dPartition dev_accelerated-h100 :\t 0 nodes idle\r[8dPartition accelerated-h100[8;35H:\t 0 nodes idle\r[9dPartition large[9;35H:\t 7 nodes idle[67;403H",,terminal_output
|
| 23 |
+
23,28992,"TERMINAL",0,0,"[1;398H4[67;403H",,terminal_output
|
| 24 |
+
24,29405,"TERMINAL",0,0,"bash",,terminal_focus
|
| 25 |
+
25,30037,"TERMINAL",0,0,"[1;398H5[67;403H",,terminal_output
|
| 26 |
+
26,31088,"TERMINAL",0,0,"[1;398H6[67;403H",,terminal_output
|
| 27 |
+
27,32138,"TERMINAL",0,0,"[1;398H7[67;403H",,terminal_output
|
| 28 |
+
28,32742,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5",,terminal_command
|
| 29 |
+
29,32797,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:28 salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5;dea9d5fc-91fd-447c-886d-4b0240ae057d]633;Csalloc: Granted job allocation 3350245\r\n",,terminal_output
|
| 30 |
+
30,32908,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output
|
| 31 |
+
31,33181,"TERMINAL",0,0,"[1;398H8[6;42H7[67;403H",,terminal_output
|
| 32 |
+
32,34023,"TERMINAL",0,0,"watch",,terminal_focus
|
| 33 |
+
33,34249,"TERMINAL",0,0,"[1;398H9[67;403H",,terminal_output
|
| 34 |
+
34,35275,"TERMINAL",0,0,"[1;397H30[67;403H",,terminal_output
|
| 35 |
+
35,35956,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 36 |
+
36,36736,"TERMINAL",0,0,"queue",,terminal_command
|
| 37 |
+
37,36791,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:32 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;359Hhkn1991.localdomain: Wed Jul 16 13:51:32 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3348592 accelerat train_dy tum_cte0 R 13:51:10\t 2 hkn[0416,0421][5;12H3349982 accelerat interact tum_cte0 R[56G10:09\t 2 hkn[0407,0511][6;12H3348397 accelerat train_dy tum_cte0 R 19:12:52\t 2 hkn[0810,0815][7;12H3348399 accelerat train_dy tum_cte0 R 19:12:52\t 2 hkn[0601,0603][8;12H3348400 accelerat train_dy tum_cte0 R 19:12:52\t 2 hkn[0604,0608][9;12H3350111 accelerat interact tum_cte0 R\t9:25\t 2 hkn[0415,0422][10;12H3350245 accelerat interact tum_cte0 R\t0:04\t 2 hkn[0423-0424][11;12H3345116 accelerat train_dy tum_cte0 R 1-19:56:37\t 2 hkn[0503,0506][67;403H",,terminal_output
|
| 38 |
+
38,37831,"TERMINAL",0,0,"[1;398H3[4;60H1[5d10[6d3[7d3[8d3[9d6[10d5[11d8[67;403H",,terminal_output
|
| 39 |
+
39,39190,"TERMINAL",0,0,"[1;398H4[4;60H2[5d1[6d4[7d4[8d4[9d7[10d6[11d9[67;403H",,terminal_output
|
| 40 |
+
40,40941,"TERMINAL",0,0,"[1;398H5[4;60H4[5d3[6d6[7d6[8d6[9d9[10d8[11d41[67;403H",,terminal_output
|
| 41 |
+
41,41454,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 42 |
+
42,48631,"TERMINAL",0,0,"scancel 3350111",,terminal_command
|
| 43 |
+
43,48681,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:44 scancel 3350111;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C",,terminal_output
|
| 44 |
+
44,48714,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output
|
| 45 |
+
45,49689,"TERMINAL",0,0,"queue",,terminal_command
|
| 46 |
+
46,49762,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:45 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;359Hhkn1991.localdomain: Wed Jul 16 13:51:45 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3350111 accelerat interact tum_cte0 CG\t9:37\t 2 hkn[0415,0422][5;12H3348592 accelerat train_dy tum_cte0 R 13:51:23\t 2 hkn[0416,0421][6;12H3349982 accelerat interact tum_cte0 R[56G10:22\t 2 hkn[0407,0511][7;12H3348397 accelerat train_dy tum_cte0 R 19:13:05\t 2 hkn[0810,0815][8;12H3348399 accelerat train_dy tum_cte0 R 19:13:05\t 2 hkn[0601,0603][9;12H3348400 accelerat train_dy tum_cte0 R 19:13:05\t 2 hkn[0604,0608][10;12H3350245 accelerat interact tum_cte0 R\t0:17\t 2 hkn[0423-0424][11;12H3345116 accelerat train_dy tum_cte0 R 1-19:56:50\t 2 hkn[0503,0506][67;403H",,terminal_output
|
| 47 |
+
47,50848,"TERMINAL",0,0,"[1;398H6[5;60H4[6d3[7d6[8d6[9d6[10d8[11d1[67;403H",,terminal_output
|
| 48 |
+
48,51854,"TERMINAL",0,0,"[1;398H7[5;60H5[6d4[7d7[8d7[9d7[10d9[11d2[67;403H",,terminal_output
|
| 49 |
+
49,52906,"TERMINAL",0,0,"[1;398H8[5;60H6[6d5[7d8[8d8[9d8[10d20[11d3[67;403H",,terminal_output
|
| 50 |
+
50,53965,"TERMINAL",0,0,"[1;398H9[5;60H7[6d6[7d9[8d9[9d9[10d1[11d4[67;403H",,terminal_output
|
| 51 |
+
51,55013,"TERMINAL",0,0,"[1;397H50[5;60H8[6d7[7d10[8d10[9d10[10d2[11d5[67;403H",,terminal_output
|
| 52 |
+
52,56066,"TERMINAL",0,0,"[1;398H1[5;60H9[6d8[7d1[8d1[9d1[10d3[11d6[67;403H",,terminal_output
|
| 53 |
+
53,57130,"TERMINAL",0,0,"[1;398H2[5;59H30[6d9[7d2[8d2[9d2[10d4[11d7[67;403H",,terminal_output
|
| 54 |
+
54,57167,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 55 |
+
55,59809,"TERMINAL",0,0,"scancel 3349982",,terminal_command
|
| 56 |
+
56,59820,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:55 scancel 3349982;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output
|
| 57 |
+
57,60880,"TERMINAL",0,0,"salloc: Nodes hkn[0423-0424] are ready for job\r\n",,terminal_output
|
| 58 |
+
58,61361,"TERMINAL",0,0,"queue",,terminal_command
|
| 59 |
+
59,61421,"TERMINAL",0,0,"]633;E;2025-07-16 13:51:56 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;359Hhkn1991.localdomain: Wed Jul 16 13:51:56 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3349982 accelerat interact tum_cte0 CG[56G10:32\t 2 hkn[0407,0511][5;12H3350111 accelerat interact tum_cte0 CG\t9:37\t 1 hkn0415[6;12H3348592 accelerat train_dy tum_cte0 R 13:51:34\t 2 hkn[0416,0421][7;12H3348397 accelerat train_dy tum_cte0 R 19:13:16\t 2 hkn[0810,0815][8;12H3348399 accelerat train_dy tum_cte0 R 19:13:16\t 2 hkn[0601,0603][9;12H3348400 accelerat train_dy tum_cte0 R 19:13:16\t 2 hkn[0604,0608][10;12H3350245 accelerat interact tum_cte0 R\t0:28\t 2 hkn[0423-0424][11;12H3345116 accelerat train_dy tum_cte0 R 1-19:57:01\t 2 hkn[0503,0506][67;403H",,terminal_output
|
| 60 |
+
60,61987,"TERMINAL",0,0,"]0;tum_cte0515@hkn0423:~/Projects/jafar[?2004h[tum_cte0515@hkn0423 jafar]$ ",,terminal_output
|
| 61 |
+
61,62457,"TERMINAL",0,0,"[1;398H7[6;60H6[7d8[8d8[9d8[10d30[11d3[67;403H",,terminal_output
|
| 62 |
+
62,63517,"TERMINAL",0,0,"[1;398H9[6;60H7[7d9[8d9[9d9[10d1[11d4[67;403H",,terminal_output
|
| 63 |
+
63,64554,"TERMINAL",0,0,"[1;395H2:00[6;60H8[7d20[8d20[9d20[10d2[11d5[67;403H",,terminal_output
|
| 64 |
+
64,65608,"TERMINAL",0,0,"[1;398H1[6;60H9[7d1[8d1[9d1[10d3[11d6[67;403H",,terminal_output
|
| 65 |
+
65,66695,"TERMINAL",0,0,"[1;398H2[6;59H40[7d2[8d2[9d2[10d4[11d7[67;403H",,terminal_output
|
| 66 |
+
66,67699,"TERMINAL",0,0,"[1;398H3[6;60H1[7d3[8d3[9d3[10d5[11d8[67;403H",,terminal_output
|
| 67 |
+
67,68748,"TERMINAL",0,0,"[1;398H4[6;60H2[7d4[8d4[9d4[10d6[11d9[67;403H",,terminal_output
|
| 68 |
+
68,69815,"TERMINAL",0,0,"[1;398H5[6;60H3[7d5[8d5[9d5[10d7[11d10[67;403H",,terminal_output
|
| 69 |
+
69,70853,"TERMINAL",0,0,"[1;398H6[4;67H1 hkn0407[K[6;60H4[7d6[8d6[9d6[10d8[11d1[67;403H",,terminal_output
|
| 70 |
+
70,71899,"TERMINAL",0,0,"[1;398H7[6;60H5[7d7[8d7[9d7[10d9[11d2[67;403H",,terminal_output
|
| 71 |
+
71,73029,"TERMINAL",0,0,"[1;398H8[6;60H6[7d8[8d8[9d8[10d40[11d3[67;403H",,terminal_output
|
| 72 |
+
72,74047,"TERMINAL",0,0,"[1;398H9[6;60H7[7d9[8d9[9d9[10d1[11d4[67;403H",,terminal_output
|
| 73 |
+
73,75046,"TERMINAL",0,0,"[1;397H10[6;60H8[7d30[8d30[9d30[10d2[11d5[67;403H",,terminal_output
|
| 74 |
+
74,76160,"TERMINAL",0,0,"[1;398H1[6;60H9[7d1[8d1[9d1[10d3[11d6[67;403H",,terminal_output
|
| 75 |
+
75,77184,"TERMINAL",0,0,"[1;398H2[6;59H50[7d2[8d2[9d2[10d4[11d7[67;403H",,terminal_output
|
| 76 |
+
76,78194,"TERMINAL",0,0,"[1;398H3[6;60H1[7d3[8d3[9d3[10d5[11d8[67;403H",,terminal_output
|
| 77 |
+
77,79248,"TERMINAL",0,0,"[1;398H4[6;60H2[7d4[8d4[9d4[10d6[11d9[67;403H",,terminal_output
|
| 78 |
+
78,80291,"TERMINAL",0,0,"[1;398H5[6;60H3[7d5[8d5[9d5[10d7[11d20[67;403H",,terminal_output
|
| 79 |
+
79,81385,"TERMINAL",0,0,"[1;398H6[6;60H4[7d6[8d6[9d6[10d8[11d1[67;403H",,terminal_output
|
| 80 |
+
80,82408,"TERMINAL",0,0,"[1;398H7[6;60H5[7d7[8d7[9d7[10d9[11d2[67;403H",,terminal_output
|
| 81 |
+
81,83532,"TERMINAL",0,0,"[1;398H8[6;60H6[7d8[8d8[9d8[10d50[11d3[67;403H",,terminal_output
|
| 82 |
+
82,84556,"TERMINAL",0,0,"[1;398H9[6;60H8[7d40[8d40[9d40[10d2[11d5[67;403H",,terminal_output
|
| 83 |
+
83,85537,"TERMINAL",0,0,"[1;397H21[6;60H9[7d1[8d1[9d1[10d3[11d6[67;403H",,terminal_output
|
| 84 |
+
84,86581,"TERMINAL",0,0,"[1;398H2[6;57H2:00[7d2[8d2[9d2[10d4[11d7[67;403H",,terminal_output
|
| 85 |
+
85,87738,"TERMINAL",0,0,"[1;398H3[6;60H1[7d3[8d3[9d3[10d5[11d8[67;403H",,terminal_output
|
| 86 |
+
86,88674,"TERMINAL",0,0,"[1;398H4[6;60H2[7d4[8d4[9d4[10d6[11d9[67;403H",,terminal_output
|
| 87 |
+
87,89798,"TERMINAL",0,0,"\r[11d[J[1;398H5[5;14H48592[5;30Htrain_dy[5;48H R 13:52:03\t 2 hkn[0416,0421][6;16H397[6;54H9:13:45[6;74H810,0815[7;18H9[7;60H5[7;74H601,0603[8;16H400[8;60H5[8;76H4,0608[9;14H50245[9;30Hinteract[9;53H 0:57[9;74H423-0424[10;14H45116[10;30Htrain_dy[10;51H1-19:57:30[10;74H503,0506[67;403H",,terminal_output
|
| 88 |
+
88,90806,"TERMINAL",0,0,"[1;398H6[5;60H4[6d6[7d6[8d6[9d8[10d1[67;403H",,terminal_output
|
| 89 |
+
89,91909,"TERMINAL",0,0,"[1;398H7[5;60H5[6d7[7d7[8d7[9d9[10d2[67;403H",,terminal_output
|
| 90 |
+
90,92943,"TERMINAL",0,0,"[1;398H8[5;60H6[6d8[7d8[8d8[9;57H1:00[10d3[67;403H",,terminal_output
|
| 91 |
+
91,93977,"TERMINAL",0,0,"[1;398H9[5;60H7[6d9[7d9[8d9[9d1[10d4[67;403H",,terminal_output
|
| 92 |
+
92,95002,"TERMINAL",0,0,"[1;397H30[5;60H8[6d50[7d50[8d50[9d2[10d5[67;403H",,terminal_output
|
| 93 |
+
93,96032,"TERMINAL",0,0,"[1;398H1[5;60H9[6d1[7d1[8d1[9d3[10d6[67;403H",,terminal_output
|
| 94 |
+
94,97062,"TERMINAL",0,0,"[1;398H2[5;59H10[6d2[7d2[8d2[9d4[10d7[67;403H",,terminal_output
|
| 95 |
+
95,98121,"TERMINAL",0,0,"[1;398H3[5;60H1[6d3[7d3[8d3[9d5[10d8[67;403H",,terminal_output
|
| 96 |
+
96,99204,"TERMINAL",0,0,"[1;398H4[5;60H2[6d4[7d4[8d4[9d6[10d9[67;403H",,terminal_output
|
| 97 |
+
97,100210,"TERMINAL",0,0,"[1;398H5[5;60H3[6d5[7d5[8d5[9d7[10d40[67;403H",,terminal_output
|
| 98 |
+
98,101290,"TERMINAL",0,0,"\r[10d[J[1;398H6[4;15H859[4;30Htrain_dy[4;48H R 13:52:14\t 2 hkn[0416,0421][5;16H397[5;54H9:13:56[5;74H810,0815[6;18H9[6;60H6[6;74H601,0603[7;16H400[7;60H6[7;76H4,0608[8;14H50245[8;30Hinteract[8;53H 1:08[8;74H423-0424[9;14H45116[9;30Htrain_dy[9;51H1-19:57:41[9;74H503,0506[67;403H",,terminal_output
|
| 99 |
+
99,102334,"TERMINAL",0,0,"[1;398H7[4;60H5[5d7[6d7[7d7[8d9[9d2[67;403H",,terminal_output
|
| 100 |
+
100,103359,"TERMINAL",0,0,"[1;398H8[4;60H6[5d8[6d8[7d8[8d10[9d3[67;403H",,terminal_output
|
| 101 |
+
101,104412,"TERMINAL",0,0,"[1;398H9[4;60H7[5d9[6d9[7d9[8d1[9d4[67;403H",,terminal_output
|
| 102 |
+
102,105476,"TERMINAL",0,0,"[1;397H40[4;60H9[5;57H4:01[6;57H4:01[7;57H4:01[8d3[9d6[67;403H",,terminal_output
|
| 103 |
+
103,106533,"TERMINAL",0,0,"[1;398H2[4;59H20[5d2[6d2[7d2[8d4[9d7[67;403H",,terminal_output
|
| 104 |
+
104,107610,"TERMINAL",0,0,"[1;398H3[4;60H1[5d3[6d3[7d3[8d5[9d8[67;403H",,terminal_output
|
| 105 |
+
105,108608,"TERMINAL",0,0,"[1;398H4[4;60H2[5d4[6d4[7d4[8d6[9d9[67;403H",,terminal_output
|
| 106 |
+
106,109651,"TERMINAL",0,0,"[1;398H5[4;60H3[5d5[6d5[7d5[8d7[9d50[67;403H",,terminal_output
|
| 107 |
+
107,110697,"TERMINAL",0,0,"[1;398H6[4;60H4[5d6[6d6[7d6[8d8[9d1[67;403H",,terminal_output
|
| 108 |
+
108,111743,"TERMINAL",0,0,"[1;398H7[4;60H5[5d7[6d7[7d7[8d9[9d2[67;403H",,terminal_output
|
| 109 |
+
109,112789,"TERMINAL",0,0,"[1;398H8[4;60H6[5d8[6d8[7d8[8d20[9d3[67;403H",,terminal_output
|
| 110 |
+
110,113948,"TERMINAL",0,0,"[1;398H9[4;60H7[5d9[6d9[7d9[8d1[9d4[67;403H",,terminal_output
|
| 111 |
+
111,114973,"TERMINAL",0,0,"[1;397H50[4;60H8[5d10[6d10[7d10[8d2[9d5[67;403H",,terminal_output
|
| 112 |
+
112,115997,"TERMINAL",0,0,"[1;398H1[4;60H9[5d1[6d1[7d1[8d3[9d6[67;403H",,terminal_output
|
| 113 |
+
113,117011,"TERMINAL",0,0,"[1;398H2[4;59H30[5d2[6d2[7d2[8d4[9d7[67;403H",,terminal_output
|
| 114 |
+
114,118148,"TERMINAL",0,0,"[1;398H3[4;60H1[5d3[6d3[7d3[8d5[9d8[67;403H",,terminal_output
|
| 115 |
+
115,119171,"TERMINAL",0,0,"[1;398H4[4;60H2[5d4[6d4[7d4[8d6[9d9[67;403H",,terminal_output
|
| 116 |
+
116,119335,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 117 |
+
117,120747,"TERMINAL",0,0,"smi",,terminal_command
|
| 118 |
+
118,120813,"TERMINAL",0,0,"]633;E;2025-07-16 13:52:56 smi;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: nvidia-smi[1;359Hhkn1991.localdomain: Wed Jul 16 13:52:56 2025[3;1Hsh: line 1: nvidia-smi: command not found[67;403H",,terminal_output
|
| 119 |
+
119,121696,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 120 |
+
120,122902,"TERMINAL",0,0,"srun",,terminal_focus
|
| 121 |
+
121,124100,"TERMINAL",0,0,"s",,terminal_output
|
| 122 |
+
122,124160,"TERMINAL",0,0,"m",,terminal_output
|
| 123 |
+
123,124220,"TERMINAL",0,0,"i",,terminal_output
|
| 124 |
+
124,124434,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
|
| 125 |
+
125,126034,"TERMINAL",0,0,"[?1049h[22;0;0t[1;67r(B[m[4l[?7h[H[2JEvery 1.0s: nvidia-smi[1;359Hhkn0423.localdomain: Wed Jul 16 13:52:59 2025[3;1HWed Jul 16 13:53:00 2025\r[4d+-----------------------------------------------------------------------------------------+\r[5d| NVIDIA-SMI 570.133.20[5;37HDriver Version: 570.133.20 CUDA Version: 12.8 |\r[6d|-----------------------------------------+------------------------+----------------------+\r[7d| GPU Name[7;29HPersistence-M | Bus-Id[7;61HDisp.A | Volatile Uncorr. ECC |\r[8d| Fan Temp Perf[8;29HPwr:Usage/Cap |[8;55HMemory-Usage | GPU-Util Compute M. |\r[9d|[9;43H|[9;68H|[9;84HMIG M. |\r[10d|=========================================+========================+======================|\r[11d| 0 NVIDIA A100-SXM4-40GB[11;39HOn | 00000000:31:00.0 Off |[11;89H0 |\r[12d| N/A 44C P0[12;31H51W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r[13d|[13;43H|[13;68H|[13;82HDisabled |\r[14d+-----------------------------------------+------------------------+----------------------+\r[15d| 1 NVIDIA A100-SXM4-40GB[15;39HOn | 00000000:4B:00.0 Off |[15;89H0 |\r[16d| N/A 44C P0[16;31H54W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r[17d|[17;43H|[17;68H|[17;82HDisabled |\r[18d+-----------------------------------------+------------------------+----------------------+\r[19d| 2 NVIDIA A100-SXM4-40GB[19;39HOn | 00000000:CA:00.0 Off |[19;89H0 |\r[20d| N/A 44C P0[20;31H52W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r[21d|[21;43H|[21;68H|[21;82HDisabled |\r[22d+-----------------------------------------+------------------------+----------------------+\r[23d| 3 NVIDIA A100-SXM4-40GB[23;39HOn | 00000000:E3:00.0 Off |[23;89H0 |\r[24d| N/A 44C P0[24;31H53W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r[25d|[25;43H|[25;68H|[25;82HDisabled |\r[26d+-----------------------------------------+------------------------+----------------------+\r[28d+-----------------------------------------------------------------------------------------+\r[29d| Processes:[29;91H|\r[30d| GPU GI CI[30;31HPID Type Process name[30;80HGPU Memory |\r[31d|[31;10HID ID[31;80HUsage\t |\r[32d|=========================================================================================|\r[33d| 0 N/A N/A[33;30H2966[40GG /usr/libexec/Xorg[33;85H17MiB |\r[34d| 1 N/A N/A[34;30H2966[40GG /usr/libexec/Xorg[34;85H17MiB |\r[35d| 2 N/A N/A[35;30H2966[40GG /usr/libexec/Xorg[35;85H17MiB |\r[36d| 3 N/A N/A[36;30H2966[40GG /usr/libexec/Xorg[36;85H17MiB |\r[37d+-----------------------------------------------------------------------------------------+[67;403H",,terminal_output
|
| 126 |
+
126,126701,"TERMINAL",0,0,"[67;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn0423:~/Projects/jafar[?2004h[tum_cte0515@hkn0423 jafar]$ ",,terminal_output
|
| 127 |
+
127,127637,"TERMINAL",0,0,"s",,terminal_output
|
| 128 |
+
128,127696,"TERMINAL",0,0,"[?25l[8;31Ho[8;32H[?25h",,terminal_output
|
| 129 |
+
129,127836,"TERMINAL",0,0,"[?25l[8;32Hu[8;33H[?25h",,terminal_output
|
| 130 |
+
130,127899,"TERMINAL",0,0,"[?25l[8;33Hr[8;34H[?25h",,terminal_output
|
| 131 |
+
131,128110,"TERMINAL",0,0,"[?25l[8;34Hc[8;35H[?25h",,terminal_output
|
| 132 |
+
132,128230,"TERMINAL",0,0,"[?25l[8;35He[8;36H[?25h",,terminal_output
|
| 133 |
+
133,128289,"TERMINAL",0,0,"[?25l[8;36H [8;37H[?25h",,terminal_output
|
| 134 |
+
134,128466,"TERMINAL",0,0,"[?25l[8;37Hv[8;38H[?25h",,terminal_output
|
| 135 |
+
135,128982,"TERMINAL",0,0,"[?25l[8;37H.[8;39H[?25h",,terminal_output
|
| 136 |
+
136,129039,"TERMINAL",0,0,"[?25l[8;38Hv[8;39H[?25h",,terminal_output
|
| 137 |
+
137,129181,"TERMINAL",0,0,"env/",,terminal_output
|
| 138 |
+
138,129467,"TERMINAL",0,0,"[?25l[8;43Hb[8;44H[?25h",,terminal_output
|
| 139 |
+
139,129530,"TERMINAL",0,0,"in/",,terminal_output
|
| 140 |
+
140,130084,"TERMINAL",0,0,"[?25l[8;47Ha[8;48H[?25h",,terminal_output
|
| 141 |
+
141,130844,"TERMINAL",0,0,"[?25l[8;48Hc[8;49H[?25h",,terminal_output
|
| 142 |
+
142,131268,"TERMINAL",0,0,"tivate",,terminal_output
|
| 143 |
+
143,131542,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0423:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0423 jafar]$ ",,terminal_output
|
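The session above allocates GPU nodes with `salloc` and verifies them with `nvidia-smi` via the `smi` alias. A hypothetical JAX-side counterpart of that check, mirroring the device guard at the top of the recorded training script:

import jax

print(jax.devices())  # e.g. [CudaDevice(id=0), CudaDevice(id=1), ...]
if jax.device_count() == 0:
    raise ValueError("No JAX devices found.")
print(f"{jax.process_count()} process(es), {jax.local_device_count()} local device(s)")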
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-527e6f79-68be-4672-b584-2b15b1cf78281754060417734-2025_08_01-17.00.27.872/source.csv
ADDED
|
@@ -0,0 +1,1141 @@
|
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
2,142,"tasks",0,0,"",Log,tab
|
| 3 |
+
3,3516,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:00:27 PM [info] Activating crowd-code\n5:00:27 PM [info] Recording started\n5:00:27 PM [info] Initializing git provider using file system watchers...\n5:00:28 PM [info] Git repository found\n5:00:28 PM [info] Git provider initialized successfully\n5:00:28 PM [info] Initial git state: [object Object]\n",Log,tab
|
| 4 |
+
4,5906,"TERMINAL",0,0,"undefined[tum_cte0515@hkn1990 jafar]$ queue",,terminal_command
|
| 5 |
+
5,5967,"TERMINAL",0,0,"]633;E;2025-08-01 17:00:33 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h[22;0;0t[1;39r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;78Hhkn1990.localdomain: Fri Aug 1 17:00:33 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3390458 accelerat train_dy tum_cte0 PD\t0:00 12 (Priority)[5;12H3388151 accelerat train_to tum_cte0 R 5:57:25\t 2 hkn[0604-0605][6;12H3388153 accelerat train_to tum_cte0 R 5:57:25\t 2 hkn[0607-0608][7;12H3389801 accelerat train_dy tum_cte0 R 4:33:06\t 3 hkn[0405,0415,0417][39;122H",,terminal_output
6,7009,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
… rows 7–699 omitted: per-second terminal_output refreshes of the `watch squeue` screen (cursor-positioning escape sequences that update only the wall clock in the header and the running jobs' elapsed-time columns), spanning timestamps 8074 ms to 735507 ms. See raw diff for the full stream.
|
| 700 |
+
700,736560,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 701 |
+
701,737609,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 702 |
+
702,738703,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 703 |
+
703,739709,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 704 |
+
704,740753,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 705 |
+
705,741803,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 706 |
+
706,742850,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 707 |
+
707,743904,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 708 |
+
708,744990,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 709 |
+
709,746458,"TERMINAL",0,0,"[1;117H3[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 710 |
+
710,747499,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 711 |
+
711,748630,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 712 |
+
712,749611,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 713 |
+
713,750679,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 714 |
+
714,751685,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 715 |
+
715,752727,"TERMINAL",0,0,"[1;114H3:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 716 |
+
716,753968,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 717 |
+
717,754898,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 718 |
+
718,755927,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 719 |
+
719,756928,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 720 |
+
720,758050,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 721 |
+
721,759075,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 722 |
+
722,760102,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 723 |
+
723,761124,"TERMINAL",0,0,"[1;117H8[5;56H10:00[6;56H10:00[7d1[39;122H",,terminal_output
|
| 724 |
+
724,762196,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 725 |
+
725,763302,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 726 |
+
726,764269,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 727 |
+
727,765352,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 728 |
+
728,766447,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 729 |
+
729,767411,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 730 |
+
730,768461,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 731 |
+
731,769621,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 732 |
+
732,770647,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 733 |
+
733,771611,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 734 |
+
734,772651,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 735 |
+
735,773702,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 736 |
+
736,774779,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 737 |
+
737,775806,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 738 |
+
738,776857,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 739 |
+
739,777923,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 740 |
+
740,779019,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 741 |
+
741,780102,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H6:00[39;122H",,terminal_output
|
| 742 |
+
742,781052,"TERMINAL",0,0,"[1;117H8[5;59H20[6d20[7d1[39;122H",,terminal_output
|
| 743 |
+
743,782114,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 744 |
+
744,783244,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 745 |
+
745,784382,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 746 |
+
746,785290,"TERMINAL",0,0,"[1;117H2[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 747 |
+
747,786416,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 748 |
+
748,787373,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 749 |
+
749,788373,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 750 |
+
750,789425,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 751 |
+
751,790463,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 752 |
+
752,791527,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 753 |
+
753,792582,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 754 |
+
754,793659,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 755 |
+
755,794736,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 756 |
+
756,795738,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 757 |
+
757,796758,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 758 |
+
758,797885,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 759 |
+
759,798909,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 760 |
+
760,799932,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 761 |
+
761,800960,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 762 |
+
762,802086,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 763 |
+
763,803031,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 764 |
+
764,804076,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 765 |
+
765,805125,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 766 |
+
766,806169,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 767 |
+
767,807271,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 768 |
+
768,808370,"TERMINAL",0,0,"[1;117H5[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 769 |
+
769,809392,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 770 |
+
770,810381,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 771 |
+
771,811428,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 772 |
+
772,812477,"TERMINAL",0,0,"[1;114H4:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 773 |
+
773,813530,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 774 |
+
774,814579,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 775 |
+
775,815625,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 776 |
+
776,816677,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 777 |
+
777,817755,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 778 |
+
778,818780,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 779 |
+
779,819814,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 780 |
+
780,820929,"TERMINAL",0,0,"[1;117H8[5;57H1:00[6;57H1:00[7d1[39;122H",,terminal_output
|
| 781 |
+
781,821949,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 782 |
+
782,822977,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 783 |
+
783,824104,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 784 |
+
784,825047,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 785 |
+
785,826148,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 786 |
+
786,827170,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 787 |
+
787,828206,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 788 |
+
788,829252,"TERMINAL",0,0,"[1;117H6[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 789 |
+
789,830296,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 790 |
+
790,831352,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 791 |
+
791,832402,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 792 |
+
792,833441,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 793 |
+
793,834584,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 794 |
+
794,835542,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 795 |
+
795,836679,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 796 |
+
796,837717,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 797 |
+
797,838774,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 798 |
+
798,839804,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H7:00[39;122H",,terminal_output
|
| 799 |
+
799,840791,"TERMINAL",0,0,"[1;117H8[5;59H20[6d20[7d1[39;122H",,terminal_output
|
| 800 |
+
800,841836,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 801 |
+
801,843075,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 802 |
+
802,843964,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 803 |
+
803,844996,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 804 |
+
804,846034,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 805 |
+
805,847141,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 806 |
+
806,848114,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 807 |
+
807,849166,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 808 |
+
808,850219,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 809 |
+
809,851256,"TERMINAL",0,0,"[1;117H8[5;59H31[6d31[7d2[39;122H",,terminal_output
|
| 810 |
+
810,852308,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 811 |
+
811,853418,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 812 |
+
812,854412,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 813 |
+
813,855448,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 814 |
+
814,856498,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 815 |
+
815,857547,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 816 |
+
816,858607,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 817 |
+
817,859842,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 818 |
+
818,860866,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 819 |
+
819,861762,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 820 |
+
820,862789,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 821 |
+
821,863836,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 822 |
+
822,864884,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 823 |
+
823,865958,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 824 |
+
824,867211,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 825 |
+
825,868233,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 826 |
+
826,869232,"TERMINAL",0,0,"[1;117H6[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 827 |
+
827,870272,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 828 |
+
828,871324,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 829 |
+
829,872372,"TERMINAL",0,0,"[1;114H5:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 830 |
+
830,873417,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 831 |
+
831,874479,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 832 |
+
832,875510,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 833 |
+
833,876546,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 834 |
+
834,877655,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 835 |
+
835,878647,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 836 |
+
836,879709,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 837 |
+
837,880734,"TERMINAL",0,0,"[1;117H8[5;57H2:00[6;57H2:00[7d1[39;122H",,terminal_output
|
| 838 |
+
838,881789,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 839 |
+
839,882837,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 840 |
+
840,883884,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 841 |
+
841,884955,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 842 |
+
842,886004,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 843 |
+
843,887107,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 844 |
+
844,888080,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 845 |
+
845,889123,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 846 |
+
846,890174,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 847 |
+
847,891454,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 848 |
+
848,892253,"TERMINAL",0,0,"[1;117H9[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 849 |
+
849,893298,"TERMINAL",0,0,"[1;116H21[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 850 |
+
850,894348,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 851 |
+
851,895444,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 852 |
+
852,896453,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 853 |
+
853,897486,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 854 |
+
854,898540,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 855 |
+
855,899589,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H8:00[39;122H",,terminal_output
|
| 856 |
+
856,900628,"TERMINAL",0,0,"[1;117H8[5;59H20[6d20[7d1[39;122H",,terminal_output
|
| 857 |
+
857,901687,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 858 |
+
858,902745,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 859 |
+
859,903774,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 860 |
+
860,904998,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 861 |
+
861,906020,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 862 |
+
862,907045,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 863 |
+
863,908176,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 864 |
+
864,909101,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 865 |
+
865,910149,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 866 |
+
866,911200,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 867 |
+
867,912270,"TERMINAL",0,0,"[1;117H9[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 868 |
+
868,913357,"TERMINAL",0,0,"[1;116H41[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 869 |
+
869,914449,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 870 |
+
870,915444,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 871 |
+
871,916465,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 872 |
+
872,917495,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 873 |
+
873,918513,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 874 |
+
874,919561,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 875 |
+
875,920608,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 876 |
+
876,921669,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 877 |
+
877,922700,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 878 |
+
878,923766,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 879 |
+
879,924792,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 880 |
+
880,925872,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 881 |
+
881,926963,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 882 |
+
882,927974,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 883 |
+
883,929061,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 884 |
+
884,930086,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 885 |
+
885,931123,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 886 |
+
886,932236,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 887 |
+
887,933260,"TERMINAL",0,0,"[1;114H6:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 888 |
+
888,934284,"TERMINAL",0,0,"[1;117H1[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 889 |
+
889,935455,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 890 |
+
890,936434,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 891 |
+
891,937446,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 892 |
+
892,938457,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 893 |
+
893,939501,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 894 |
+
894,940551,"TERMINAL",0,0,"[1;117H8[5;57H3:00[6;57H3:00[7d1[39;122H",,terminal_output
|
| 895 |
+
895,941605,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 896 |
+
896,942659,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 897 |
+
897,943705,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 898 |
+
898,944867,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 899 |
+
899,945860,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 900 |
+
900,946835,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 901 |
+
901,947904,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 902 |
+
902,949029,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 903 |
+
903,950057,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 904 |
+
904,951075,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 905 |
+
905,952080,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 906 |
+
906,953128,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 907 |
+
907,954174,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 908 |
+
908,955228,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 909 |
+
909,956274,"TERMINAL",0,0,"[1;117H3[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 910 |
+
910,957352,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 911 |
+
911,958369,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 912 |
+
912,959474,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H9:00[39;122H",,terminal_output
|
| 913 |
+
913,960496,"TERMINAL",0,0,"[1;117H8[5;59H20[6d20[7d1[39;122H",,terminal_output
|
| 914 |
+
914,961524,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 915 |
+
915,962546,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 916 |
+
916,963776,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 917 |
+
917,964695,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 918 |
+
918,965682,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 919 |
+
919,966750,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 920 |
+
920,967797,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 921 |
+
921,968813,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 922 |
+
922,969892,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 923 |
+
923,970978,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 924 |
+
924,971964,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 925 |
+
925,973097,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 926 |
+
926,974116,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 927 |
+
927,975142,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 928 |
+
928,976166,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 929 |
+
929,977293,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 930 |
+
930,978236,"TERMINAL",0,0,"[1;117H5[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 931 |
+
931,979280,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 932 |
+
932,980330,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 933 |
+
933,981369,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 934 |
+
934,982441,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 935 |
+
935,983490,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 936 |
+
936,984571,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 937 |
+
937,985622,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 938 |
+
938,986616,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 939 |
+
939,987671,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 940 |
+
940,988759,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 941 |
+
941,989886,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 942 |
+
942,990912,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 943 |
+
943,991871,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 944 |
+
944,992976,"TERMINAL",0,0,"[1;114H7:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 945 |
+
945,993968,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 946 |
+
946,995018,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 947 |
+
947,996072,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 948 |
+
948,997135,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 949 |
+
949,998169,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 950 |
+
950,999229,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 951 |
+
951,1000318,"TERMINAL",0,0,"[1;117H7[5;57H4:00[6;57H4:00[7d41[39;122H",,terminal_output
|
| 952 |
+
952,1001384,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 953 |
+
953,1002382,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 954 |
+
954,1003455,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 955 |
+
955,1004531,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 956 |
+
956,1005553,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 957 |
+
957,1006575,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 958 |
+
958,1007626,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 959 |
+
959,1008731,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 960 |
+
960,1009754,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 961 |
+
961,1010779,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 962 |
+
962,1011818,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 963 |
+
963,1012874,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 964 |
+
964,1013921,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 965 |
+
965,1014977,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 966 |
+
966,1016009,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 967 |
+
967,1017129,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 968 |
+
968,1018161,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 969 |
+
969,1019173,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 970 |
+
970,1020297,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;56H50:00[39;122H",,terminal_output
|
| 971 |
+
971,1021334,"TERMINAL",0,0,"[1;117H8[5;59H21[6d21[7d2[39;122H",,terminal_output
|
| 972 |
+
972,1022349,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 973 |
+
973,1023362,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 974 |
+
974,1024415,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 975 |
+
975,1025462,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 976 |
+
976,1026516,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 977 |
+
977,1027575,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 978 |
+
978,1028622,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 979 |
+
979,1029700,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 980 |
+
980,1030716,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 981 |
+
981,1031773,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 982 |
+
982,1032837,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 983 |
+
983,1033918,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 984 |
+
984,1034942,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 985 |
+
985,1035965,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 986 |
+
986,1036990,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 987 |
+
987,1038119,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 988 |
+
988,1039091,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 989 |
+
989,1040145,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 990 |
+
990,1041190,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 991 |
+
991,1042247,"TERMINAL",0,0,"[1;117H9[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 992 |
+
992,1043306,"TERMINAL",0,0,"[1;116H51[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 993 |
+
993,1044364,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 994 |
+
994,1045389,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 995 |
+
995,1046550,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 996 |
+
996,1047490,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 997 |
+
997,1048562,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 998 |
+
998,1049561,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 999 |
+
999,1050604,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 1000 |
+
1000,1051644,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1001 |
+
1001,1052763,"TERMINAL",0,0,"[1;114H8:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1002 |
+
1002,1053729,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1003 |
+
1003,1055117,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1004 |
+
1004,1056256,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1005 |
+
1005,1057334,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1006 |
+
1006,1058292,"TERMINAL",0,0,"[1;117H5[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1007 |
+
1007,1059322,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 1008 |
+
1008,1060347,"TERMINAL",0,0,"[1;117H8[5;57H5:00[6;57H5:00[7d1[39;122H",,terminal_output
|
| 1009 |
+
1009,1061394,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1010 |
+
1010,1062431,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1011 |
+
1011,1063538,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1012 |
+
1012,1064546,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1013 |
+
1013,1065590,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1014 |
+
1014,1066673,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1015 |
+
1015,1067688,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1016 |
+
1016,1068740,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1017 |
+
1017,1069790,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 1018 |
+
1018,1070910,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 1019 |
+
1019,1071873,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1020 |
+
1020,1072915,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1021 |
+
1021,1073977,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1022 |
+
1022,1075022,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1023 |
+
1023,1076063,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1024 |
+
1024,1077170,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1025 |
+
1025,1078164,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1026 |
+
1026,1079292,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1027 |
+
1027,1080307,"TERMINAL",0,0,"[1;117H7[5;59H20[6d20[7;57H1:01[39;122H",,terminal_output
|
| 1028 |
+
1028,1081329,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1029 |
+
1029,1082364,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1030 |
+
1030,1083412,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1031 |
+
1031,1084614,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1032 |
+
1032,1085534,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1033 |
+
1033,1086553,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1034 |
+
1034,1087633,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1035 |
+
1035,1088648,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1036 |
+
1036,1089697,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 1037 |
+
1037,1090756,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 1038 |
+
1038,1091788,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1039 |
+
1039,1092843,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1040 |
+
1040,1093927,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1041 |
+
1041,1094950,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1042 |
+
1042,1095972,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1043 |
+
1043,1097106,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1044 |
+
1044,1098124,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1045 |
+
1045,1099132,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1046 |
+
1046,1100165,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 1047 |
+
1047,1101215,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 1048 |
+
1048,1102275,"TERMINAL",0,0,"[1;117H9[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1049 |
+
1049,1103346,"TERMINAL",0,0,"[1;116H51[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1050 |
+
1050,1104374,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1051 |
+
1051,1105396,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1052 |
+
1052,1106522,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1053 |
+
1053,1107593,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1054 |
+
1054,1108571,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1055 |
+
1055,1109595,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 1056 |
+
1056,1110611,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 1057 |
+
1057,1111673,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1058 |
+
1058,1112713,"TERMINAL",0,0,"[1;114H9:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1059 |
+
1059,1113775,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1060 |
+
1060,1114816,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1061 |
+
1061,1115867,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1062 |
+
1062,1116895,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1063 |
+
1063,1117942,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1064 |
+
1064,1119016,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1065 |
+
1065,1120048,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 1066 |
+
1066,1121089,"TERMINAL",0,0,"[1;117H8[5;57H6:00[6;57H6:00[7d1[39;122H",,terminal_output
|
| 1067 |
+
1067,1122186,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1068 |
+
1068,1123213,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1069 |
+
1069,1124240,"TERMINAL",0,0,"[1;117H1[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1070 |
+
1070,1125293,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1071 |
+
1071,1126345,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1072 |
+
1072,1127385,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1073 |
+
1073,1128454,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1074 |
+
1074,1129535,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 1075 |
+
1075,1130581,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 1076 |
+
1076,1131627,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1077 |
+
1077,1132651,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1078 |
+
1078,1133707,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1079 |
+
1079,1134756,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1080 |
+
1080,1135912,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1081 |
+
1081,1136855,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1082 |
+
1082,1137928,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1083 |
+
1083,1138948,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1084 |
+
1084,1140025,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H2:00[39;122H",,terminal_output
|
| 1085 |
+
1085,1141047,"TERMINAL",0,0,"[1;117H8[5;59H20[6d20[7d1[39;122H",,terminal_output
|
| 1086 |
+
1086,1142093,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1087 |
+
1087,1143152,"TERMINAL",0,0,"[1;116H30[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1088 |
+
1088,1144187,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1089 |
+
1089,1145259,"TERMINAL",0,0,"[1;117H2[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1090 |
+
1090,1146282,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1091 |
+
1091,1147324,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1092 |
+
1092,1148405,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1093 |
+
1093,1149436,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d10[39;122H",,terminal_output
|
| 1094 |
+
1094,1150553,"TERMINAL",0,0,"[1;117H8[5;59H30[6d30[7d1[39;122H",,terminal_output
|
| 1095 |
+
1095,1151578,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1096 |
+
1096,1152706,"TERMINAL",0,0,"[1;116H40[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1097 |
+
1097,1153607,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1098 |
+
1098,1154656,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1099 |
+
1099,1155701,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1100 |
+
1100,1156753,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1101 |
+
1101,1157836,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1102 |
+
1102,1158849,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1103 |
+
1103,1159934,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d20[39;122H",,terminal_output
|
| 1104 |
+
1104,1161006,"TERMINAL",0,0,"[1;117H8[5;59H40[6d40[7d1[39;122H",,terminal_output
|
| 1105 |
+
1105,1162020,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1106 |
+
1106,1163148,"TERMINAL",0,0,"[1;116H50[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1107 |
+
1107,1164175,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1108 |
+
1108,1165194,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1109 |
+
1109,1166224,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1110 |
+
1110,1167245,"TERMINAL",0,0,"[1;117H4[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1111 |
+
1111,1168386,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1112 |
+
1112,1169398,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d30[39;122H",,terminal_output
|
| 1113 |
+
1113,1170379,"TERMINAL",0,0,"[1;117H8[5;59H50[6d50[7d1[39;122H",,terminal_output
|
| 1114 |
+
1114,1171439,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1115 |
+
1115,1172480,"TERMINAL",0,0,"[1;113H20:00[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1116 |
+
1116,1173550,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1117 |
+
1117,1174642,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1118 |
+
1118,1175630,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1119 |
+
1119,1176673,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1120 |
+
1120,1177761,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1121 |
+
1121,1178762,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1122 |
+
1122,1179840,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d40[39;122H",,terminal_output
|
| 1123 |
+
1123,1181070,"TERMINAL",0,0,"[1;117H8[5;57H7:00[6;57H7:00[7d1[39;122H",,terminal_output
|
| 1124 |
+
1124,1182096,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1125 |
+
1125,1183120,"TERMINAL",0,0,"[1;116H10[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1126 |
+
1126,1184149,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1127 |
+
1127,1185194,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1128 |
+
1128,1186246,"TERMINAL",0,0,"[1;117H3[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1129 |
+
1129,1187295,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1130 |
+
1130,1188354,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1131 |
+
1131,1189394,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7d50[39;122H",,terminal_output
|
| 1132 |
+
1132,1190538,"TERMINAL",0,0,"[1;117H8[5;59H10[6d10[7d1[39;122H",,terminal_output
|
| 1133 |
+
1133,1191495,"TERMINAL",0,0,"[1;117H9[5;60H1[6d1[7d2[39;122H",,terminal_output
|
| 1134 |
+
1134,1192643,"TERMINAL",0,0,"[1;116H20[5;60H2[6d2[7d3[39;122H",,terminal_output
|
| 1135 |
+
1135,1193669,"TERMINAL",0,0,"[1;117H1[5;60H3[6d3[7d4[39;122H",,terminal_output
|
| 1136 |
+
1136,1194707,"TERMINAL",0,0,"[1;117H2[5;60H4[6d4[7d5[39;122H",,terminal_output
|
| 1137 |
+
1137,1195719,"TERMINAL",0,0,"[1;117H3[5;60H5[6d5[7d6[39;122H",,terminal_output
|
| 1138 |
+
1138,1196797,"TERMINAL",0,0,"[1;117H4[5;60H6[6d6[7d7[39;122H",,terminal_output
|
| 1139 |
+
1139,1197823,"TERMINAL",0,0,"[1;117H5[5;60H7[6d7[7d8[39;122H",,terminal_output
|
| 1140 |
+
1140,1198873,"TERMINAL",0,0,"[1;117H6[5;60H8[6d8[7d9[39;122H",,terminal_output
|
| 1141 |
+
1141,1199938,"TERMINAL",0,0,"[1;117H7[5;60H9[6d9[7;57H3:00[39;122H",,terminal_output
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5e6eb49e-ed8b-4688-9226-2f7ce3bdd3501759657457038-2025_10_05-11.44.58.692/source.csv
ADDED
The diff for this file is too large to render.
See raw diff
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-614215de-0f98-45e4-a49e-9e7d7e57cff91757422920029-2025_09_09-15.02.34.03/source.csv
ADDED
@@ -0,0 +1,198 @@
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
1,5,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> 
tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = 
jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n 
comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab
|
| 3 |
+
2,832,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab
|
| 4 |
+
3,1155,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"3:02:33 PM [info] Activating crowd-code\n3:02:34 PM [info] Recording started\n3:02:34 PM [info] Initializing git provider using file system watchers...\n3:02:34 PM [info] Git repository found\n3:02:34 PM [info] Git provider initialized successfully\n3:02:34 PM [info] Initial git state: [object Object]\n",Log,content
|
| 5 |
+
4,3766,"train_lam.py",0,0,"",python,tab
|
| 6 |
+
5,3771,"TERMINAL",0,0,"",,terminal_focus
|
| 7 |
+
6,14658,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command
|
| 8 |
+
7,136281,"TERMINAL",0,0,"bash",,terminal_focus
|
| 9 |
+
8,138228,"TERMINAL",0,0,"idling",,terminal_command
|
| 10 |
+
9,138301,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;50r(B[m[4l[?7h[H[2JEvery 1.0s: sinfo_t_idle[1;165Hhkn1990.localdomain: Tue Sep 9 15:04:52 2025[3;1HPartition dev_cpuonly[3;35H: 10 nodes idle\r[4dPartition cpuonly[4;35H: 26 nodes idle\r[5dPartition dev_accelerated[5;35H:\t 0 nodes idle\r[6dPartition accelerated[6;35H: 29 nodes idle\r[7dPartition dev_accelerated-h100 :\t 0 nodes idle\r[8dPartition accelerated-h100[8;35H:\t 0 nodes idle\r[9dPartition large[9;35H:\t 0 nodes idle\r[10dPartition accelerated-h200[10;35H:\t 0 nodes idle[50;209H",,terminal_output
|
| 11 |
+
10,139401,"TERMINAL",0,0,"[1;204H3[50d\t",,terminal_output
|
| 12 |
+
11,140419,"TERMINAL",0,0,"[1;204H4[50d\t",,terminal_output
|
| 13 |
+
12,140811,"TERMINAL",0,0,"[50;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output
|
| 14 |
+
13,146318,"TERMINAL",0,0,"_node",,terminal_command
|
| 15 |
+
14,146350,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output
|
| 16 |
+
15,150256,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command
|
| 17 |
+
16,150320,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3480467\r\n",,terminal_output
|
| 18 |
+
17,150455,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output
|
| 19 |
+
18,177507,"TERMINAL",0,0,"salloc: Nodes hkn0818 are ready for job\r\n",,terminal_output
|
| 20 |
+
19,178680,"TERMINAL",0,0,"]0;tum_cte0515@hkn0818:~/Projects/jasmine[?2004h[tum_cte0515@hkn0818 jasmine]$ ",,terminal_output
|
| 21 |
+
20,184841,"TERMINAL",0,0,"s",,terminal_output
|
| 22 |
+
21,185268,"TERMINAL",0,0,"[K",,terminal_output
|
| 23 |
+
22,185837,"TERMINAL",0,0,"cd",,terminal_output
|
| 24 |
+
23,186104,"TERMINAL",0,0,".",,terminal_output
|
| 25 |
+
24,186235,"TERMINAL",0,0,".",,terminal_output
|
| 26 |
+
25,186821,"TERMINAL",0,0,"[K",,terminal_output
|
| 27 |
+
26,187026,"TERMINAL",0,0,"[K",,terminal_output
|
| 28 |
+
27,187154,"TERMINAL",0,0," ",,terminal_output
|
| 29 |
+
28,187287,"TERMINAL",0,0,".",,terminal_output
|
| 30 |
+
29,187418,"TERMINAL",0,0,".",,terminal_output
|
| 31 |
+
30,187533,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0818:~/Projects[?2004h[tum_cte0515@hkn0818 Projects]$ ",,terminal_output
|
| 32 |
+
31,187833,"TERMINAL",0,0,"c",,terminal_output
|
| 33 |
+
32,187906,"TERMINAL",0,0,"d",,terminal_output
|
| 34 |
+
33,188023,"TERMINAL",0,0," ",,terminal_output
|
| 35 |
+
34,188446,"TERMINAL",0,0,"j",,terminal_output
|
| 36 |
+
35,188581,"TERMINAL",0,0,"a",,terminal_output
|
| 37 |
+
36,189081,"TERMINAL",0,0,"",,terminal_output
|
| 38 |
+
37,189470,"TERMINAL",0,0,"f",,terminal_output
|
| 39 |
+
38,189596,"TERMINAL",0,0,"ar/",,terminal_output
|
| 40 |
+
39,190052,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0818:~/Projects/jafar[?2004h[tum_cte0515@hkn0818 jafar]$ ",,terminal_output
|
| 41 |
+
40,192215,"TERMINAL",0,0,"s",,terminal_output
|
| 42 |
+
41,192295,"TERMINAL",0,0,"o",,terminal_output
|
| 43 |
+
42,192360,"TERMINAL",0,0,"u",,terminal_output
|
| 44 |
+
43,192442,"TERMINAL",0,0,"r",,terminal_output
|
| 45 |
+
44,192712,"TERMINAL",0,0,"c",,terminal_output
|
| 46 |
+
45,192823,"TERMINAL",0,0,"e ",,terminal_output
|
| 47 |
+
46,193263,"TERMINAL",0,0,".",,terminal_output
|
| 48 |
+
47,193438,"TERMINAL",0,0,"v",,terminal_output
|
| 49 |
+
48,193680,"TERMINAL",0,0,"env/",,terminal_output
|
| 50 |
+
49,193926,"TERMINAL",0,0,"b",,terminal_output
|
| 51 |
+
50,193991,"TERMINAL",0,0,"in/",,terminal_output
|
| 52 |
+
51,194269,"TERMINAL",0,0,"ac",,terminal_output
|
| 53 |
+
52,194456,"TERMINAL",0,0,"tivate",,terminal_output
|
| 54 |
+
53,194719,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0818:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0818 jafar]$ ",,terminal_output
|
| 55 |
+
54,199639,"TERMINAL",0,0,"c",,terminal_output
|
| 56 |
+
55,200167,"TERMINAL",0,0,"[K",,terminal_output
|
| 57 |
+
56,200333,"TERMINAL",0,0,"c",,terminal_output
|
| 58 |
+
57,200457,"TERMINAL",0,0,"u",,terminal_output
|
| 59 |
+
58,200651,"TERMINAL",0,0,"r",,terminal_output
|
| 60 |
+
59,200855,"TERMINAL",0,0,"s",,terminal_output
|
| 61 |
+
60,200929,"TERMINAL",0,0,"o",,terminal_output
|
| 62 |
+
61,201054,"TERMINAL",0,0,"r",,terminal_output
|
| 63 |
+
62,201137,"TERMINAL",0,0," ",,terminal_output
|
| 64 |
+
63,201325,"TERMINAL",0,0,".",,terminal_output
|
| 65 |
+
64,203292,"TERMINAL",0,0,"[K",,terminal_output
|
| 66 |
+
65,203449,"TERMINAL",0,0,"[K",,terminal_output
|
| 67 |
+
66,204518,"TERMINAL",0,0,"bash",,terminal_focus
|
| 68 |
+
67,209503,"TERMINAL",0,0,"cursor ../jafar/",,terminal_command
|
| 69 |
+
68,209549,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 70 |
+
69,209647,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output
|
| 71 |
+
70,296145,"TERMINAL",0,0,"srun",,terminal_focus
|
| 72 |
+
71,297158,"TERMINAL",0,0,"sh",,terminal_output
|
| 73 |
+
72,297269,"TERMINAL",0,0," ",,terminal_output
|
| 74 |
+
73,297457,"TERMINAL",0,0,"s",,terminal_output
|
| 75 |
+
74,297642,"TERMINAL",0,0,"a",,terminal_output
|
| 76 |
+
75,297756,"TERMINAL",0,0,"m",,terminal_output
|
| 77 |
+
76,298119,"TERMINAL",0,0,"[K",,terminal_output
|
| 78 |
+
77,298266,"TERMINAL",0,0,"[K",,terminal_output
|
| 79 |
+
78,298379,"TERMINAL",0,0,"[K",,terminal_output
|
| 80 |
+
79,298493,"TERMINAL",0,0,"[K",,terminal_output
|
| 81 |
+
80,298649,"TERMINAL",0,0,"[K",,terminal_output
|
| 82 |
+
81,298774,"TERMINAL",0,0,"[K",,terminal_output
|
| 83 |
+
82,298990,"TERMINAL",0,0,"p",,terminal_output
|
| 84 |
+
83,299131,"TERMINAL",0,0,"y",,terminal_output
|
| 85 |
+
84,299351,"TERMINAL",0,0,"t",,terminal_output
|
| 86 |
+
85,299403,"TERMINAL",0,0,"h",,terminal_output
|
| 87 |
+
86,299606,"TERMINAL",0,0,"o",,terminal_output
|
| 88 |
+
87,299785,"TERMINAL",0,0,"n",,terminal_output
|
| 89 |
+
88,299839,"TERMINAL",0,0," ",,terminal_output
|
| 90 |
+
89,300340,"TERMINAL",0,0,"[K",,terminal_output
|
| 91 |
+
90,300490,"TERMINAL",0,0,"[K",,terminal_output
|
| 92 |
+
91,300640,"TERMINAL",0,0,"[K",,terminal_output
|
| 93 |
+
92,300793,"TERMINAL",0,0,"[K",,terminal_output
|
| 94 |
+
93,300915,"TERMINAL",0,0,"[K",,terminal_output
|
| 95 |
+
94,301091,"TERMINAL",0,0,"[K",,terminal_output
|
| 96 |
+
95,301270,"TERMINAL",0,0,"[K",,terminal_output
|
| 97 |
+
96,304897,"slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=10:00:00\n#SBATCH --partition=large\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\nsource .venv/bin/activate\n\npython generate_dataset.py \\n --min_episode_length 1000 \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test \\n --num_episodes 50",shellscript,tab
|
| 98 |
+
97,315511,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab
|
| 99 |
+
98,317803,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1257,0,"",shellscript,selection_mouse
|
| 100 |
+
99,317823,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1256,0,"",shellscript,selection_command
|
| 101 |
+
100,318630,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1231,0,"",shellscript,selection_mouse
|
| 102 |
+
101,318645,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1230,0,"",shellscript,selection_command
|
| 103 |
+
102,318788,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1230,1,"4",shellscript,selection_mouse
|
| 104 |
+
103,318793,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1231,0,"",shellscript,selection_command
|
| 105 |
+
104,318985,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1229,2,".4",shellscript,selection_mouse
|
| 106 |
+
105,318986,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1224,7,"da/12.4",shellscript,selection_mouse
|
| 107 |
+
106,318987,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1215,16," devel/cuda/12.4",shellscript,selection_mouse
|
| 108 |
+
107,318987,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1212,19,"oad devel/cuda/12.4",shellscript,selection_mouse
|
| 109 |
+
108,318988,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1210,21,"nload devel/cuda/12.4",shellscript,selection_mouse
|
| 110 |
+
109,318988,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1207,24,"e unload devel/cuda/12.4",shellscript,selection_mouse
|
| 111 |
+
110,318989,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1205,26,"ule unload devel/cuda/12.4",shellscript,selection_mouse
|
| 112 |
+
111,318990,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1173,58,"odule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4",shellscript,selection_mouse
|
| 113 |
+
112,318990,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1172,59,"module unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4",shellscript,selection_mouse
|
| 114 |
+
113,323244,"TERMINAL",0,0,"[7mmodule unload mpi/openmpi/5.0[27m\r\n\r[7mmodule unload devel/cuda/12.4[27m",,terminal_output
|
| 115 |
+
114,323792,"TERMINAL",0,0,"[A[C[C[C[C[C[C[C[Cmodule unload mpi/openmpi/5.0\r\n\rmodule unload devel/cuda/12.4\r\n[?2004l\r",,terminal_output
|
| 116 |
+
115,323980,"TERMINAL",0,0,"]0;tum_cte0515@hkn0818:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0818 jafar]$ ",,terminal_output
|
| 117 |
+
116,325638,"TERMINAL",0,0,"p",,terminal_output
|
| 118 |
+
117,325835,"TERMINAL",0,0,"y",,terminal_output
|
| 119 |
+
118,326049,"TERMINAL",0,0,"th",,terminal_output
|
| 120 |
+
119,326267,"TERMINAL",0,0,"o",,terminal_output
|
| 121 |
+
120,326445,"TERMINAL",0,0,"n ",,terminal_output
|
| 122 |
+
121,326587,"TERMINAL",0,0,"s",,terminal_output
|
| 123 |
+
122,326984,"TERMINAL",0,0,"",,terminal_output
|
| 124 |
+
123,327730,"TERMINAL",0,0,"a",,terminal_output
|
| 125 |
+
124,327897,"TERMINAL",0,0,"m",,terminal_output
|
| 126 |
+
125,327962,"TERMINAL",0,0,"ple.py ",,terminal_output
|
| 127 |
+
126,479201,"TERMINAL",0,0,"[K",,terminal_output
|
| 128 |
+
127,479313,"TERMINAL",0,0,"[K",,terminal_output
|
| 129 |
+
128,479457,"TERMINAL",0,0,"",,terminal_output
|
| 130 |
+
129,479719,"TERMINAL",0,0,"s",,terminal_output
|
| 131 |
+
130,479798,"TERMINAL",0,0,"h",,terminal_output
|
| 132 |
+
131,479946,"TERMINAL",0,0," ",,terminal_output
|
| 133 |
+
132,480208,"TERMINAL",0,0,"[7mslurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh[27m",,terminal_output
|
| 134 |
+
133,480569,"TERMINAL",0,0,"\r[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[Cslurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh\r\n[?2004l\r\r\nNote: the module ""mpi/openmpi/5.0"" cannot be unloaded because it was not loaded.\r\n\r\n",,terminal_output
|
| 135 |
+
134,480642,"TERMINAL",0,0,"\r\nNote: the module ""devel/cuda/12.4"" cannot be unloaded because it was not loaded.\r\n\r\n",,terminal_output
|
| 136 |
+
135,495073,"TERMINAL",0,0,"[91m╭─[0m[91m [0m[1;91mParsing error[0m[91m [0m[91m───────────────────────────[0m[91m─╮[0m\r\n[91m│[0m Argument --checkpoint: expected 1 argument [91m│[0m\r\n[91m│[0m [31m──────────────────────────────────────────[0m [91m│[0m\r\n[91m│[0m For full helptext, run [1msample.py --help[0m [91m│[0m\r\n[91m╰────────────────────────────────────────────╯[0m\r\n",,terminal_output
|
| 137 |
+
136,495881,"TERMINAL",0,0,"]0;tum_cte0515@hkn0818:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0818 jafar]$ ",,terminal_output
|
| 138 |
+
137,515622,"TERMINAL",0,0,"bash",,terminal_focus
|
| 139 |
+
138,519030,"TERMINAL",0,0,"cd $ws_dir",,terminal_command
|
| 140 |
+
139,519130,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output
|
| 141 |
+
140,519620,"TERMINAL",0,0,"ls",,terminal_command
|
| 142 |
+
141,519672,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 143 |
+
142,519736,"TERMINAL",0,0,"[0m[01;34mcheckpoints[0m count_items.sh [01;34mdata[0m [01;34mdata_atari[0m [01;34mdata_coinrun[0m [01;36mdata_minecraft[0m [01;34mdata_new[0m [01;34mhuggingface[0m [01;34mlogs[0m possibly_corrupt_files_in_this_workspace.txt [01;34mscripts[0m\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output
|
| 144 |
+
143,521863,"TERMINAL",0,0,"cd checkpoints/",,terminal_command
|
| 145 |
+
144,590615,"TERMINAL",0,0,"srun",,terminal_focus
|
| 146 |
+
145,591123,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",,terminal_output
|
| 147 |
+
146,591321,"TERMINAL",0,0,"\r\n[?2004l\r\r\nNote: the module ""mpi/openmpi/5.0"" cannot be unloaded because it was not loaded.\r\n\r\n\r\nNote: the module ""devel/cuda/12.4"" cannot be unloaded because it was not loaded.\r\n\r\n",,terminal_output
|
| 148 |
+
147,605029,"TERMINAL",0,0,"2025-09-09 15:12:38.956720: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 149 |
+
148,606875,"TERMINAL",0,0,"2025-09-09 15:12:40.801380: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 150 |
+
149,613803,"TERMINAL",0,0,"2025-09-09 15:12:47.615654: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 151 |
+
150,620537,"TERMINAL",0,0,"2025-09-09 15:12:54.464597: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 152 |
+
151,623582,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output
|
| 153 |
+
152,632767,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output
|
| 154 |
+
153,636609,"TERMINAL",0,0,"2025-09-09 15:13:10.533420: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 155 |
+
154,646296,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output
|
| 156 |
+
155,650947,"TERMINAL",0,0,"2025-09-09 15:13:24.859501: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 157 |
+
156,652638,"TERMINAL",0,0,"2025-09-09 15:13:26.514152: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 158 |
+
157,660390,"TERMINAL",0,0,"Frame 3\r\n",,terminal_output
|
| 159 |
+
158,665267,"TERMINAL",0,0,"2025-09-09 15:13:39.121931: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 160 |
+
159,666778,"TERMINAL",0,0,"2025-09-09 15:13:40.677335: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 161 |
+
160,674375,"TERMINAL",0,0,"Frame 4\r\n",,terminal_output
|
| 162 |
+
161,679274,"TERMINAL",0,0,"2025-09-09 15:13:53.202302: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 163 |
+
162,680926,"TERMINAL",0,0,"2025-09-09 15:13:54.848446: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 164 |
+
163,688985,"TERMINAL",0,0,"Frame 5\r\n",,terminal_output
|
| 165 |
+
164,694378,"TERMINAL",0,0,"2025-09-09 15:14:08.272770: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 166 |
+
165,695951,"TERMINAL",0,0,"2025-09-09 15:14:09.862514: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 167 |
+
166,703627,"TERMINAL",0,0,"Frame 6\r\n",,terminal_output
|
| 168 |
+
167,709262,"TERMINAL",0,0,"2025-09-09 15:14:23.141537: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 169 |
+
168,710977,"TERMINAL",0,0,"2025-09-09 15:14:24.889095: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 170 |
+
169,718590,"TERMINAL",0,0,"Frame 7\r\n",,terminal_output
|
| 171 |
+
170,723917,"TERMINAL",0,0,"2025-09-09 15:14:37.836699: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 172 |
+
171,725556,"TERMINAL",0,0,"2025-09-09 15:14:39.463510: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 173 |
+
172,733384,"TERMINAL",0,0,"Frame 8\r\n",,terminal_output
|
| 174 |
+
173,738800,"TERMINAL",0,0,"2025-09-09 15:14:52.727758: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 175 |
+
174,740402,"TERMINAL",0,0,"2025-09-09 15:14:54.299840: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 176 |
+
175,749092,"TERMINAL",0,0,"Frame 9\r\n",,terminal_output
|
| 177 |
+
176,754734,"TERMINAL",0,0,"2025-09-09 15:15:08.632875: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 178 |
+
177,756531,"TERMINAL",0,0,"2025-09-09 15:15:10.383487: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 179 |
+
178,764967,"TERMINAL",0,0,"Frame 10\r\n",,terminal_output
|
| 180 |
+
179,770735,"TERMINAL",0,0,"2025-09-09 15:15:24.664649: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 181 |
+
180,772393,"TERMINAL",0,0,"2025-09-09 15:15:26.310567: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 182 |
+
181,781170,"TERMINAL",0,0,"Frame 11\r\n",,terminal_output
|
| 183 |
+
182,787078,"TERMINAL",0,0,"2025-09-09 15:15:40.965308: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 184 |
+
183,788711,"TERMINAL",0,0,"2025-09-09 15:15:42.606920: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 185 |
+
184,797254,"TERMINAL",0,0,"Frame 12\r\n",,terminal_output
|
| 186 |
+
185,803163,"TERMINAL",0,0,"2025-09-09 15:15:56.991416: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 187 |
+
186,804697,"TERMINAL",0,0,"2025-09-09 15:15:58.566806: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 188 |
+
187,813965,"TERMINAL",0,0,"Frame 13\r\n",,terminal_output
|
| 189 |
+
188,819878,"TERMINAL",0,0,"2025-09-09 15:16:13.806078: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 190 |
+
189,821645,"TERMINAL",0,0,"2025-09-09 15:16:15.573480: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 191 |
+
190,830300,"TERMINAL",0,0,"Frame 14\r\n",,terminal_output
|
| 192 |
+
191,835874,"TERMINAL",0,0,"2025-09-09 15:16:29.796506: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 193 |
+
192,837625,"TERMINAL",0,0,"2025-09-09 15:16:31.539961: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 194 |
+
193,846471,"TERMINAL",0,0,"Frame 15\r\n",,terminal_output
|
| 195 |
+
194,852495,"TERMINAL",0,0,"2025-09-09 15:16:46.418156: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 196 |
+
195,854207,"TERMINAL",0,0,"2025-09-09 15:16:48.135477: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
|
| 197 |
+
196,865530,"TERMINAL",0,0,"SSIM: 0.6239567399024963\r\n",,terminal_output
|
| 198 |
+
197,869800,"TERMINAL",0,0,"]0;tum_cte0515@hkn0818:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0818 jafar]$ ",,terminal_output
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-629e4b25-2201-4663-a7f2-936116295b151757499356136-2025_09_10-12.16.59.652/source.csv
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
1,5,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nimport optax\nimport orbax\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n name: str = """"\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n wandb_id: str = """"\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""dropout_rng""]}\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n rng = jax.random.PRNGKey(args.seed)\n if args.log:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n 
latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_resolution, args.image_resolution, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n init_params = restore_genie_components(\n init_params, args.tokenizer_checkpoint, args.lam_checkpoint\n )\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n # --- TRAIN LOOP ---\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n step = 0\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _mask_rng = jax.random.split(rng, 3)\n inputs = dict(\n videos=videos,\n action=jnp.zeros((args.batch_size, args.seq_len), dtype=jnp.float32),\n dropout_rng=_rng,\n mask_rng=_mask_rng,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[15])),\n recon=wandb.Image(np.asarray(recon_seq[15])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab
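The dynamics_loss_fn in the recorded train_dynamics.py averages cross-entropy and accuracy only over masked tokens. A minimal sketch of that masked-mean reduction, with toy shapes (batch, tokens, vocab sizes are illustrative):
import jax.numpy as jnp
import optax

logits = jnp.zeros((2, 4, 8))                    # (batch, tokens, vocab)
labels = jnp.zeros((2, 4), dtype=jnp.int32)      # integer targets
mask = jnp.array([[1, 1, 0, 0], [1, 0, 0, 0]], dtype=jnp.float32)

ce = optax.softmax_cross_entropy_with_integer_labels(logits, labels)
masked_ce = (mask * ce).sum() / mask.sum()       # loss over masked tokens only
acc = (mask * (logits.argmax(-1) == labels)).sum() / mask.sum()
print(masked_ce, acc)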
|
| 3 |
+
2,2706,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:16:59 PM [info] Activating crowd-code\n12:16:59 PM [info] Recording started\n12:16:59 PM [info] Initializing git provider using file system watchers...\n12:17:00 PM [info] Git repository found\n12:17:00 PM [info] Git provider initialized successfully\n12:17:00 PM [info] Initial git state: [object Object]\n",Log,tab
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6488ed25-a64b-4b96-ae90-de59d09eaf2d1759672300294-2025_10_05-15.52.33.721/source.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-65955a28-c516-4123-abaf-6681358bdea31753192468219-2025_07_22-15.55.21.96/source.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6aec90c8-8d95-4bad-afcb-92c28c6ff5241753889052956-2025_07_30-17.24.41.914/source.csv
ADDED
|
@@ -0,0 +1,33 @@
|
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
2,825,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:24:41 PM [info] Activating crowd-code\n5:24:41 PM [info] Recording started\n5:24:41 PM [info] Initializing git provider using file system watchers...\n",Log,tab
|
| 3 |
+
3,828,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"5:24:42 PM [info] Git repository found\n5:24:42 PM [info] Git provider initialized successfully\n",Log,content
|
| 4 |
+
4,1007,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"5:24:42 PM [info] Initial git state: [object Object]\n",Log,content
|
| 5 |
+
5,3381,"TERMINAL",0,0,"git branch",,terminal_command
|
| 6 |
+
6,3453,"TERMINAL",0,0,"]633;E;2025-07-30 17:24:45 git branch;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1h=\r add-wandb-name-and-tags[m[m\r\n before-nnx[m[m\r\n causal-st-transformer[m[m\r\n causal-transformer-dynamics-model[m[m\r\n convert-to-jax-array-in-iter[m[m\r\n correct-batched-sampling[m[m\r\n dev[m[m\r\n dont-let-tf-see-gpu[m[m\r\n feat/explicit-image-dims[m[m\r\n fix-action-padding-lam-future-information-access[m[m\r\n fix-sampling[m[m\r\n fix-transformer-forwardpass[m[m\r\n fix/spatiotemporal-pe-once-in-STTransformer[m[m\r\n grad-norm-log-and-clip[m[m\r\n grain-dataloader[m[m\r\n logging-variants[m[m\r\n lr-schedules[m[m\r\n main[m[m\r\n maskgit-different-maskprob-per-sample[m[m\r\n metrics-logging-for-dynamics-model[m[m\r\n monkey-patch[m[m\r\n* [32mnew-arch-sampling[m[m\r\n preprocess_video[m[m\r\n refactor-tmp[m[m\r\n revised-dataloader[m[m\r\n runner[m[m\r\n runner-grain[m[m\r\n sample-from-different-topologies[m[m\r\n speedup-tfrecord-preprocessing[m[m\r\n tmp[m[m\r\n:[K",,terminal_output
|
| 7 |
+
7,8802,"TERMINAL",0,0,"[H[2J[H[H[2J[H add-wandb-name-and-tags[m[m\r\n before-nnx[m[m\r\n causal-st-transformer[m[m\r\n causal-transformer-dynamics-model[m[m\r\n convert-to-jax-array-in-iter[m[m\r\n correct-batched-sampling[m[m\r\n dev[m[m\r\n dont-let-tf-see-gpu[m[m\r\n feat/explicit-image-dims[m[m\r\n fix-action-padding-lam-future-information-access[m[m\r\n fix-sampling[m[m\r\n fix-transformer-forwardpass[m[m\r\n fix/spatiotemporal-pe-once-in-STTransformer[m[m\r\n grad-norm-log-and-clip[m[m\r\n grain-dataloader[m[m\r\n logging-variants[m[m\r\n lr-schedules[m[m\r\n main[m[m\r\n maskgit-different-maskprob-per-sample[m[m\r\n metrics-logging-for-dynamics-model[m[m\r\n monkey-patch[m[m\r\n* [32mnew-arch-sampling[m[m\r\n preprocess_video[m[m\r\n refactor-tmp[m[m\r\n revised-dataloader[m[m\r\n runner[m[m\r\n runner-grain[m[m\r\n sample-from-different-topologies[m[m\r\n speedup-tfrecord-preprocessing[m[m\r\n tmp[m[m\r\n:[K",,terminal_output
|
| 8 |
+
8,9676,"TERMINAL",0,0,"\r[K[?1l>\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;130",,terminal_output
|
| 9 |
+
9,12549,"TERMINAL",0,0,"git checkout fix/spatiotemporal-pe-once-in-STTransformer",,terminal_command
|
| 10 |
+
10,12581,"TERMINAL",0,0,"]633;E;2025-07-30 17:24:54 git checkout fix/spatiotemporal-pe-once-in-STTransformer;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
|
| 11 |
+
11,12867,"TERMINAL",0,0,"g",,terminal_output
|
| 12 |
+
12,12874,"TERMINAL",0,0,"[?25l[31;2Hi[31;3H[?25h",,terminal_output
|
| 13 |
+
13,12941,"TERMINAL",0,0,"[?25l[31;3H[X[0mSwitched to branch 'fix/spatiotemporal-pe-once-in-STTransformer'\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0[?25h",,terminal_output
|
| 14 |
+
14,13779,"TERMINAL",0,0,"git diff",,terminal_command
|
| 15 |
+
15,15343,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"Switched from branch 'new-arch-sampling' to 'fix/spatiotemporal-pe-once-in-STTransformer'",Log,git_branch_checkout
|
| 16 |
+
16,15499,"extension-output-pdoom-org.crowd-code-#1-crowd-code",298,0,"5:24:57 PM [info] Branch checkout detected: new-arch-sampling -> fix/spatiotemporal-pe-once-in-STTransformer\n5:24:57 PM [info] Recording git checkout: Switched from branch 'new-arch-sampling' to 'fix/spatiotemporal-pe-once-in-STTransformer'\n5:24:57 PM [info] Resetting file cache due to branch checkout\n",Log,content
|
| 17 |
+
17,16601,"TERMINAL",0,0,"git checkout main",,terminal_command
|
| 18 |
+
18,16637,"TERMINAL",0,0,"]633;E;2025-07-30 17:24:58 git checkout main;adbf53fe-397b-40d3-9339-94ea79afad56]633;CSwitched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output
|
| 19 |
+
19,20343,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"Switched from branch 'fix/spatiotemporal-pe-once-in-STTransformer' to 'main'",Log,git_branch_checkout
|
| 20 |
+
20,20470,"extension-output-pdoom-org.crowd-code-#1-crowd-code",601,0,"5:25:02 PM [info] Branch checkout detected: fix/spatiotemporal-pe-once-in-STTransformer -> main\n5:25:02 PM [info] Recording git checkout: Switched from branch 'fix/spatiotemporal-pe-once-in-STTransformer' to 'main'\n5:25:02 PM [info] Resetting file cache due to branch checkout\n",Log,content
|
| 21 |
+
21,78754,"TERMINAL",0,0,"git checkout fix/spatiotemporal-pe-once-in-STTransformer",,terminal_command
|
| 22 |
+
22,80243,"TERMINAL",0,0,"git push",,terminal_command
|
| 23 |
+
23,80344,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"Switched from branch 'main' to 'fix/spatiotemporal-pe-once-in-STTransformer'",Log,git_branch_checkout
|
| 24 |
+
24,80522,"extension-output-pdoom-org.crowd-code-#1-crowd-code",878,0,"5:26:02 PM [info] Branch checkout detected: main -> fix/spatiotemporal-pe-once-in-STTransformer\n5:26:02 PM [info] Recording git checkout: Switched from branch 'main' to 'fix/spatiotemporal-pe-once-in-STTransformer'\n5:26:02 PM [info] Resetting file cache due to branch checkout\n",Log,content
|
| 25 |
+
25,83258,"TERMINAL",0,0,"git push --set-upstream origin fix/spatiotemporal-pe-once-in-STTransformer",,terminal_command
|
| 26 |
+
26,83333,"TERMINAL",0,0,"]633;E;2025-07-30 17:26:05 git push --set-upstream origin fix/spatiotemporal-pe-once-in-STTransformer;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
|
| 27 |
+
27,84677,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% (3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 870 bytes | 870.00 KiB/s, done.\r\nTotal 4 (delta 3), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output
|
| 28 |
+
28,84825,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/3)[K\rremote: Resolving deltas: 33% (1/3)[K\rremote: Resolving deltas: 66% (2/3)[K\rremote: Resolving deltas: 100% (3/3)[K\rremote: Resolving deltas: 100% (3/3), completed with 3 local objects.[K\r\nremote: This repository moved. Please use the new location:[K\r\nremote: git@github.com:p-doom/jasmine.git[K\r\n",,terminal_output
|
| 29 |
+
29,84974,"TERMINAL",0,0,"remote: \r\nremote: Create a pull request for 'fix/spatiotemporal-pe-once-in-STTransformer' on GitHub by visiting:[K\r\nremote: https://github.com/p-doom/jasmine/pull/new/fix/spatiotemporal-pe-once-in-STTransformer[K\r\nremote: \r\n",,terminal_output
|
| 30 |
+
30,85061,"TERMINAL",0,0,"To github.com:p-doom/jafar.git\r\n * [new branch] fix/spatiotemporal-pe-once-in-STTransformer -> fix/spatiotemporal-pe-once-in-STTransformer\r\nbranch 'fix/spatiotemporal-pe-once-in-STTransformer' set up to track 'origin/fix/spatiotemporal-pe-once-in-STTransformer'.\r\n",,terminal_output
|
| 31 |
+
31,85082,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output
|
| 32 |
+
32,167394,"genie.py",0,0,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n 
lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n D: B * T * N\n E: B * (T - 1)\n """"""\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n latent_actions_E = batch[""latent_actions""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_BSN, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n\n # --- Construct + encode video ---\n vid_embed_BSNM = self.dynamics.patch_embed(token_idxs_BSN)\n mask_token_111M = self.dynamics.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, 1))\n act_embed_BSm1M = self.dynamics.action_up(action_tokens_BSm1L)\n # FIXME (f.srambical): We must not pad the actions, but remove the last frame (https://github.com/p-doom/jasmine/issues/122)\n vid_embed_BSNM += jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = self.dynamics.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n 
sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) > num_unmasked_tokens\n sorted_idxs_BSN = jnp.argsort(final_token_probs_BSN, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_N))\n new_mask_BSN = mask_update_fn(mask_BSN, sorted_idxs_BSN)\n\n new_carry = (rng, token_idxs_BSN, new_mask_BSN, action_tokens_EL)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs_BSN = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask_S = jnp.arange(seq_len) >= step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output = self.lam.vq_encode(video_BTHWC, training=training)\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n 
model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, dummy_tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n optimizer.model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, dummy_tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n optimizer.model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del optimizer.model.lam.decoder\n lam_checkpoint_manager.close()\n\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab
|
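The confidence-based unmasking in maskgit_step_fn above follows a cosine schedule: early steps commit only the most confident tokens, and the final step commits all of them. A minimal standalone sketch of that schedule (the steps and N values below are hypothetical illustration values, not taken from the recording):

import jax.numpy as jnp

steps, N = 25, 360  # hypothetical: refinement steps, patches per frame
for step in range(steps):
    # Fraction of tokens that should remain masked after this step.
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    # Keep the highest-confidence predictions; re-mask the rest.
    num_unmasked = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)
    print(f"step {step}: keep {int(num_unmasked)} of {N} tokens")

At step == steps - 1 the ratio reaches cos(pi / 2) == 0, so every token is committed.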
| 33 |
+
33,168545,"genie.py",6235,0,"",python,selection_command
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-761e4728-7320-4c5d-bc55-ad231839bb781753709851371-2025_07_28-15.39.08.844/source.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-76d20b24-d9be-4730-bca0-3f5c7d0460a01758996810958-2025_09_27-20.14.06.310/source.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c8743a1-f55a-4e45-b7aa-0b3df3c9f3c91752835699286-2025_07_18-12.49.02.294/source.csv
ADDED
|
@@ -0,0 +1,394 @@
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
1,4,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:,:,1:]\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)",python,tab
|
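During training, DynamicsMaskGIT above draws a random masking rate per batch and then masks tokens Bernoulli-wise, never masking the first (conditioning) frame. A minimal sketch of that masking step, assuming hypothetical shapes and mask limit:

import jax
import jax.numpy as jnp

B, T, N, mask_limit = 2, 4, 16, 0.5  # hypothetical shapes and mask limit
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
mask_prob = jax.random.uniform(rng1, minval=mask_limit)  # scalar in [mask_limit, 1)
mask = jax.random.bernoulli(rng2, mask_prob, (B, T, N))  # per-token Bernoulli mask
mask = mask.at[:, 0].set(False)  # the first frame is always fully visible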
| 3 |
+
2,402,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:49:02 PM [info] Activating crowd-code\n12:49:02 PM [info] Recording started\n12:49:02 PM [info] Initializing git provider using file system watchers...\n12:49:02 PM [info] Git repository found\n12:49:02 PM [info] Git provider initialized successfully\n12:49:02 PM [info] Initial git state: [object Object]\n",Log,tab
|
| 4 |
+
3,3198,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command
|
| 5 |
+
4,3233,"TERMINAL",0,0,"]633;E;2025-07-18 12:49:05 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;3a42ff33-fe06-4226-9417-7f10804f0a18]633;C",,terminal_output
|
| 6 |
+
5,3297,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output
|
| 7 |
+
6,4955,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab
|
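The vq_encode method of LatentActionModel above prepends a learned action slot to each frame's patch tokens and, after encoding, reads that slot out for frames 1..T-1 as the latent action of each transition. A shape walk-through under hypothetical sizes (zeros stand in for real activations):

import jax.numpy as jnp

B, T, N, D = 2, 4, 9, 48  # hypothetical: batch, frames, patches, patch_token_dim
patches = jnp.zeros((B, T, N, D))
action_in = jnp.zeros((1, 1, 1, D))  # stands in for the learned action_in param
action_pad = jnp.broadcast_to(action_in, (B, T, 1, D))
padded_patches = jnp.concatenate((action_pad, patches), axis=2)  # (B, T, N+1, D)
# After the ST-transformer encoder, slot 0 of frames 1..T-1 carries the
# latent action for the transition into each of those frames:
z = padded_patches[:, 1:, 0]  # (B, T-1, D)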
| 8 |
+
7,11679,"models/lam.py",2162,0,"",python,selection_mouse
|
| 9 |
+
8,11804,"models/lam.py",2159,3,"jnp",python,selection_mouse
|
| 10 |
+
9,14630,"models/lam.py",2060,0,"",python,selection_mouse
|
| 11 |
+
10,14633,"models/lam.py",2059,0,"",python,selection_command
|
| 12 |
+
11,15171,"models/lam.py",2107,0,"",python,selection_mouse
|
| 13 |
+
12,15318,"models/lam.py",2096,20,"video_action_patches",python,selection_mouse
|
| 14 |
+
13,71040,"models/lam.py",2782,0,"",python,selection_mouse
|
| 15 |
+
14,71169,"models/lam.py",2778,14,"padded_patches",python,selection_mouse
|
| 16 |
+
15,72501,"models/lam.py",2696,0,"",python,selection_mouse
|
| 17 |
+
16,72610,"models/lam.py",2695,2,"((",python,selection_mouse
|
| 18 |
+
17,79356,"models/lam.py",2538,0,"",python,selection_mouse
|
| 19 |
+
18,79489,"models/lam.py",2534,8,"patchify",python,selection_mouse
|
| 20 |
+
19,80076,"models/lam.py",2529,0,"",python,selection_mouse
|
| 21 |
+
20,80227,"models/lam.py",2524,7,"patches",python,selection_mouse
|
| 22 |
+
21,81420,"models/lam.py",2529,0,"",python,selection_mouse
|
| 23 |
+
22,82156,"models/lam.py",2524,7,"patches",python,selection_mouse
|
| 24 |
+
23,88062,"models/lam.py",2669,0,"",python,selection_mouse
|
| 25 |
+
24,88219,"models/lam.py",2663,14,"padded_patches",python,selection_mouse
|
| 26 |
+
25,95005,"models/lam.py",2831,0,"",python,selection_mouse
|
| 27 |
+
26,95165,"models/lam.py",2831,6,"action",python,selection_mouse
|
| 28 |
+
27,95769,"models/lam.py",2669,0,"",python,selection_mouse
|
| 29 |
+
28,95909,"models/lam.py",2663,14,"padded_patches",python,selection_mouse
|
| 30 |
+
29,96023,"models/lam.py",2655,72," padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n",python,selection_mouse
|
| 31 |
+
30,96634,"models/lam.py",2669,0,"",python,selection_mouse
|
| 32 |
+
31,96635,"models/lam.py",2663,14,"padded_patches",python,selection_mouse
|
| 33 |
+
32,156184,"models/lam.py",2933,0,"",python,selection_mouse
|
| 34 |
+
33,156229,"models/lam.py",2932,0,"",python,selection_command
|
| 35 |
+
34,156811,"models/lam.py",3093,0,"",python,selection_mouse
|
| 36 |
+
35,156852,"models/lam.py",3092,0,"",python,selection_command
|
| 37 |
+
36,157452,"models/lam.py",3170,0,"",python,selection_mouse
|
| 38 |
+
37,157453,"models/lam.py",3169,0,"",python,selection_command
|
| 39 |
+
38,158312,"models/lam.py",2933,0,"",python,selection_mouse
|
| 40 |
+
39,158313,"models/lam.py",2932,0,"",python,selection_command
|
| 41 |
+
40,223766,"models/lam.py",2500,0,"",python,selection_mouse
|
| 42 |
+
41,223922,"models/lam.py",2499,6,"videos",python,selection_mouse
|
| 43 |
+
42,224169,"models/lam.py",2499,12,"videos.shape",python,selection_mouse
|
| 44 |
+
43,224209,"models/lam.py",2499,13,"videos.shape[",python,selection_mouse
|
| 45 |
+
44,224256,"models/lam.py",2499,14,"videos.shape[:",python,selection_mouse
|
| 46 |
+
45,224257,"models/lam.py",2499,15,"videos.shape[:2",python,selection_mouse
|
| 47 |
+
46,224301,"models/lam.py",2499,16,"videos.shape[:2]",python,selection_mouse
|
| 48 |
+
47,224636,"models/lam.py",2515,0,"",python,selection_mouse
|
| 49 |
+
48,224640,"models/lam.py",2514,0,"",python,selection_command
|
| 50 |
+
49,225043,"models/lam.py",2515,0,"",python,selection_mouse
|
| 51 |
+
50,225045,"models/lam.py",2514,0,"",python,selection_command
|
| 52 |
+
51,225205,"models/lam.py",2514,1,"]",python,selection_mouse
|
| 53 |
+
52,225207,"models/lam.py",2515,0,"",python,selection_command
|
| 54 |
+
53,225270,"models/lam.py",2514,1,"]",python,selection_mouse
|
| 55 |
+
54,225271,"models/lam.py",2510,5,"e[:2]",python,selection_mouse
|
| 56 |
+
55,225271,"models/lam.py",2509,6,"pe[:2]",python,selection_mouse
|
| 57 |
+
56,225272,"models/lam.py",2508,7,"ape[:2]",python,selection_mouse
|
| 58 |
+
57,225300,"models/lam.py",2506,9,"shape[:2]",python,selection_mouse
|
| 59 |
+
58,225348,"models/lam.py",2505,10,".shape[:2]",python,selection_mouse
|
| 60 |
+
59,225392,"models/lam.py",2504,11,"s.shape[:2]",python,selection_mouse
|
| 61 |
+
60,225392,"models/lam.py",2503,12,"os.shape[:2]",python,selection_mouse
|
| 62 |
+
61,225393,"models/lam.py",2502,13,"eos.shape[:2]",python,selection_mouse
|
| 63 |
+
62,225395,"models/lam.py",2501,14,"deos.shape[:2]",python,selection_mouse
|
| 64 |
+
63,225485,"models/lam.py",2500,15,"ideos.shape[:2]",python,selection_mouse
|
| 65 |
+
64,225984,"models/lam.py",2500,0,"",python,selection_mouse
|
| 66 |
+
65,225985,"models/lam.py",2499,6,"videos",python,selection_mouse
|
| 67 |
+
66,226228,"models/lam.py",2499,7,"videos.",python,selection_mouse
|
| 68 |
+
67,226249,"models/lam.py",2499,12,"videos.shape",python,selection_mouse
|
| 69 |
+
68,226295,"models/lam.py",2499,13,"videos.shape[",python,selection_mouse
|
| 70 |
+
69,226338,"models/lam.py",2499,14,"videos.shape[:",python,selection_mouse
|
| 71 |
+
70,226339,"models/lam.py",2499,15,"videos.shape[:2",python,selection_mouse
|
| 72 |
+
71,226348,"models/lam.py",2499,16,"videos.shape[:2]",python,selection_mouse
|
| 73 |
+
72,226644,"models/lam.py",2515,0,"",python,selection_mouse
|
| 74 |
+
73,226648,"models/lam.py",2514,0,"",python,selection_command
|
| 75 |
+
74,226797,"models/lam.py",2515,0,"",python,selection_mouse
|
| 76 |
+
75,226799,"models/lam.py",2514,0,"",python,selection_command
|
| 77 |
+
76,226966,"models/lam.py",2514,1,"]",python,selection_mouse
|
| 78 |
+
77,227008,"models/lam.py",2515,0,"",python,selection_command
|
| 79 |
+
78,227009,"models/lam.py",2514,1,"]",python,selection_mouse
|
| 80 |
+
79,227013,"models/lam.py",2512,3,":2]",python,selection_mouse
|
| 81 |
+
80,227054,"models/lam.py",2506,9,"shape[:2]",python,selection_mouse
|
| 82 |
+
81,227100,"models/lam.py",2505,10,".shape[:2]",python,selection_mouse
|
| 83 |
+
82,227125,"models/lam.py",2499,16,"videos.shape[:2]",python,selection_mouse
|
| 84 |
+
83,227722,"models/lam.py",2500,0,"",python,selection_mouse
|
| 85 |
+
84,228365,"models/lam.py",2383,0,"",python,selection_mouse
|
| 86 |
+
85,228547,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 87 |
+
86,230444,"models/lam.py",2396,0,"",python,selection_mouse
|
| 88 |
+
87,230609,"models/lam.py",2393,6,"videos",python,selection_mouse
|
| 89 |
+
88,231398,"models/lam.py",2384,0,"",python,selection_mouse
|
| 90 |
+
89,231528,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 91 |
+
90,232096,"models/lam.py",2396,0,"",python,selection_mouse
|
| 92 |
+
91,232248,"models/lam.py",2393,6,"videos",python,selection_mouse
|
| 93 |
+
92,232911,"models/lam.py",2384,0,"",python,selection_mouse
|
| 94 |
+
93,233063,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 95 |
+
94,233900,"models/lam.py",2398,0,"",python,selection_mouse
|
| 96 |
+
95,234064,"models/lam.py",2393,6,"videos",python,selection_mouse
|
| 97 |
+
96,234808,"models/lam.py",2384,0,"",python,selection_mouse
|
| 98 |
+
97,234931,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 99 |
+
98,235535,"models/lam.py",2471,0,"",python,selection_mouse
|
| 100 |
+
99,236162,"models/lam.py",2396,0,"",python,selection_mouse
|
| 101 |
+
100,236277,"models/lam.py",2393,6,"videos",python,selection_mouse
|
| 102 |
+
101,237271,"models/lam.py",2382,0,"",python,selection_mouse
|
| 103 |
+
102,237422,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 104 |
+
103,239596,"models/lam.py",2396,0,"",python,selection_mouse
|
| 105 |
+
104,239714,"models/lam.py",2393,6,"videos",python,selection_mouse
|
| 106 |
+
105,243202,"models/lam.py",2460,0,"",python,selection_mouse
|
| 107 |
+
106,243586,"models/lam.py",2382,0,"",python,selection_mouse
|
| 108 |
+
107,243748,"models/lam.py",2377,9,"vq_encode",python,selection_mouse
|
| 109 |
+
108,247136,"models/dynamics.py",0,0,"",python,tab
|
| 110 |
+
109,251589,"models/dynamics.py",2886,0,"",python,selection_mouse
|
| 111 |
+
110,251711,"models/dynamics.py",2883,3,"pad",python,selection_mouse
|
| 112 |
+
111,252204,"models/dynamics.py",2829,0,"",python,selection_mouse
|
| 113 |
+
112,252345,"models/dynamics.py",2823,9,"action_up",python,selection_mouse
|
| 114 |
+
113,258257,"models/dynamics.py",3031,0,"",python,selection_mouse
|
| 115 |
+
114,258490,"models/dynamics.py",3031,1,"l",python,selection_mouse
|
| 116 |
+
115,258506,"models/dynamics.py",3031,3,"lf.",python,selection_mouse
|
| 117 |
+
116,258553,"models/dynamics.py",3031,4,"lf.d",python,selection_mouse
|
| 118 |
+
117,258581,"models/dynamics.py",3031,5,"lf.dy",python,selection_mouse
|
| 119 |
+
118,258582,"models/dynamics.py",3031,6,"lf.dyn",python,selection_mouse
|
| 120 |
+
119,260088,"models/dynamics.py",3040,0,"",python,selection_mouse
|
| 121 |
+
120,263134,"models/dynamics.py",2987,0,"",python,selection_mouse
|
| 122 |
+
121,264608,"models/dynamics.py",2830,0,"",python,selection_mouse
|
| 123 |
+
122,264753,"models/dynamics.py",2823,9,"action_up",python,selection_mouse
|
| 124 |
+
123,275701,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are 
detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(30):\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n 
MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: 
TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is 
loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab
|
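MaskGITStep above scores each sampled token by gathering its softmax probability with a triple vmap over the batch, frame, and patch axes. A self-contained sketch of that gather under hypothetical shapes:

import jax
import jax.numpy as jnp

B, S, N, V = 2, 3, 4, 8  # hypothetical: batch, sequence, patches, vocab size
final_logits = jnp.ones((B, S, N, V))
sampled_token_idxs = jnp.zeros((B, S, N), dtype=jnp.int32)
# The innermost call indexes a (V,)-vector with a scalar token id; the
# three vmaps lift it over the leading B, S, and N axes.
gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))
final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)  # (B, S, N)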
| 125 |
+
124,307289,"genie.py",3010,0,"",python,selection_mouse
|
| 126 |
+
125,307857,"genie.py",2931,0,"",python,selection_mouse
|
| 127 |
+
126,308409,"genie.py",3020,0,"",python,selection_mouse
|
| 128 |
+
127,308566,"genie.py",3017,9,"vq_encode",python,selection_mouse
|
| 129 |
+
128,409573,"genie.py",3604,0,"",python,selection_mouse
|
| 130 |
+
129,409576,"genie.py",3603,0,"",python,selection_command
|
| 131 |
+
130,410046,"genie.py",3772,0,"",python,selection_mouse
|
| 132 |
+
131,410054,"genie.py",3771,0,"",python,selection_command
|
| 133 |
+
132,410559,"genie.py",3797,0,"",python,selection_mouse
|
| 134 |
+
133,411067,"genie.py",3772,0,"",python,selection_mouse
|
| 135 |
+
134,411080,"genie.py",3771,0,"",python,selection_command
|
| 136 |
+
135,412609,"TERMINAL",0,0,"bash",,terminal_focus
|
| 137 |
+
136,464986,"models/dynamics.py",0,0,"",python,tab
|
| 138 |
+
137,469860,"models/lam.py",0,0,"",python,tab
|
| 139 |
+
138,473097,"models/lam.py",2697,0,"",python,selection_mouse
|
| 140 |
+
139,473666,"models/lam.py",2613,0,"",python,selection_mouse
|
| 141 |
+
140,475909,"models/lam.py",2654,0,"\n ",python,content
|
| 142 |
+
141,476310,"models/lam.py",2663,0,"#",python,content
|
| 143 |
+
142,476311,"models/lam.py",2664,0,"",python,selection_keyboard
|
| 144 |
+
143,476601,"models/lam.py",2664,0," ",python,content
|
| 145 |
+
144,476602,"models/lam.py",2665,0,"",python,selection_keyboard
|
| 146 |
+
145,477268,"models/lam.py",2665,0,"D",python,content
|
| 147 |
+
146,477269,"models/lam.py",2666,0,"",python,selection_keyboard
|
| 148 |
+
147,477371,"models/lam.py",2666,0,"O",python,content
|
| 149 |
+
148,477373,"models/lam.py",2667,0,"",python,selection_keyboard
|
| 150 |
+
149,477751,"models/lam.py",2667,0," ",python,content
|
| 151 |
+
150,477752,"models/lam.py",2668,0,"",python,selection_keyboard
|
| 152 |
+
151,478055,"models/lam.py",2667,1,"",python,content
|
| 153 |
+
152,478165,"models/lam.py",2666,1,"",python,content
|
| 154 |
+
153,478312,"models/lam.py",2665,1,"",python,content
|
| 155 |
+
154,479439,"models/lam.py",2665,0,"F",python,content
|
| 156 |
+
155,479440,"models/lam.py",2666,0,"",python,selection_keyboard
|
| 157 |
+
156,479576,"models/lam.py",2666,0,"I",python,content
|
| 158 |
+
157,479577,"models/lam.py",2667,0,"",python,selection_keyboard
|
| 159 |
+
158,479721,"models/lam.py",2667,0,"X",python,content
|
| 160 |
+
159,479722,"models/lam.py",2668,0,"",python,selection_keyboard
|
| 161 |
+
160,480012,"models/lam.py",2668,0,"M",python,content
|
| 162 |
+
161,480013,"models/lam.py",2669,0,"",python,selection_keyboard
|
| 163 |
+
162,480115,"models/lam.py",2669,0,"E",python,content
|
| 164 |
+
163,480116,"models/lam.py",2670,0,"",python,selection_keyboard
|
| 165 |
+
164,480284,"models/lam.py",2670,0," ",python,content
|
| 166 |
+
165,480285,"models/lam.py",2671,0,"",python,selection_keyboard
|
| 167 |
+
166,480618,"models/lam.py",2671,0,"M",python,content
|
| 168 |
+
167,480619,"models/lam.py",2672,0,"",python,selection_keyboard
|
| 169 |
+
168,480801,"models/lam.py",2672,0,"I",python,content
|
| 170 |
+
169,480802,"models/lam.py",2673,0,"",python,selection_keyboard
|
| 171 |
+
170,481171,"models/lam.py",2672,1,"",python,content
|
| 172 |
+
171,481546,"models/lam.py",2672,0,"i",python,content
|
| 173 |
+
172,481547,"models/lam.py",2673,0,"",python,selection_keyboard
|
| 174 |
+
173,481820,"models/lam.py",2672,1,"",python,content
|
| 175 |
+
174,482134,"models/lam.py",2671,1,"",python,content
|
| 176 |
+
175,482288,"models/lam.py",2671,0,"m",python,content
|
| 177 |
+
176,482289,"models/lam.py",2672,0,"",python,selection_keyboard
|
| 178 |
+
177,482455,"models/lam.py",2672,0,"i",python,content
|
| 179 |
+
178,482456,"models/lam.py",2673,0,"",python,selection_keyboard
|
| 180 |
+
179,482555,"models/lam.py",2673,0,"h",python,content
|
| 181 |
+
180,482556,"models/lam.py",2674,0,"",python,selection_keyboard
|
| 182 |
+
181,482649,"models/lam.py",2674,0,"i",python,content
|
| 183 |
+
182,482650,"models/lam.py",2675,0,"",python,selection_keyboard
|
| 184 |
+
183,482729,"models/lam.py",2675,0,"r",python,content
|
| 185 |
+
184,482730,"models/lam.py",2676,0,"",python,selection_keyboard
|
| 186 |
+
185,482867,"models/lam.py",2676,0," ",python,content
|
| 187 |
+
186,482868,"models/lam.py",2677,0,"",python,selection_keyboard
|
| 188 |
+
187,483095,"models/lam.py",2677,0,"d",python,content
|
| 189 |
+
188,483096,"models/lam.py",2678,0,"",python,selection_keyboard
|
| 190 |
+
189,483251,"models/lam.py",2678,0,"o",python,content
|
| 191 |
+
190,483252,"models/lam.py",2679,0,"",python,selection_keyboard
|
| 192 |
+
191,483382,"models/lam.py",2679,0," ",python,content
|
| 193 |
+
192,483383,"models/lam.py",2680,0,"",python,selection_keyboard
|
| 194 |
+
193,483533,"models/lam.py",2680,0,"t",python,content
|
| 195 |
+
194,483534,"models/lam.py",2681,0,"",python,selection_keyboard
|
| 196 |
+
195,483598,"models/lam.py",2681,0,"h",python,content
|
| 197 |
+
196,483599,"models/lam.py",2682,0,"",python,selection_keyboard
|
| 198 |
+
197,483724,"models/lam.py",2682,0,"i",python,content
|
| 199 |
+
198,483725,"models/lam.py",2683,0,"",python,selection_keyboard
|
| 200 |
+
199,483772,"models/lam.py",2683,0,"s",python,content
|
| 201 |
+
200,483773,"models/lam.py",2684,0,"",python,selection_keyboard
|
| 202 |
+
201,483880,"models/lam.py",2684,0," ",python,content
|
| 203 |
+
202,483881,"models/lam.py",2685,0,"",python,selection_keyboard
|
| 204 |
+
203,484433,"models/lam.py",2685,0,"t",python,content
|
| 205 |
+
204,484434,"models/lam.py",2686,0,"",python,selection_keyboard
|
| 206 |
+
205,484520,"models/lam.py",2686,0,"h",python,content
|
| 207 |
+
206,484521,"models/lam.py",2687,0,"",python,selection_keyboard
|
| 208 |
+
207,484628,"models/lam.py",2687,0,"e",python,content
|
| 209 |
+
208,484629,"models/lam.py",2688,0,"",python,selection_keyboard
|
| 210 |
+
209,484715,"models/lam.py",2688,0," ",python,content
|
| 211 |
+
210,484716,"models/lam.py",2689,0,"",python,selection_keyboard
|
| 212 |
+
211,484805,"models/lam.py",2689,0,"o",python,content
|
| 213 |
+
212,484806,"models/lam.py",2690,0,"",python,selection_keyboard
|
| 214 |
+
213,484879,"models/lam.py",2690,0,"t",python,content
|
| 215 |
+
214,484880,"models/lam.py",2691,0,"",python,selection_keyboard
|
| 216 |
+
215,485012,"models/lam.py",2691,0,"h",python,content
|
| 217 |
+
216,485012,"models/lam.py",2692,0,"",python,selection_keyboard
|
| 218 |
+
217,485111,"models/lam.py",2692,0,"e",python,content
|
| 219 |
+
218,485112,"models/lam.py",2693,0,"",python,selection_keyboard
|
| 220 |
+
219,485171,"models/lam.py",2693,0,"r",python,content
|
| 221 |
+
220,485173,"models/lam.py",2694,0,"",python,selection_keyboard
|
| 222 |
+
221,485283,"models/lam.py",2694,0," ",python,content
|
| 223 |
+
222,485284,"models/lam.py",2695,0,"",python,selection_keyboard
|
| 224 |
+
223,485667,"models/lam.py",2695,0,"w",python,content
|
| 225 |
+
224,485668,"models/lam.py",2696,0,"",python,selection_keyboard
|
| 226 |
+
225,485881,"models/lam.py",2696,0,"a",python,content
|
| 227 |
+
226,485882,"models/lam.py",2697,0,"",python,selection_keyboard
|
| 228 |
+
227,486046,"models/lam.py",2697,0,"y",python,content
|
| 229 |
+
228,486047,"models/lam.py",2698,0,"",python,selection_keyboard
|
| 230 |
+
229,486139,"models/lam.py",2698,0," ",python,content
|
| 231 |
+
230,486140,"models/lam.py",2699,0,"",python,selection_keyboard
|
| 232 |
+
231,486310,"models/lam.py",2699,0,"a",python,content
|
| 233 |
+
232,486311,"models/lam.py",2700,0,"",python,selection_keyboard
|
| 234 |
+
233,486497,"models/lam.py",2700,0,"r",python,content
|
| 235 |
+
234,486498,"models/lam.py",2701,0,"",python,selection_keyboard
|
| 236 |
+
235,486789,"models/lam.py",2701,0,"o",python,content
|
| 237 |
+
236,486789,"models/lam.py",2702,0,"",python,selection_keyboard
|
| 238 |
+
237,486986,"models/lam.py",2702,0,"u",python,content
|
| 239 |
+
238,486987,"models/lam.py",2703,0,"",python,selection_keyboard
|
| 240 |
+
239,487032,"models/lam.py",2703,0,"n",python,content
|
| 241 |
+
240,487034,"models/lam.py",2704,0,"",python,selection_keyboard
|
| 242 |
+
241,487122,"models/lam.py",2704,0,"d",python,content
|
| 243 |
+
242,487123,"models/lam.py",2705,0,"",python,selection_keyboard
|
| 244 |
+
243,487824,"models/lam.py",2704,0,"",python,selection_command
|
| 245 |
+
244,555157,"models/lam.py",3222,0,"",python,selection_mouse
|
| 246 |
+
245,562923,"models/lam.py",2949,0,"",python,selection_mouse
|
| 247 |
+
246,562925,"models/lam.py",2948,0,"",python,selection_command
|
| 248 |
+
247,563531,"models/lam.py",2950,0,"",python,selection_mouse
|
| 249 |
+
248,564154,"models/lam.py",2934,16," # (B, T-1, E)\n",python,selection_mouse
|
| 250 |
+
249,564172,"models/lam.py",2933,17,"] # (B, T-1, E)\n",python,selection_mouse
|
| 251 |
+
250,564185,"models/lam.py",2932,18,"0] # (B, T-1, E)\n",python,selection_mouse
|
| 252 |
+
251,564200,"models/lam.py",2931,19," 0] # (B, T-1, E)\n",python,selection_mouse
|
| 253 |
+
252,564218,"models/lam.py",2930,20,", 0] # (B, T-1, E)\n",python,selection_mouse
|
| 254 |
+
253,564246,"models/lam.py",2879,71,"nt action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 255 |
+
254,564264,"models/lam.py",2878,72,"ent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 256 |
+
255,564309,"models/lam.py",2877,73,"tent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 257 |
+
256,564354,"models/lam.py",2876,74,"atent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 258 |
+
257,564355,"models/lam.py",2875,75,"latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 259 |
+
258,564355,"models/lam.py",2874,76," latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 260 |
+
259,564356,"models/lam.py",2873,77,"t latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 261 |
+
260,564398,"models/lam.py",2872,78,"et latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 262 |
+
261,564399,"models/lam.py",2871,79,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 263 |
+
262,564444,"models/lam.py",2870,80," Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 264 |
+
263,564450,"models/lam.py",2920,30," = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 265 |
+
264,564490,"models/lam.py",2919,31,"z = z[:, 1:, 0] # (B, T-1, E)\n",python,selection_mouse
|
| 266 |
+
265,564975,"models/lam.py",2919,0,"",python,selection_mouse
|
| 267 |
+
266,567444,"models/lam.py",2736,0,"",python,selection_mouse
|
| 268 |
+
267,568071,"models/lam.py",2739,0,"",python,selection_mouse
|
| 269 |
+
268,568203,"models/lam.py",2735,11,"concatenate",python,selection_mouse
|
| 270 |
+
269,568809,"models/lam.py",2753,0,"",python,selection_mouse
|
| 271 |
+
270,569464,"models/lam.py",2677,0,"",python,selection_mouse
|
| 272 |
+
271,569624,"models/lam.py",2677,2,"do",python,selection_mouse
|
| 273 |
+
272,569807,"models/lam.py",2677,7,"do this",python,selection_mouse
|
| 274 |
+
273,569847,"models/lam.py",2677,11,"do this the",python,selection_mouse
|
| 275 |
+
274,569851,"models/lam.py",2677,17,"do this the other",python,selection_mouse
|
| 276 |
+
275,569893,"models/lam.py",2677,18,"do this the other ",python,selection_mouse
|
| 277 |
+
276,569894,"models/lam.py",2677,21,"do this the other way",python,selection_mouse
|
| 278 |
+
277,569902,"models/lam.py",2677,28,"do this the other way around",python,selection_mouse
|
| 279 |
+
278,570192,"models/lam.py",2705,0,"",python,selection_mouse
|
| 280 |
+
279,570199,"models/lam.py",2704,0,"",python,selection_command
|
| 281 |
+
280,570389,"models/lam.py",2699,6,"around",python,selection_mouse
|
| 282 |
+
281,570435,"models/lam.py",2700,5,"round",python,selection_command
|
| 283 |
+
282,570600,"models/lam.py",2700,58,"round\n padded_patches = jnp.concatenate((action_pad",python,selection_mouse
|
| 284 |
+
283,570641,"models/lam.py",2700,46,"round\n padded_patches = jnp.concatenate",python,selection_mouse
|
| 285 |
+
284,570690,"models/lam.py",2680,20,"this the other way a",python,selection_mouse
|
| 286 |
+
285,570734,"models/lam.py",2679,21," this the other way a",python,selection_mouse
|
| 287 |
+
286,570735,"models/lam.py",2677,23,"do this the other way a",python,selection_mouse
|
| 288 |
+
287,570735,"models/lam.py",2676,24," do this the other way a",python,selection_mouse
|
| 289 |
+
288,570777,"models/lam.py",2671,29,"mihir do this the other way a",python,selection_mouse
|
| 290 |
+
289,570782,"models/lam.py",2700,28,"round\n padded_patches",python,selection_mouse
|
| 291 |
+
290,571048,"models/lam.py",2700,14,"round\n ",python,selection_mouse
|
| 292 |
+
291,571160,"models/lam.py",2663,37,"# FIXME mihir do this the other way a",python,selection_mouse
|
| 293 |
+
292,571227,"models/lam.py",2664,36," FIXME mihir do this the other way a",python,selection_mouse
|
| 294 |
+
293,571574,"models/lam.py",2664,0,"",python,selection_mouse
|
| 295 |
+
294,571574,"models/lam.py",2664,1," ",python,selection_mouse
|
| 296 |
+
295,571784,"models/lam.py",2664,6," FIXME",python,selection_mouse
|
| 297 |
+
296,571784,"models/lam.py",2664,12," FIXME mihir",python,selection_mouse
|
| 298 |
+
297,571816,"models/lam.py",2664,15," FIXME mihir do",python,selection_mouse
|
| 299 |
+
298,571834,"models/lam.py",2664,20," FIXME mihir do this",python,selection_mouse
|
| 300 |
+
299,571865,"models/lam.py",2664,21," FIXME mihir do this ",python,selection_mouse
|
| 301 |
+
300,571868,"models/lam.py",2664,24," FIXME mihir do this the",python,selection_mouse
|
| 302 |
+
301,571911,"models/lam.py",2664,25," FIXME mihir do this the ",python,selection_mouse
|
| 303 |
+
302,571912,"models/lam.py",2664,30," FIXME mihir do this the other",python,selection_mouse
|
| 304 |
+
303,571918,"models/lam.py",2606,59,"self.action_in, (B, T, 1, self.patch_token_dim))\n # ",python,selection_mouse
|
| 305 |
+
304,572011,"models/lam.py",2610,55,".action_in, (B, T, 1, self.patch_token_dim))\n # ",python,selection_mouse
|
| 306 |
+
305,572012,"models/lam.py",2611,54,"action_in, (B, T, 1, self.patch_token_dim))\n # ",python,selection_mouse
|
| 307 |
+
306,572664,"models/lam.py",2619,0,"",python,selection_mouse
|
| 308 |
+
307,573004,"models/lam.py",2705,0,"",python,selection_mouse
|
| 309 |
+
308,573005,"models/lam.py",2704,0,"",python,selection_command
|
| 310 |
+
309,573166,"models/lam.py",2704,1,"d",python,selection_mouse
|
| 311 |
+
310,573167,"models/lam.py",2702,2,"un",python,selection_mouse
|
| 312 |
+
311,573211,"models/lam.py",2705,0,"",python,selection_command
|
| 313 |
+
312,573213,"models/lam.py",2698,7," around",python,selection_mouse
|
| 314 |
+
313,573213,"models/lam.py",2689,16,"other way around",python,selection_mouse
|
| 315 |
+
314,573223,"models/lam.py",2681,24,"his the other way around",python,selection_mouse
|
| 316 |
+
315,573235,"models/lam.py",2677,28,"do this the other way around",python,selection_mouse
|
| 317 |
+
316,573262,"models/lam.py",2675,30,"r do this the other way around",python,selection_mouse
|
| 318 |
+
317,573268,"models/lam.py",2672,33,"ihir do this the other way around",python,selection_mouse
|
| 319 |
+
318,573286,"models/lam.py",2671,34,"mihir do this the other way around",python,selection_mouse
|
| 320 |
+
319,573307,"models/lam.py",2670,35," mihir do this the other way around",python,selection_mouse
|
| 321 |
+
320,573350,"models/lam.py",2669,36,"E mihir do this the other way around",python,selection_mouse
|
| 322 |
+
321,573437,"models/lam.py",2668,37,"ME mihir do this the other way around",python,selection_mouse
|
| 323 |
+
322,573449,"models/lam.py",2667,38,"XME mihir do this the other way around",python,selection_mouse
|
| 324 |
+
323,573483,"models/lam.py",2666,39,"IXME mihir do this the other way around",python,selection_mouse
|
| 325 |
+
324,573529,"models/lam.py",2665,40,"FIXME mihir do this the other way around",python,selection_mouse
|
| 326 |
+
325,573571,"models/lam.py",2664,41," FIXME mihir do this the other way around",python,selection_mouse
|
| 327 |
+
326,573660,"models/lam.py",2663,42,"# FIXME mihir do this the other way around",python,selection_mouse
|
| 328 |
+
327,574100,"models/lam.py",2663,0,"",python,selection_mouse
|
| 329 |
+
328,576800,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = 
jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n 
print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, 
_rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab
|
| 330 |
+
329,579332,"models/lam.py",0,0,"",python,tab
|
| 331 |
+
330,580473,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), 
size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = 
get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n 
videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab
|
| 332 |
+
331,591956,"genie.py",0,0,"",python,tab
|
| 333 |
+
332,688925,"genie.py",5504,0,"",python,selection_mouse
|
| 334 |
+
333,688934,"genie.py",5503,0,"",python,selection_command
|
| 335 |
+
334,689596,"genie.py",5609,0,"",python,selection_mouse
|
| 336 |
+
335,689637,"genie.py",5608,0,"",python,selection_command
|
| 337 |
+
336,690705,"genie.py",5714,0,"",python,selection_mouse
|
| 338 |
+
337,690707,"genie.py",5713,0,"",python,selection_command
|
| 339 |
+
338,691487,"genie.py",5631,0,"",python,selection_mouse
|
| 340 |
+
339,691487,"genie.py",5630,0,"",python,selection_command
|
| 341 |
+
340,756358,"genie.py",5469,0,"",python,selection_mouse
|
| 342 |
+
341,757517,"genie.py",5469,1,"2",python,content
|
| 343 |
+
342,758824,"genie.py",5546,0,"",python,selection_mouse
|
| 344 |
+
343,759698,"genie.py",5631,0,"",python,selection_mouse
|
| 345 |
+
344,759708,"genie.py",5630,0,"",python,selection_command
|
| 346 |
+
345,759993,"genie.py",5545,0,"",python,selection_mouse
|
| 347 |
+
346,760169,"genie.py",5541,15,"token_idxs_full",python,selection_mouse
|
| 348 |
+
347,760755,"genie.py",5600,0,"",python,selection_mouse
|
| 349 |
+
348,760916,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 350 |
+
349,761613,"genie.py",5549,0,"",python,selection_mouse
|
| 351 |
+
350,761767,"genie.py",5541,15,"token_idxs_full",python,selection_mouse
|
| 352 |
+
351,762479,"genie.py",5602,0,"",python,selection_mouse
|
| 353 |
+
352,762615,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 354 |
+
353,767312,"genie.py",5602,0,"",python,selection_mouse
|
| 355 |
+
354,768315,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 356 |
+
355,776474,"TERMINAL",0,0,"bash",,terminal_focus
|
| 357 |
+
356,779711,"genie.py",5603,0,"",python,selection_mouse
|
| 358 |
+
357,779838,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 359 |
+
358,780696,"genie.py",5603,0,"",python,selection_mouse
|
| 360 |
+
359,782745,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 361 |
+
360,783651,"genie.py",5603,0,"",python,selection_mouse
|
| 362 |
+
361,784226,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 363 |
+
362,784989,"genie.py",5603,0,"",python,selection_mouse
|
| 364 |
+
363,785492,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 365 |
+
364,786034,"genie.py",5603,0,"",python,selection_mouse
|
| 366 |
+
365,787382,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 367 |
+
366,788115,"genie.py",5603,0,"",python,selection_mouse
|
| 368 |
+
367,788525,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 369 |
+
368,789072,"genie.py",5603,0,"",python,selection_mouse
|
| 370 |
+
369,789265,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 371 |
+
370,789951,"genie.py",5603,0,"",python,selection_mouse
|
| 372 |
+
371,790570,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 373 |
+
372,791316,"genie.py",5603,0,"",python,selection_mouse
|
| 374 |
+
373,791700,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 375 |
+
374,792268,"genie.py",5603,0,"",python,selection_mouse
|
| 376 |
+
375,792451,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 377 |
+
376,793167,"genie.py",5603,0,"",python,selection_mouse
|
| 378 |
+
377,794510,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 379 |
+
378,795142,"genie.py",5603,0,"",python,selection_mouse
|
| 380 |
+
379,795521,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 381 |
+
380,795900,"genie.py",5558,52," ""latent_actions"": action_tokens\n",python,selection_mouse
|
| 382 |
+
381,796297,"genie.py",5603,0,"",python,selection_mouse
|
| 383 |
+
382,796298,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 384 |
+
383,796497,"genie.py",5558,52," ""latent_actions"": action_tokens\n",python,selection_mouse
|
| 385 |
+
384,796867,"genie.py",5603,0,"",python,selection_mouse
|
| 386 |
+
385,796868,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 387 |
+
386,797052,"genie.py",5558,52," ""latent_actions"": action_tokens\n",python,selection_mouse
|
| 388 |
+
387,797416,"genie.py",5603,0,"",python,selection_mouse
|
| 389 |
+
388,797417,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 390 |
+
389,797585,"genie.py",5558,52," ""latent_actions"": action_tokens\n",python,selection_mouse
|
| 391 |
+
390,797927,"genie.py",5603,0,"",python,selection_mouse
|
| 392 |
+
391,797928,"genie.py",5596,13,"action_tokens",python,selection_mouse
|
| 393 |
+
392,798088,"genie.py",5558,52," ""latent_actions"": action_tokens\n",python,selection_mouse
|
| 394 |
+
393,798692,"genie.py",5603,0,"",python,selection_mouse
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7ec41ffb-ddad-4e1d-b171-0513171669281757061617849-2025_09_05-10.41.46.448/source.csv
ADDED
|
@@ -0,0 +1,44 @@
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
1,4,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\n\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n name: str = """"\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""rng""]}\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n rng = jax.random.PRNGKey(args.seed)\n if args.log:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n 
}\n )\n wandb.init(**wandb_init_kwargs)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_resolution, args.image_resolution, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n init_params[""params""].update(\n PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\n )\n # Assume checkpoint is of the form tokenizer_<timestamp>_<step>\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # --- TRAIN LOOP ---\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n inputs = dict(videos=videos, rng=_rng)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(\n os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""\n ),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab
|
| 3 |
+
2,1363,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:41:46 AM [info] Activating crowd-code\n10:41:46 AM [info] Recording started\n10:41:46 AM [info] Initializing git provider using file system watchers...\n",Log,tab
|
| 4 |
+
3,1867,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:41:47 AM [info] Git repository found\n10:41:47 AM [info] Git provider initialized successfully\n10:41:47 AM [info] Initial git state: [object Object]\n",Log,content
|
| 5 |
+
4,5691,"train_tokenizer.py",0,0,"",python,tab
|
| 6 |
+
5,53960,"train_tokenizer.py",138,0,"",python,selection_mouse
|
| 7 |
+
6,58325,"train_tokenizer.py",7112,0,"",python,selection_mouse
|
| 8 |
+
7,58366,"train_tokenizer.py",7111,0,"",python,selection_command
|
| 9 |
+
8,159487,"TERMINAL",0,0,"undefined[tum_cte0515@hkn1991 jafar]$ idling",,terminal_command
|
| 10 |
+
9,159551,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;14r(B[m[4l[?7h[H[2JEvery 1.0s: sinfo_t_idle[1;96Hhkn1991.localdomain: Fri Sep 5 10:44:25 2025[3;1HPartition dev_cpuonly[3;35H: 11 nodes idle\r[4dPartition cpuonly[4;35H: 57 nodes idle\r[5dPartition dev_accelerated[5;35H:\t 1 nodes idle\r[6dPartition accelerated[6;35H: 23 nodes idle\r[7dPartition dev_accelerated-h100 :\t 0 nodes idle\r[8dPartition accelerated-h100[8;35H:\t 0 nodes idle\r[9dPartition large[9;35H:\t 5 nodes idle\r[10dPartition accelerated-h200[10;35H:\t 4 nodes idle[14;140H",,terminal_output
|
| 11 |
+
10,160574,"TERMINAL",0,0,"[1;135H6[14;140H",,terminal_output
|
| 12 |
+
11,161591,"TERMINAL",0,0,"[1;135H7[14;140H",,terminal_output
|
| 13 |
+
12,162626,"TERMINAL",0,0,"[1;135H8[14;140H",,terminal_output
|
| 14 |
+
13,163666,"TERMINAL",0,0,"[1;135H9[14;140H",,terminal_output
|
| 15 |
+
14,164777,"TERMINAL",0,0,"[1;134H30[14;140H",,terminal_output
|
| 16 |
+
15,165806,"TERMINAL",0,0,"[1;135H1[14;140H",,terminal_output
|
| 17 |
+
16,166849,"TERMINAL",0,0,"[1;135H3[14;140H",,terminal_output
|
| 18 |
+
17,167831,"TERMINAL",0,0,"[1;135H4[14;140H",,terminal_output
|
| 19 |
+
18,169142,"TERMINAL",0,0,"[1;135H5[14;140H",,terminal_output
|
| 20 |
+
19,170027,"TERMINAL",0,0,"[1;135H6[14;140H",,terminal_output
|
| 21 |
+
20,171110,"TERMINAL",0,0,"[1;135H7[14;140H",,terminal_output
|
| 22 |
+
21,172057,"TERMINAL",0,0,"[14;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar",,terminal_output
|
| 23 |
+
22,173319,"TERMINAL",0,0,"queue",,terminal_command
|
| 24 |
+
23,173390,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;14r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;96Hhkn1991.localdomain: Fri Sep 5 10:44:39 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3465675 accelerat train_la tum_cte0 R 12:43:56\t 8 hkn[0703,0706-0707,0711-0715][5;12H3465676 accelerat train_la tum_cte0 R 12:43:56\t 8 hkn[0521-0528][6;12H3465677 accelerat train_la tum_cte0 R 12:43:56\t 8 hkn[0504,0520,0720,0722-0724,0728,0731][7;12H3466286 accelerat train_to tum_cte0 R 13:28:44\t 1 hkn0736[8;12H3466287 accelerat train_la tum_cte0 R 13:28:44\t 1 hkn0736[14;140H",,terminal_output
|
| 25 |
+
24,174408,"TERMINAL",0,0,"[1;134H40[4;60H7[5d7[6d7[7d5[8d5[14;140H",,terminal_output
|
| 26 |
+
25,175471,"TERMINAL",0,0,"[1;135H1[4;60H8[5d8[6d8[7d6[8d6[14;140H",,terminal_output
|
| 27 |
+
26,176485,"TERMINAL",0,0,"[1;135H2[4;60H9[5d9[6d9[7d7[8d7[14;140H",,terminal_output
|
| 28 |
+
27,177528,"TERMINAL",0,0,"[1;135H3[4;57H4:00[5;57H4:00[6;57H4:00[7d8[8d8[14;140H",,terminal_output
|
| 29 |
+
28,178649,"TERMINAL",0,0,"[1;135H4[4;60H1[5d1[6d1[7d9[8d9[14;140H",,terminal_output
|
| 30 |
+
29,179612,"TERMINAL",0,0,"[1;135H5[4;60H2[5d2[6d2[7d50[8d50[14;140H",,terminal_output
|
| 31 |
+
30,180679,"TERMINAL",0,0,"[1;135H6[4;60H3[5d3[6d3[7d1[8d1[14;140H",,terminal_output
|
| 32 |
+
31,181719,"TERMINAL",0,0,"[1;135H7[4;60H4[5d4[6d4[7d2[8d2[14;140H",,terminal_output
|
| 33 |
+
32,182779,"TERMINAL",0,0,"[1;135H8[4;60H6[5d6[6d6[7d4[8d4[14;140H",,terminal_output
|
| 34 |
+
33,183858,"TERMINAL",0,0,"[1;134H50[4;60H7[5d7[6d7[7d5[8d5[14;140H",,terminal_output
|
| 35 |
+
34,184996,"TERMINAL",0,0,"[1;135H1[4;60H8[5d8[6d8[7d6[8d6[14;140H",,terminal_output
|
| 36 |
+
35,185902,"TERMINAL",0,0,"[1;135H2[4;60H9[5d9[6d9[7d7[8d7[14;140H",,terminal_output
|
| 37 |
+
36,186927,"TERMINAL",0,0,"[1;135H3[4;59H10[5d10[6d10[7d8[8d8[14;140H",,terminal_output
|
| 38 |
+
37,188006,"TERMINAL",0,0,"[1;135H4[4;60H1[5d1[6d1[7d9[8d9[14;140H",,terminal_output
|
| 39 |
+
38,189079,"TERMINAL",0,0,"[1;135H5[4;60H2[5d2[6d2[7;57H9:00[8;57H9:00[14;140H",,terminal_output
|
| 40 |
+
39,190163,"TERMINAL",0,0,"[1;135H6[4;60H3[5d3[6d3[7d1[8d1[14;140H",,terminal_output
|
| 41 |
+
40,190333,"TERMINAL",0,0,"[14;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar",,terminal_output
|
| 42 |
+
41,200056,"TERMINAL",0,0,"cursor ../jasmine",,terminal_command
|
| 43 |
+
42,200101,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 44 |
+
43,200182,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar",,terminal_output
|
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-92e279fe-1f26-4694-b71c-d8f950b76bd11757499356165-2025_09_10-12.16.58.191/source.csv
ADDED
|
@@ -0,0 +1,463 @@
| 1 |
+
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
|
| 2 |
+
1,4,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab
|
| 3 |
+
2,3124,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:16:58 PM [info] Activating crowd-code\n12:16:58 PM [info] Recording started\n12:16:58 PM [info] Initializing git provider using file system watchers...\n12:16:59 PM [info] Git repository found\n12:16:59 PM [info] Git provider initialized successfully\n12:16:59 PM [info] Initial git state: [object Object]\n",Log,tab
|
| 4 |
+
3,5660,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab
|
| 5 |
+
4,23989,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command
|
| 6 |
+
5,24001,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
|
| 7 |
+
6,29626,"TERMINAL",0,0,"queue",,terminal_command
|
| 8 |
+
7,29650,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;85Hhkn1993.localdomain: Wed Sep 10 12:17:27 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[13;129H",,terminal_output
|
| 9 |
+
8,30414,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
|
| 10 |
+
9,31320,"TERMINAL",0,0,"idling",,terminal_command
|
| 11 |
+
10,31418,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: sinfo_t_idle[1;85Hhkn1993.localdomain: Wed Sep 10 12:17:29 2025[3;1HPartition dev_cpuonly[3;35H: 10 nodes idle\r[4dPartition cpuonly[4;35H: 28 nodes idle\r[5dPartition dev_accelerated[5;35H:\t 0 nodes idle\r[6dPartition accelerated[6;35H: 29 nodes idle\r[7dPartition dev_accelerated-h100 :\t 0 nodes idle\r[8dPartition accelerated-h100[8;35H:\t 0 nodes idle\r[9dPartition large[9;35H:\t 4 nodes idle\r[10dPartition accelerated-h200[10;35H:\t 0 nodes idle[13;129H",,terminal_output
|
| 12 |
+
11,32432,"TERMINAL",0,0,"[1;123H30[13d\t",,terminal_output
|
| 13 |
+
12,33638,"TERMINAL",0,0,"[1;124H1[13d\t",,terminal_output
|
| 14 |
+
13,34475,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
|
| 15 |
+
14,38862,"slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=10:00:00\n#SBATCH --partition=large\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\nsource .venv/bin/activate\n\npython generate_dataset.py \\n --min_episode_length 1000 \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test \\n --num_episodes 50",shellscript,tab
|
| 16 |
+
15,61173,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --log \\n --name=coinrun-tokenizer-singlechunk-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 250 \\n --data_dir $array_records_dir_train\n",shellscript,tab
|
| 17 |
+
16,75516,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",442,0,"",shellscript,selection_mouse
|
| 18 |
+
17,75546,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",441,0,"",shellscript,selection_command
|
| 19 |
+
18,79673,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,0,"",shellscript,selection_mouse
|
| 20 |
+
19,79898,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,1,"s",shellscript,selection_mouse
|
| 21 |
+
20,79900,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,3,"sin",shellscript,selection_mouse
|
| 22 |
+
21,79900,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,4,"sing",shellscript,selection_mouse
|
| 23 |
+
22,79901,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,5,"singl",shellscript,selection_mouse
|
| 24 |
+
23,80001,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,6,"single",shellscript,selection_mouse
|
| 25 |
+
24,80002,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,7,"singlec",shellscript,selection_mouse
|
| 26 |
+
25,80059,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,8,"singlech",shellscript,selection_mouse
|
| 27 |
+
26,80120,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,9,"singlechu",shellscript,selection_mouse
|
| 28 |
+
27,80144,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,10,"singlechun",shellscript,selection_mouse
|
| 29 |
+
28,80300,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,11,"singlechunk",shellscript,selection_mouse
|
| 30 |
+
29,81146,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,11,"",shellscript,content
|
| 31 |
+
30,82064,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1551,0,"d",shellscript,content
|
| 32 |
+
31,82065,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1552,0,"",shellscript,selection_keyboard
|
| 33 |
+
32,82250,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1552,0,"a",shellscript,content
|
| 34 |
+
33,82251,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1553,0,"",shellscript,selection_keyboard
|
| 35 |
+
34,82313,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1553,0,"t",shellscript,content
|
| 36 |
+
35,82314,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1554,0,"",shellscript,selection_keyboard
|
| 37 |
+
36,82397,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1554,0,"a",shellscript,content
|
| 38 |
+
37,82399,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1555,0,"",shellscript,selection_keyboard
|
| 39 |
+
38,82493,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1555,0,"s",shellscript,content
|
| 40 |
+
39,82494,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1556,0,"",shellscript,selection_keyboard
|
| 41 |
+
40,82705,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1556,0,"e",shellscript,content
|
| 42 |
+
41,82706,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1557,0,"",shellscript,selection_keyboard
|
| 43 |
+
42,82774,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1557,0,"t",shellscript,content
|
| 44 |
+
43,82775,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1558,0,"",shellscript,selection_keyboard
|
| 45 |
+
44,83305,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1558,0,"-",shellscript,content
|
| 46 |
+
45,83306,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1559,0,"",shellscript,selection_keyboard
|
| 47 |
+
46,83659,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1559,0,"t",shellscript,content
|
| 48 |
+
47,83660,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1560,0,"",shellscript,selection_keyboard
|
| 49 |
+
48,83770,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1560,0,"e",shellscript,content
|
| 50 |
+
49,83771,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1561,0,"",shellscript,selection_keyboard
|
| 51 |
+
50,83910,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1561,0,"s",shellscript,content
|
| 52 |
+
51,83911,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1562,0,"",shellscript,selection_keyboard
|
| 53 |
+
52,83961,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1562,0,"t",shellscript,content
|
| 54 |
+
53,83962,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1563,0,"",shellscript,selection_keyboard
|
| 55 |
+
54,84571,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1562,0,"",shellscript,selection_command
|
| 56 |
+
55,92932,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1760,0,"",shellscript,selection_mouse
|
| 57 |
+
56,93056,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1750,23,"array_records_dir_train",shellscript,selection_mouse
|
| 58 |
+
57,96922,"TERMINAL",0,0,"cd $ws_dir",,terminal_command
|
| 59 |
+
58,96934,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output
|
| 60 |
+
59,100051,"TERMINAL",0,0,"cd data_coinrun/",,terminal_command
|
| 61 |
+
60,100728,"TERMINAL",0,0,"ls",,terminal_command
|
| 62 |
+
61,100779,"TERMINAL",0,0,"]633;C",,terminal_output
|
| 63 |
+
62,100866,"TERMINAL",0,0,"[0m[01;34marray_records[0m [01;34mcoinrun_episodes_10m[0m [01;34mcoinrun_episodes_10m_chunked_2[0m [01;34mcoinrun_episodes_10m_gt_actions_split[0m [01;34mdev[0m\r\n[01;34mcoinrun_episodes[0m [01;34mcoinrun_episodes_10m_chunked[0m [01;34mcoinrun_episodes_10m_gt_actions[0m [01;34mcoinrun_episodes_test[0m\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun",,terminal_output
|
| 64 |
+
63,105341,"TERMINAL",0,0,"cd coinrun_episodes_test",,terminal_command
|
| 65 |
+
64,106167,"TERMINAL",0,0,"pwd",,terminal_command
|
| 66 |
+
65,111333,"TERMINAL",0,0,"ls",,terminal_command
|
| 67 |
+
66,111395,"TERMINAL",0,0,"]633;Ccoinrun_episodes_0000.array_record coinrun_episodes_0001.array_record coinrun_episodes_0002.array_record metadata.json\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test",,terminal_output
|
| 68 |
+
67,116969,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",656,0,"",shellscript,selection_mouse
|
| 69 |
+
68,118998,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",656,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test",shellscript,content
|
| 70 |
+
69,120133,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",746,0,"\n",shellscript,content
|
| 71 |
+
70,122782,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",747,0,"a",shellscript,content
|
| 72 |
+
71,122782,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",748,0,"",shellscript,selection_keyboard
|
| 73 |
+
72,123001,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",748,0,"r",shellscript,content
|
| 74 |
+
73,123002,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",749,0,"",shellscript,selection_keyboard
|
| 75 |
+
74,123195,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",749,0,"r",shellscript,content
|
| 76 |
+
75,123196,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",750,0,"",shellscript,selection_keyboard
|
| 77 |
+
76,123373,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",750,0,"a",shellscript,content
|
| 78 |
+
77,123374,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",751,0,"",shellscript,selection_keyboard
|
| 79 |
+
78,123549,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",751,0,"y",shellscript,content
|
| 80 |
+
79,123550,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",752,0,"",shellscript,selection_keyboard
|
| 81 |
+
80,123722,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",752,0,"_",shellscript,content
|
| 82 |
+
81,123723,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",753,0,"",shellscript,selection_keyboard
|
| 83 |
+
82,124747,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",747,6,"array_records_dir_train",shellscript,content
|
| 84 |
+
83,125466,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",770,0,"=",shellscript,content
|
| 85 |
+
84,125466,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",771,0,"",shellscript,selection_keyboard
|
| 86 |
+
85,125945,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",770,0,"",shellscript,selection_command
|
| 87 |
+
86,126439,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",747,0,"",shellscript,selection_command
|
| 88 |
+
87,127192,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",747,0,"#",shellscript,content
|
| 89 |
+
88,127192,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",748,0,"",shellscript,selection_keyboard
|
| 90 |
+
89,127255,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",748,0," ",shellscript,content
|
| 91 |
+
90,127256,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",749,0,"",shellscript,selection_keyboard
|
| 92 |
+
91,127525,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",748,0,"",shellscript,selection_command
|
| 93 |
+
92,137035,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1638,0,"",shellscript,selection_command
93,139810,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1559,0,"",shellscript,selection_mouse
94,139818,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1558,0,"",shellscript,selection_command
95,174447,"TERMINAL",0,0,"cat metadata.json",,terminal_command
96,187629,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1400,0,"",shellscript,selection_mouse
97,187634,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1399,0,"",shellscript,selection_command
98,192227,"TERMINAL",0,0,"dev",,terminal_command
99,202007,"TERMINAL",0,0,"git branch",,terminal_command
100,202122,"TERMINAL",0,0,"]633;C[?1h=\r action-mapper[m[m\r\n add-wandb-name-and-tags[m[m\r\n before-nnx[m[m\r\n causal-mem-reduce[m[m\r\n causal-spatiotemporal-kv-cache[m[m\r\n causal-st-transformer[m[m\r\n causal-transformer-dynamics-model[m[m\r\n causal-transformer-nnx-no-kv-cache[m[m\r\n coinrun-gt-actions[m[m\r\n convert-to-jax-array-in-iter[m[m\r\n correct-batched-sampling[m[m\r\n dev[m[m\r\n:[K",,terminal_output
101,202673,"TERMINAL",0,0,"\r[K dont-let-tf-see-gpu[m[m\r\n:[K",,terminal_output
102,203441,"TERMINAL",0,0,"\r[K feat/darkness-filter[m[m\r\n:[K\r[K feat/explicit-image-dims[m[m\r\n:[K\r[K fix-action-padding-lam-future-information-access[m[m\r\n:[K\r[K fix-sampling[m[m\r\n:[K\r[K fix-transformer-forwardpass[m[m\r\n:[K\r[K fix/spatiotemporal-pe-once-in-STTransformer[m[m\r\n:[K\r[K grad-norm-log-and-clip[m[m\r\n:[K\r[K grain-dataloader[m[m\r\n:[K\r[K* [32minput_pipeline/add-npy2array_record[m[m\r\n:[K\r[K logging-variants[m[m\r\n:[K\r[K lr-schedules[m[m\r\n:[K",,terminal_output
103,203710,"TERMINAL",0,0,"\r[K main[m[m\r\n:[K\r[K maskgit-different-maskprob-per-sample[m[m\r\n:[K",,terminal_output
104,203828,"TERMINAL",0,0,"\r[K maskgit-sampling-iterative-unmasking-fix[m[m\r\n:[K\r[K metrics-logging-for-dynamics-model[m[m\r\n:[K\r[K monkey-patch[m[m\r\n:[K\r[K new-arch-sampling[m[m\r\n:[K\r[K preprocess_video[m[m\r\n:[K",,terminal_output
105,203835,"TERMINAL",0,0,"\r[K refactor-tmp[m[m\r\n:[K\r[K revised-dataloader[m[m\r\n:[K\r[K runner[m[m\r\n:[K",,terminal_output
106,203954,"TERMINAL",0,0,"\r[K runner-grain[m[m\r\n:[K\r[K sample-ali-branch[m[m\r\n:[K\r[K sample-from-different-topologies[m[m\r\n:[K",,terminal_output
107,204315,"TERMINAL",0,0,"\r[K[HM main[m[m\r\n[13;1H\r[K:[K",,terminal_output
108,204550,"TERMINAL",0,0,"\r[K[HM lr-schedules[m[m\r\n[13;1H\r[K:[K",,terminal_output
109,205135,"TERMINAL",0,0,"\r[K[HM logging-variants[m[m\r\n[13;1H\r[K:[K\r[K[HM* [32minput_pipeline/add-npy2array_record[m[m\r\n[13;1H\r[K:[K",,terminal_output
110,205246,"TERMINAL",0,0,"\r[K[HM grain-dataloader[m[m\r\n[13;1H\r[K:[K\r[K[HM grad-norm-log-and-clip[m[m\r\n[13;1H\r[K:[K\r[K[HM fix/spatiotemporal-pe-once-in-STTransformer[m[m\r\n[13;1H\r[K:[K",,terminal_output
111,205453,"TERMINAL",0,0,"\r[K[HM fix-transformer-forwardpass[m[m\r\n[13;1H\r[K:[K",,terminal_output
112,206225,"TERMINAL",0,0,"\r[K[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
113,210801,"TERMINAL",0,0,"sync-runner",,terminal_command
114,210870,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output
115,215172,"TERMINAL",0,0,"r",,terminal_output
116,215388,"TERMINAL",0,0,"u",,terminal_output
117,215581,"TERMINAL",0,0,"n",,terminal_output
118,215996,"TERMINAL",0,0,"n",,terminal_output
119,216184,"TERMINAL",0,0,"./\r\nREADME.md\r\natari.log\r\ngenerate_dataset.py\r\ngenie.py\r\nrequirements.txt\r\nsample.py\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\n",,terminal_output
120,216446,"TERMINAL",0,0,"e",,terminal_output
121,216499,"TERMINAL",0,0,"r",,terminal_output
122,217367,"TERMINAL",0,0,"input_pipeline/download/\r\ninput_pipeline/download/download_array_records.sh\r\ninput_pipeline/download/openai/\r\ninput_pipeline/download/openai/download_actions_files.py\r\ninput_pipeline/preprocess/\r\ninput_pipeline/preprocess/pngs_to_array_records.py\r\ninput_pipeline/preprocess/video_to_array_records.py\r\nmodels/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nslurm/jobs/mihir/horeka/\r\nslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nslurm/jobs/mihir/horeka/lam/coinrun/tmp\r\nslurm/jobs/mihir/horeka/lamap/\r\nslurm/jobs/mihir/horeka/lamap/coinrun_lam_base.sbatch\r\nslurm/jobs/mihir/horeka/lamap/coinrun_lam_base_dev.sh\r\nslurm/jobs/mihir/horeka/preprocessing/\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\nslurm/jobs/mihir/horeka/preprocessing/preprocess_atari.sbatch\r\nslurm/jobs/mihir/horeka/preprocessing/preprocess_minecraft_multi_chunk.sbatch\r\nslurm/jobs/mihir/horeka/preprocessing/preprocess_minecraft_single_chunk.sbatch\r\ntests/\r\ntests/test_dataloader.py\r\nutils/\r\nutils/dataloader.py\r\nutils/lr_utils.py\r\nutils/nn.py\r\nutils/parameter_utils.py\r\nutils/preprocess.py\r\n",,terminal_output
123,217724,"TERMINAL",0,0,"\r\nsent 317,315 bytes received 839 bytes 42,420.53 bytes/sec\r\ntotal size is 128,665,823 speedup is 404.41\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
124,218194,"TERMINAL",0,0,"runner",,terminal_command
125,221290,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_command
126,221368,"TERMINAL",0,0,"]633;CSubmitted batch job 3482644\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine_jobs",,terminal_output
127,224747,"TERMINAL",0,0,"dev",,terminal_command
128,225736,"TERMINAL",0,0,"queue",,terminal_command
129,225829,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;85Hhkn1993.localdomain: Wed Sep 10 12:20:43 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3482644 accelerat train_to tum_cte0 PD\t0:00\t 1 (None)[13;129H",,terminal_output
130,226945,"TERMINAL",0,0,"[1;124H4[13d\t",,terminal_output
131,227019,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
132,228854,"TERMINAL",0,0,"idling",,terminal_command
133,228898,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: sinfo_t_idle[1;85Hhkn1993.localdomain: Wed Sep 10 12:20:46 2025[3;1HPartition dev_cpuonly[3;35H: 10 nodes idle\r[4dPartition cpuonly[4;35H: 21 nodes idle\r[5dPartition dev_accelerated[5;35H:\t 1 nodes idle\r[6dPartition accelerated[6;35H: 26 nodes idle\r[7dPartition dev_accelerated-h100 :\t 0 nodes idle\r[8dPartition accelerated-h100[8;35H:\t 0 nodes idle\r[9dPartition large[9;35H:\t 4 nodes idle\r[10dPartition accelerated-h200[10;35H:\t 0 nodes idle[13;129H",,terminal_output
134,229911,"TERMINAL",0,0,"[1;124H7[13d\t",,terminal_output
135,231057,"TERMINAL",0,0,"[1;124H8[13d\t",,terminal_output
136,231792,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
137,234616,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1169,0,"",shellscript,selection_mouse
138,234626,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1168,0,"",shellscript,selection_command
139,236891,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"",shellscript,selection_mouse
140,237873,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"d",shellscript,content
141,237875,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,0,"",shellscript,selection_keyboard
142,238030,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,0,"e",shellscript,content
143,238031,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",113,0,"",shellscript,selection_keyboard
144,238265,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",113,0,"v",shellscript,content
145,238267,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"",shellscript,selection_keyboard
146,238452,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,":",shellscript,content
147,238453,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",115,0,"",shellscript,selection_keyboard
148,239090,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,1,"",shellscript,content
149,239374,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"_",shellscript,content
150,239375,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",115,0,"",shellscript,selection_keyboard
151,239546,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"",shellscript,selection_command
152,247615,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_command
153,247667,"TERMINAL",0,0,"]633;C",,terminal_output
154,247712,"TERMINAL",0,0,"Submitted batch job 3482647\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
155,249746,"TERMINAL",0,0,"queue",,terminal_command
156,249824,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;85Hhkn1993.localdomain: Wed Sep 10 12:21:07 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3482644 accelerat train_to tum_cte0 PD\t0:00\t 1 (None)[5;12H3482647 dev_accel train_to tum_cte0 PD\t0:00\t 1 (None)[13;129H",,terminal_output
157,250836,"TERMINAL",0,0,"[1;124H8[13d\t",,terminal_output
158,251401,"TERMINAL",0,0,"[1;124H9[13d\t",,terminal_output
159,252587,"TERMINAL",0,0,"[1;123H10[13d\t",,terminal_output
160,253384,"TERMINAL",0,0,"[1;124H1[13d\t",,terminal_output
161,254366,"TERMINAL",0,0,"[1;124H2[13d\t",,terminal_output
162,255768,"TERMINAL",0,0,"[1;124H3[13d\t",,terminal_output
163,256389,"TERMINAL",0,0,"[1;124H4[13d\t",,terminal_output
164,257583,"TERMINAL",0,0,"[1;124H5[13d\t",,terminal_output
165,258532,"TERMINAL",0,0,"[1;124H6[13d\t",,terminal_output
166,259563,"TERMINAL",0,0,"[1;124H7[13d\t",,terminal_output
167,260772,"TERMINAL",0,0,"[1;124H8[13d\t",,terminal_output
168,261809,"TERMINAL",0,0,"[1;124H9[4;48H R[4;69Hhkn0817[5;48H R[5;69Hhkn0401[13;129H",,terminal_output
169,262715,"TERMINAL",0,0,"[1;123H20[4;60H1[5d1[13;129H",,terminal_output
170,263698,"TERMINAL",0,0,"[1;124H1[4;60H2[5d2[13;129H",,terminal_output
171,264824,"TERMINAL",0,0,"[1;124H2[4;60H3[5d3[13;129H",,terminal_output
172,266036,"TERMINAL",0,0,"[1;124H3[4;60H4[5d4[13;129H",,terminal_output
173,267051,"TERMINAL",0,0,"[1;124H4[4;60H5[5d5[13;129H",,terminal_output
174,268023,"TERMINAL",0,0,"[1;124H5[4;60H6[5d6[13;129H",,terminal_output
175,269152,"TERMINAL",0,0,"[1;124H6[4;60H7[5d7[13;129H",,terminal_output
176,269932,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
177,271743,"TERMINAL",0,0,"scancel 3482647",,terminal_command
178,273551,"TERMINAL",0,0,"queue",,terminal_command
179,273672,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;85Hhkn1993.localdomain: Wed Sep 10 12:21:31 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3482644 accelerat train_to tum_cte0 R\t0:12\t 1 hkn0817[5;12H3482647 dev_accel train_to tum_cte0 CG\t0:10\t 1 hkn0401[13;129H",,terminal_output
180,274783,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
181,275816,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
182,276724,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
183,277771,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
184,278845,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
185,279974,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
186,280965,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
187,281946,"TERMINAL",0,0,"[1;124H9[4;59H21[13;129H",,terminal_output
188,282997,"TERMINAL",0,0,"[1;123H41[4;60H2[13;129H",,terminal_output
189,284101,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
190,285163,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
191,286171,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
192,287241,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
193,288245,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
194,289328,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
195,290315,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
196,291476,"TERMINAL",0,0,"[1;124H9[4;59H30[13;129H",,terminal_output
197,292455,"TERMINAL",0,0,"[1;123H50[4;60H1[13;129H",,terminal_output
198,293620,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
199,294567,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
200,295616,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
201,296699,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
202,297921,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
203,298894,"TERMINAL",0,0,"\r[5d[J[1;124H6[4;60H7[13;129H",,terminal_output
204,300073,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
205,300891,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
206,301977,"TERMINAL",0,0,"[1;124H9[4;59H40[13;129H",,terminal_output
207,303149,"TERMINAL",0,0,"[1;121H2:00[4;60H1[13;129H",,terminal_output
208,304050,"TERMINAL",0,0,"[1;124H1[4;60H3[13;129H",,terminal_output
209,305117,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
210,306284,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
211,307253,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
212,308301,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
213,309194,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
214,310308,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
215,311648,"TERMINAL",0,0,"[1;124H9[4;59H50[13;129H",,terminal_output
216,312324,"TERMINAL",0,0,"[1;123H10[4;60H1[13;129H",,terminal_output
217,313431,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
218,314407,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
219,315524,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
220,316647,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
221,317636,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
222,318576,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
223,319737,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
224,320817,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
225,322348,"TERMINAL",0,0,"[1;124H9[4;57H1:00[13;129H",,terminal_output
226,322575,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1700,0,"",shellscript,selection_mouse
227,322612,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1699,0,"",shellscript,selection_command
228,322935,"TERMINAL",0,0,"[1;123H20[4;60H1[13;129H",,terminal_output
229,323973,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
230,325218,"TERMINAL",0,0,"",,terminal_focus
231,325509,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
232,326120,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
233,327369,"TERMINAL",0,0,"[1;124H4[4;60H6[13;129H",,terminal_output
234,328332,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
235,328998,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command
236,329132,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
237,329190,"TERMINAL",0,0,"logs",,terminal_command
238,329282,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output
239,329373,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
240,330304,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
241,331470,"TERMINAL",0,0,"[1;124H9[4;59H10[13;129H",,terminal_output
242,332525,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab
243,332880,"TERMINAL",0,0,"[1;123H30[4;60H1[13;129H",,terminal_output
244,333323,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
245,334277,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
246,335925,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
247,336549,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
248,337407,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
249,338539,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
250,339614,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
251,340593,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
252,341627,"TERMINAL",0,0,"[1;124H9[4;59H20[13;129H",,terminal_output
253,342604,"TERMINAL",0,0,"[1;123H40[4;60H1[13;129H",,terminal_output
254,343758,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
255,344553,"TERMINAL",0,0,"cd coinrun/tokenizer/",,terminal_command
256,344755,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
257,344836,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
258,345816,"TERMINAL",0,0,"ls",,terminal_command
259,345934,"TERMINAL",0,0,"]633;Ctrain_tokenizer_1e-4_3414046.log train_tokenizer_1e-4_3482644.log train_tokenizer_1e-4_3482647.log\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
260,345982,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
261,346913,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
262,347913,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
263,349088,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
264,349962,"TERMINAL",0,0,"[1;124H7[4;60H9[13;129H",,terminal_output
265,351152,"TERMINAL",0,0,"[1;124H9[4;59H30[13;129H",,terminal_output
266,352109,"TERMINAL",0,0,"[1;123H50[4;60H1[13;129H",,terminal_output
267,352869,"TERMINAL",0,0,"tail -f train_tokenizer_1e-4_3482644.log",,terminal_command
268,352975,"TERMINAL",0,0,"]633;C num_devices = jax.device_count()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\r\n return int(get_backend(backend).device_count())\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\r\n return _get_backend_uncached(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\r\n bs = backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\r\n raise RuntimeError(err_msg)\r\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\r\n",,terminal_output
269,353132,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
270,354488,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
271,355378,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
272,356434,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
273,356838,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
274,357606,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
275,358469,"TERMINAL",0,0,"watch",,terminal_focus
276,358548,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
277,359403,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
278,359637,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
279,361610,"TERMINAL",0,0,"scancel 3482644",,terminal_command
280,361666,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
281,362935,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab
282,364016,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",634,0,"",shellscript,selection_mouse
283,364034,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",633,0,"",shellscript,selection_command
284,366006,"TERMINAL",0,0,"bash",,terminal_focus
285,369355,"TERMINAL",0,0,"bash",,terminal_focus
286,376821,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab
287,378682,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",635,0,"",shellscript,selection_command
288,381176,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_command
289,383036,"TERMINAL",0,0,"bash",,terminal_focus
290,384023,"TERMINAL",0,0,"bash",,terminal_focus
291,385355,"TERMINAL",0,0,"queue",,terminal_command
292,385425,"TERMINAL",0,0,"]633;C[?1049h[22;0;0t[1;13r(B[m[4l[?7h[H[2JEvery 1.0s: squeue --me[1;85Hhkn1993.localdomain: Wed Sep 10 12:23:23 2025[3;14HJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)[4;12H3482644 accelerat train_to tum_cte0 CG\t1:40\t 1 hkn0817[5;12H3482648 dev_accel train_to tum_cte0 R\t0:04\t 1 hkn0401[13;129H",,terminal_output
293,386467,"TERMINAL",0,0,"[1;124H4[5;60H5[13;129H",,terminal_output
294,386791,"TERMINAL",0,0,"bash",,terminal_focus
295,387303,"TERMINAL",0,0,"[1;124H5[5;60H6[13;129H",,terminal_output
296,388315,"TERMINAL",0,0,"[1;124H6[5;60H7[13;129H",,terminal_output
297,388656,"TERMINAL",0,0,"ls",,terminal_command
298,388730,"TERMINAL",0,0,"]633;Ctrain_tokenizer_1e-4_3414046.log train_tokenizer_1e-4_3482644.log train_tokenizer_1e-4_3482647.log\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
299,389462,"TERMINAL",0,0,"[1;124H7[5;60H8[13;129H",,terminal_output
300,390448,"TERMINAL",0,0,"[1;124H8[5;60H9[13;129H",,terminal_output
301,391051,"TERMINAL",0,0,"ls",,terminal_command
302,391098,"TERMINAL",0,0,"]633;Ctrain_tokenizer_1e-4_3414046.log train_tokenizer_1e-4_3482644.log train_tokenizer_1e-4_3482647.log\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
303,391576,"TERMINAL",0,0,"[1;124H9[5;59H10[13;129H",,terminal_output
304,392609,"TERMINAL",0,0,"[1;123H30[5;60H1[13;129H",,terminal_output
305,393716,"TERMINAL",0,0,"[1;124H1[5;60H2[13;129H",,terminal_output
306,394835,"TERMINAL",0,0,"[1;124H2[5;60H3[13;129H",,terminal_output
307,395882,"TERMINAL",0,0,"[1;124H3[5;60H4[13;129H",,terminal_output
308,397039,"TERMINAL",0,0,"[1;124H4[5;60H5[13;129H",,terminal_output
309,397883,"TERMINAL",0,0,"[1;124H5[5;60H6[13;129H",,terminal_output
310,398873,"TERMINAL",0,0,"[1;124H6[5;60H7[13;129H",,terminal_output
311,399115,"TERMINAL",0,0,"tail -f train_tokenizer_1e-4_3482648.log",,terminal_command
312,399214,"TERMINAL",0,0,"]633;Ctail: cannot open 'train_tokenizer_1e-4_3482648.log' for reading: No such file or directory\r\ntail: no files remaining\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
313,400134,"TERMINAL",0,0,"[1;124H7[5;60H8[13;129H",,terminal_output
314,400982,"TERMINAL",0,0,"[1;124H8[5;60H9[13;129H",,terminal_output
315,401513,"TERMINAL",0,0,"tail -f train_tokenizer_1e-4_3482648.log",,terminal_command
316,401618,"TERMINAL",0,0,"]633;Ctail: cannot open 'train_tokenizer_1e-4_3482648.log' for reading: No such file or directory\r\ntail: no files remaining\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
317,402363,"TERMINAL",0,0,"[1;124H9[5;59H20[13;129H",,terminal_output
318,403391,"TERMINAL",0,0,"[1;123H40[5;60H2[13;129H",,terminal_output
319,404432,"TERMINAL",0,0,"[1;124H2[5;60H3[13;129H",,terminal_output
320,405435,"TERMINAL",0,0,"[1;124H3[5;60H4[13;129H",,terminal_output
321,406724,"TERMINAL",0,0,"[1;124H4[5;60H5[13;129H",,terminal_output
322,407758,"TERMINAL",0,0,"\r[5d[J[1;124H5[4;18H8 dev_accel[4;48H R\t0:26[4;73H401[13;129H",,terminal_output
323,408155,"TERMINAL",0,0,"lw",,terminal_command
324,408223,"TERMINAL",0,0,"]633;Cbash: lw: command not found...\r\n",,terminal_output
325,408732,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
326,409500,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
327,409714,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
328,410600,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
329,410919,"TERMINAL",0,0,"^C",,terminal_command
330,411007,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
331,411549,"TERMINAL",0,0,"ls",,terminal_command
332,411676,"TERMINAL",0,0,"]633;Ctrain_tokenizer_1e-4_3414046.log train_tokenizer_1e-4_3482647.log\r\ntrain_tokenizer_1e-4_3482644.log train_tokenizer_1e-4_3482648.log\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
333,411725,"TERMINAL",0,0,"[1;124H9[4;59H30[13;129H",,terminal_output
334,412872,"TERMINAL",0,0,"[1;123H50[4;60H1[13;129H",,terminal_output
335,413968,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
336,414808,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
337,415081,"TERMINAL",0,0,"tail -f train_tokenizer_1e-4_3482648.log",,terminal_command
338,415163,"TERMINAL",0,0,"]633;CSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3482648\r\nSLURM_NODEID=0\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=train_tokenizer_1e-4\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\nGpuFreq=control_disabled\r\n",,terminal_output
339,415604,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
340,416706,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
341,417794,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
342,418820,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
343,419823,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
344,420920,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
345,422085,"TERMINAL",0,0,"[1;124H9[4;59H40[13;129H",,terminal_output
346,423090,"TERMINAL",0,0,"[1;121H4:00[4;60H1[13;129H",,terminal_output
347,424162,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
348,425508,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
349,426448,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
350,427243,"TERMINAL",0,0,"[1;124H4[4;60H6[13;129H",,terminal_output
351,428362,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
352,428476,"TERMINAL",0,0,"2025-09-10 12:24:06.019374: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:1: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\r\n backend = _init_backend(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\r\n backend = registration.factory()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\r\n return xla_client.make_c_api_client(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\r\n2025-09-10 12:24:06.023260: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:3: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\r\n backend = _init_backend(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\r\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\r\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\r\n backend = registration.factory()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\r\n num_devices = jax.device_count()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\r\n return xla_client.make_c_api_client(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\r\n return int(get_backend(backend).device_count())\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\r\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\r\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\r\n return _get_backend_uncached(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\r\n num_devices = 
jax.device_count()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\r\n bs = backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\r\n return int(get_backend(backend).device_count())\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\r\n raise RuntimeError(err_msg)\r\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\r\n return _get_backend_uncached(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\r\n bs = backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\r\n raise RuntimeError(err_msg)\r\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\r\n2025-09-10 12:24:06.035570: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:2: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\r\n backend = _init_backend(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\r\n backend = registration.factory()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\r\n return xla_client.make_c_api_client(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\r\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\r\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\r\n num_devices = jax.device_count()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\r\n return int(get_backend(backend).device_count())\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\r\n return _get_backend_uncached(platform)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\r\n bs = 
backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\r\n raise RuntimeError(err_msg)\r\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\r\n",,terminal_output
353,429404,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
354,430463,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
355,431556,"TERMINAL",0,0,"[1;124H9[4;59H50[13;129H",,terminal_output
356,432737,"TERMINAL",0,0,"[1;123H10[4;60H1[13;129H",,terminal_output
357,433906,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
358,435071,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
359,435948,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
360,436767,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
361,438034,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
362,439498,"TERMINAL",0,0,"[1;124H6[4;60H7[13;129H",,terminal_output
363,440592,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
364,441652,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
365,442246,"TERMINAL",0,0,"[1;124H9[4;57H1:00[13;129H",,terminal_output
366,443079,"TERMINAL",0,0,"[1;123H20[4;60H1[13;129H",,terminal_output
367,444770,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
368,445738,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
369,446864,"TERMINAL",0,0,"[1;124H3[4;60H4[13;129H",,terminal_output
370,447823,"TERMINAL",0,0,"[1;124H4[4;60H5[13;129H",,terminal_output
371,449009,"TERMINAL",0,0,"[1;124H5[4;60H6[13;129H",,terminal_output
372,449577,"TERMINAL",0,0,"[1;124H7[4;60H8[13;129H",,terminal_output
373,450598,"TERMINAL",0,0,"[1;124H8[4;60H9[13;129H",,terminal_output
374,451909,"TERMINAL",0,0,"[1;124H9[4;59H10[13;129H",,terminal_output
375,453162,"TERMINAL",0,0,"[1;123H30[4;60H1[13;129H",,terminal_output
376,453568,"TERMINAL",0,0,"watch",,terminal_focus
377,454215,"TERMINAL",0,0,"[1;124H1[4;60H2[13;129H",,terminal_output
378,454538,"TERMINAL",0,0,"[1;124H2[4;60H3[13;129H",,terminal_output
379,454763,"TERMINAL",0,0,"[13;1H[?1049l[23;0;0t\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output
380,457423,"TERMINAL",0,0,"scancel 3482648",,terminal_command
381,458058,"TERMINAL",0,0,"tail",,terminal_focus
382,458199,"TERMINAL",0,0,"srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** JOB 3482648 ON hkn0401 CANCELLED AT 2025-09-10T12:24:35 ***\r\nsrun: got SIGCONT\r\nsrun: forcing job termination\r\nslurmstepd: error: *** STEP 3482648.0 ON hkn0401 CANCELLED AT 2025-09-10T12:24:35 ***\r\n",,terminal_output
383,458489,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
384,461173,"TERMINAL",0,0,"ls",,terminal_command
385,461483,"TERMINAL",0,0,"]633;Ctrain_tokenizer_1e-4_3414046.log train_tokenizer_1e-4_3482647.log\r\ntrain_tokenizer_1e-4_3482644.log train_tokenizer_1e-4_3482648.log\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer",,terminal_output
386,463277,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/train_tokenizer_1e-4_3482648.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --log \\n --name=coinrun-tokenizer-dataset-test-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 250 \\n --data_dir $array_records_dir_train\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=3106821\nSLURM_JOB_GPUS=0\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\nSLURMD_NODENAME=hkn0401\nSLURM_JOB_START_TIME=1757499799\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1757500999\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24\nSLURM_GPUS_ON_NODE=1\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=dev_accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3482648\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=4\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0401\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=4\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1993.localdomain\nSLURM_JOB_ID=3482648\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_tokenizer_1e-4\nSLURM_NTASKS_PER_NODE=4\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0401\nGpuFreq=control_disabled\n2025-09-10 12:24:06.019374: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:1: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n2025-09-10 12:24:06.023260: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:3: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n return 
int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\n2025-09-10 12:24:06.035570: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:2: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_tokenizer.py"", line 145, in <module>\n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nsrun: Job step aborted: Waiting up 
to 32 seconds for job step to finish.\nslurmstepd: error: *** JOB 3482648 ON hkn0401 CANCELLED AT 2025-09-10T12:24:35 ***\nsrun: got SIGCONT\nsrun: forcing job termination\nslurmstepd: error: *** STEP 3482648.0 ON hkn0401 CANCELLED AT 2025-09-10T12:24:35 ***\n",log,tab
387,485481,"TERMINAL",0,0,"bash",,terminal_focus
388,486531,"TERMINAL",0,0,"bash",,terminal_focus
389,499618,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command
390,499686,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3482653\r\nsalloc: job 3482653 queued and waiting for resources\r\n",,terminal_output
391,500971,"TERMINAL",0,0,"salloc: job 3482653 has been allocated resources\r\nsalloc: Granted job allocation 3482653\r\n",,terminal_output
392,501073,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output
393,503080,"TERMINAL",0,0,"s",,terminal_output
394,503220,"TERMINAL",0,0,"o",,terminal_output
395,503324,"TERMINAL",0,0,"u",,terminal_output
396,503379,"TERMINAL",0,0,"r",,terminal_output
397,503609,"TERMINAL",0,0,"c",,terminal_output
398,503739,"TERMINAL",0,0,"e",,terminal_output
399,503850,"TERMINAL",0,0," ",,terminal_output
400,503927,"TERMINAL",0,0,".",,terminal_output
401,504031,"TERMINAL",0,0,"v",,terminal_output
402,504533,"TERMINAL",0,0,"e",,terminal_output
403,504676,"TERMINAL",0,0,"n",,terminal_output
404,504866,"TERMINAL",0,0,"v",,terminal_output
405,505245,"TERMINAL",0,0,"/",,terminal_output
406,505439,"TERMINAL",0,0,"b",,terminal_output
407,505799,"TERMINAL",0,0,"i",,terminal_output
408,505912,"TERMINAL",0,0,"n",,terminal_output
409,506619,"TERMINAL",0,0,"/",,terminal_output
410,506849,"TERMINAL",0,0,"a",,terminal_output
411,506986,"TERMINAL",0,0,"c",,terminal_output
412,507149,"TERMINAL",0,0,"c",,terminal_output
413,507284,"TERMINAL",0,0,"e",,terminal_output
414,507360,"TERMINAL",0,0,"l",,terminal_output
415,507783,"TERMINAL",0,0,"e",,terminal_output
416,507961,"TERMINAL",0,0,"r",,terminal_output
417,508262,"TERMINAL",0,0,"a",,terminal_output
418,508527,"TERMINAL",0,0,"t",,terminal_output
419,508662,"TERMINAL",0,0,"e",,terminal_output
420,509062,"TERMINAL",0,0,"\r\n",,terminal_output
421,513649,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab
422,518757,"TERMINAL",0,0,"sh",,terminal_output
423,518865,"TERMINAL",0,0," ",,terminal_output
424,519419,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output
425,520348,"TERMINAL",0,0,"\r\n",,terminal_output
426,522468,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab
427,528106,"TERMINAL",0,0,"salloc: Nodes hkn0403 are ready for job\r\n",,terminal_output
428,528277,"TERMINAL",0,0,"source .venv/bin/accelerate\r\nsh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\n",,terminal_output
429,529767,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer[?2004h[tum_cte0515@hkn0403 tokenizer]$ source .venv/bin/accelerate\r\n[?2004l\rbash: .venv/bin/accelerate: No such file or directory\r\n]0;tum_cte0515@hkn0403:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer[?2004h[tum_cte0515@hkn0403 tokenizer]$ sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\n[?2004l\rsh: slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh: No such file or directory\r\n]0;tum_cte0515@hkn0403:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer[?2004h[tum_cte0515@hkn0403 tokenizer]$ ",,terminal_output
430,533110,"TERMINAL",0,0,"d",,terminal_output
431,533581,"TERMINAL",0,0,"e",,terminal_output
432,535138,"TERMINAL",0,0,"v",,terminal_output
433,535365,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h[tum_cte0515@hkn0403 jasmine]$ ",,terminal_output
434,535668,"TERMINAL",0,0,"dev",,terminal_output
435,535948,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output
436,536284,"TERMINAL",0,0,"\r[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[C[37Pource .venv/bin/accelerate",,terminal_output
437,536890,"TERMINAL",0,0,"\r\n[?2004l\rbash: .venv/bin/accelerate: No such file or directory\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h[tum_cte0515@hkn0403 jasmine]$ ",,terminal_output
438,537365,"TERMINAL",0,0,"source .venv/bin/accelerate",,terminal_output
439,537540,"TERMINAL",0,0,"dev[K",,terminal_output
440,538048,"TERMINAL",0,0,"source .venv/bin/accelerate",,terminal_output
441,538858,"TERMINAL",0,0,"[K",,terminal_output
442,542000,"TERMINAL",0,0,"source .venv/bin/accelerate",,terminal_output
443,542619,"TERMINAL",0,0,"[K",,terminal_output
444,543719,"TERMINAL",0,0,"[K",,terminal_output
445,543863,"TERMINAL",0,0,"[K[K",,terminal_output
446,544014,"TERMINAL",0,0,"[K",,terminal_output
447,544278,"TERMINAL",0,0,"[K",,terminal_output
448,544423,"TERMINAL",0,0,"[K",,terminal_output
449,544824,"TERMINAL",0,0,"",,terminal_output
450,545254,"TERMINAL",0,0,"[K",,terminal_output
451,545374,"TERMINAL",0,0,"tivate",,terminal_output
452,545932,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output
453,546405,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output
454,546639,"TERMINAL",0,0,"celerate",,terminal_output
455,547159,"TERMINAL",0,0,"dev[K",,terminal_output
456,547780,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output
457,548519,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --log \\r\n --name=coinrun-tokenizer-dataset-test-$slurm_job_id \\r\n --tags tokenizer coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 250 \\r\n --data_dir $array_records_dir_train\r\n",,terminal_output
458,548758,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=67647\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757499919\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757503519\r\nSLURM_PMI2_SRUN_PORT=32775\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3482653\r\nSLURM_PTY_PORT=44507\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=32\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=129\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43229\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3482653\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43229\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output
459,548839,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output
460,571084,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output
461,572176,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250910_122628-qot85m4u\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-tokenizer-dataset-test-3482653\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/qot85m4u\r\n",,terminal_output
462,573635,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nStarting training from step 0...\r\n",,terminal_output
927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9c2d9ac2-2076-4ff2-8381-5264acd089541759350296590-2025_10_01-22.25.31.836/source.csv
ADDED
@@ -0,0 +1,8 @@
Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
1,5,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_4_action_prepend_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_4 action_prepend_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=4 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab
2,330,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:25:31 PM [info] Activating crowd-code\n10:25:31 PM [info] Recording started\n10:25:31 PM [info] Initializing git provider using file system watchers...\n",Log,tab
3,567,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:25:32 PM [info] Git repository found\n10:25:32 PM [info] Git provider initialized successfully\n10:25:32 PM [info] Initial git state: [object Object]\n",Log,content
4,373443,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"",shellscript,tab
5,373446,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",955,0,"",shellscript,selection_mouse
6,1320333,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"Switched from branch 'prepend-action-maskgit' to 'main'",shellscript,git_branch_checkout
7,1360341,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"Switched from branch 'main' to 'dynamics_coinrun_500m_dataset_29519'",shellscript,git_branch_checkout
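The rows above follow the crowd-code event schema declared in the header line (Sequence, Time, File, RangeOffset, RangeLength, Text, Language, Type): one row per editor or terminal event, where Time appears to be a millisecond offset into the session and Text carries the emitted content with escapes such as \r\n stored as literal characters. As a minimal sketch of reading one such recording back, assuming Python's standard csv module and a hypothetical local copy named source.csv (the filename and unescaping choices here are illustrative, not part of the dataset):

import csv

# Minimal sketch: replay the terminal stream of one crowd-code recording.
# "source.csv" is a hypothetical local copy of a per-session file from this diff.
with open("source.csv", newline="", encoding="utf-8") as f:
    rows = [r for r in csv.DictReader(f) if r["Type"] == "terminal_output"]

# Text fields store escapes like \r\n as literal backslash sequences;
# unescape the common ones before joining into a transcript.
transcript = "".join(
    r["Text"].replace("\\r\\n", "\n").replace("\\n", "\n") for r in rows
)
print(transcript)

The same DictReader loop can be filtered on other Type values (tab, selection_mouse, git_branch_checkout) to reconstruct the editing timeline rather than the terminal output.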