Step... (31000/50000 | Loss: 1.604581594467163, Acc: 0.6744211912155151): 62%|█████████████████▍ | 31239/50000 [12:32:24<7:08:13, 1.37s/it]
- flax_model.msgpack +1 -1
- outputs/checkpoints/checkpoint-25000/training_state.json +0 -1
- outputs/checkpoints/checkpoint-26000/training_state.json +0 -1
- outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/config.json +0 -0
- outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/data_collator.joblib +0 -0
- outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/flax_model.msgpack +1 -1
- outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/optimizer_state.msgpack +1 -1
- outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/training_args.joblib +0 -0
- outputs/checkpoints/checkpoint-30000/training_state.json +1 -0
- outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/config.json +0 -0
- outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/data_collator.joblib +0 -0
- outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/flax_model.msgpack +1 -1
- outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/optimizer_state.msgpack +1 -1
- outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/training_args.joblib +0 -0
- outputs/checkpoints/checkpoint-31000/training_state.json +1 -0
- outputs/events.out.tfevents.1627258355.tablespoon.3000110.3.v2 +2 -2
- outputs/flax_model.msgpack +1 -1
- outputs/optimizer_state.msgpack +1 -1
- outputs/training_state.json +1 -1
- pytorch_model.bin +1 -1
- run_stream.512.log +0 -0
- wandb/run-20210726_001233-17u6inbn/files/output.log +1534 -0
- wandb/run-20210726_001233-17u6inbn/files/wandb-summary.json +1 -1
- wandb/run-20210726_001233-17u6inbn/logs/debug-internal.log +2 -2
- wandb/run-20210726_001233-17u6inbn/run-17u6inbn.wandb +2 -2
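This commit updates both the Flax weights (flax_model.msgpack) and the converted PyTorch weights (pytorch_model.bin) at the repository root, so the checkpoint can be loaded from either framework. A minimal sketch of loading it with transformers, assuming a hypothetical repository id "user/model" (the real id is not part of this commit view):

    # Minimal sketch: load the masked-LM checkpoint committed here.
    # "user/model" is a placeholder repository id; substitute the real one.
    from transformers import AutoTokenizer, RobertaForMaskedLM

    tokenizer = AutoTokenizer.from_pretrained("user/model")
    # pytorch_model.bin is loaded directly; from_flax=True would instead
    # convert flax_model.msgpack, as the training log below does at each save.
    model = RobertaForMaskedLM.from_pretrained("user/model")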
flax_model.msgpack
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:0f9fa29e534a0b52f07cbcbf30350fd1deb85efed567bf2b958e563dee74a936
  size 249750019
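The large binaries in this commit are stored as Git LFS pointer files, which is why each diff only shows the three pointer fields (version, oid, size). A small sketch, using only the standard library, for checking that a downloaded file matches the sha256 oid recorded in its pointer (the path and oid are taken from the diff above):

    import hashlib

    def sha256_of(path, chunk=1 << 20):
        # Stream the file so large checkpoints do not need to fit in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while block := f.read(chunk):
                h.update(block)
        return h.hexdigest()

    # Expected oid copied from the new pointer contents shown above.
    expected = "0f9fa29e534a0b52f07cbcbf30350fd1deb85efed567bf2b958e563dee74a936"
    print(sha256_of("flax_model.msgpack") == expected)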
outputs/checkpoints/checkpoint-25000/training_state.json
DELETED
@@ -1 +0,0 @@
- {"step": 25001}
outputs/checkpoints/checkpoint-26000/training_state.json
DELETED
@@ -1 +0,0 @@
- {"step": 26001}
outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/config.json
RENAMED
File without changes
outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/data_collator.joblib
RENAMED
File without changes
outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/flax_model.msgpack
RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:4f490e826683c621d4235a0f6be4d6447e58fec7dcaedf37c9c6b23ac1179860
  size 249750019
outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/optimizer_state.msgpack
RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:9cb63864317874863355bb060661587e535ec09909bdec8699a4445bd3e1d2bd
  size 499500278
outputs/checkpoints/{checkpoint-25000 → checkpoint-30000}/training_args.joblib
RENAMED
File without changes
outputs/checkpoints/checkpoint-30000/training_state.json
ADDED
@@ -0,0 +1 @@
+ {"step": 30001}
outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/config.json
RENAMED
File without changes
outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/data_collator.joblib
RENAMED
File without changes
outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/flax_model.msgpack
RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:0f9fa29e534a0b52f07cbcbf30350fd1deb85efed567bf2b958e563dee74a936
  size 249750019
outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/optimizer_state.msgpack
RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:74879c5519aa133f282d7c3b040183a11d8d6c38f0a04dde3bbcdb2f11f62b4e
  size 499500278
outputs/checkpoints/{checkpoint-26000 → checkpoint-31000}/training_args.joblib
RENAMED
File without changes
outputs/checkpoints/checkpoint-31000/training_state.json
ADDED
@@ -0,0 +1 @@
+ {"step": 31001}
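Each checkpoint directory carries a one-line training_state.json holding only the step counter, {"step": 31001} here, which is what a resume would read back. A hedged sketch of how such a file can be written and read; the helper names are illustrative and not taken from the repository's training script:

    import json
    import os

    def save_training_state(ckpt_dir, step):
        # Mirrors the single-field layout seen above: {"step": 31001}
        with open(os.path.join(ckpt_dir, "training_state.json"), "w") as f:
            json.dump({"step": step}, f)

    def load_training_state(ckpt_dir):
        # Returns the step to resume from.
        with open(os.path.join(ckpt_dir, "training_state.json")) as f:
            return json.load(f)["step"]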
outputs/events.out.tfevents.1627258355.tablespoon.3000110.3.v2
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:c50283b6813a75c683baf41ae4dc9ce87bc1ee96ad10f1cb1b2d5dd2d946fc32
+ size 4594839
outputs/flax_model.msgpack
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:0f9fa29e534a0b52f07cbcbf30350fd1deb85efed567bf2b958e563dee74a936
  size 249750019
outputs/optimizer_state.msgpack
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:74879c5519aa133f282d7c3b040183a11d8d6c38f0a04dde3bbcdb2f11f62b4e
  size 499500278
outputs/training_state.json
CHANGED
@@ -1 +1 @@
- {"step":
+ {"step": 31001}
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:549edf3725e96eec68d26df4f6f75ef221b3457f097cf83a673ce055b0ff4363
  size 498858859
run_stream.512.log
CHANGED
The diff for this file is too large to render.
wandb/run-20210726_001233-17u6inbn/files/output.log
CHANGED
@@ -19742,6 +19742,1540 @@ You should probably TRAIN this model on a down-stream task to be able to use it
[most of the ~1,540 added lines are blank progress-bar redraws; the non-blank additions are:]
+ Step... (29000/50000 | Loss: 1.6177186965942383, Acc: 0.67269366979599): 60%|█████████████████▍ | 30000/50000 [12:00:13<8:31:46, 1.54s/it]
+ Evaluating ...: 0%| | 0/130 [00:00<?, ?it/s]
+ Step... (29500 | Loss: 1.6591482162475586, Learning Rate: 0.00024848486646078527)
+ [14:08:36] - INFO - __main__ - Saving checkpoint at 30000 steps█████████████████████████████████████████████████████| 130/130 [00:21<00:00, 4.59it/s]
+ All Flax model weights were used when initializing RobertaForMaskedLM.
+ Some weights of RobertaForMaskedLM were not initialized from the Flax model and are newly initialized: ['lm_head.decoder.weight', 'roberta.embeddings.position_ids', 'lm_head.decoder.bias']
+ You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
+ Step... (30000/50000 | Loss: 1.6142958402633667, Acc: 0.6730945110321045): 62%|████████████████▋ | 31000/50000 [12:25:40<6:58:17, 1.32s/it]
+ Evaluating ...: 14%|█████████████▎ | 18/130 [00:01<00:07, 15.90it/s]
+ Step... (30500 | Loss: 1.712971806526184, Learning Rate: 0.00023636363039258868)
+ [14:34:03] - INFO - __main__ - Saving checkpoint at 31000 steps█████████████████████████████████████████████████████| 130/130 [00:21<00:00, 4.60it/s]
+ All Flax model weights were used when initializing RobertaForMaskedLM.
+ Some weights of RobertaForMaskedLM were not initialized from the Flax model and are newly initialized: ['lm_head.decoder.weight', 'roberta.embeddings.position_ids', 'lm_head.decoder.bias']
+ You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
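The "Saving checkpoint at 30000 steps" lines correspond to the per-checkpoint files in this commit (config.json, flax_model.msgpack, optimizer_state.msgpack, training_args.joblib, data_collator.joblib, training_state.json). A plausible sketch of that save step, assuming a Flax model and an optax-style optimizer state; this is an illustration, not the repository's actual script:

    import json
    import os
    import joblib
    from flax import serialization

    def save_checkpoint(out_dir, step, model, opt_state, training_args, data_collator):
        ckpt_dir = os.path.join(out_dir, "checkpoints", f"checkpoint-{step}")
        os.makedirs(ckpt_dir, exist_ok=True)
        # Writes config.json and flax_model.msgpack for a Flax model.
        model.save_pretrained(ckpt_dir)
        # Serialize the optimizer state to msgpack bytes.
        with open(os.path.join(ckpt_dir, "optimizer_state.msgpack"), "wb") as f:
            f.write(serialization.to_bytes(opt_state))
        joblib.dump(training_args, os.path.join(ckpt_dir, "training_args.joblib"))
        joblib.dump(data_collator, os.path.join(ckpt_dir, "data_collator.joblib"))
        # Matches the {"step": 30001} file added in this commit.
        with open(os.path.join(ckpt_dir, "training_state.json"), "w") as f:
            json.dump({"step": step + 1}, f)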
wandb/run-20210726_001233-17u6inbn/files/wandb-summary.json
CHANGED
@@ -1 +1 @@
- {"global_step":
+ {"global_step": 31000, "_timestamp": 1627310015.206409, "train_time": 1810641.125, "train_learning_rate": 0.00023030306329019368, "_step": 61814, "train_loss": 1.6786357164382935, "eval_accuracy": 0.6730945110321045, "eval_loss": 1.6142958402633667}
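wandb-summary.json is a single JSON object holding the latest logged values, so the step-31000 eval metrics above can be read without the W&B client. A small sketch assuming the file path shown in this commit:

    import json

    with open("wandb/run-20210726_001233-17u6inbn/files/wandb-summary.json") as f:
        summary = json.load(f)
    # Latest logged step and evaluation metrics.
    print(summary["global_step"], summary["eval_loss"], summary["eval_accuracy"])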
wandb/run-20210726_001233-17u6inbn/logs/debug-internal.log
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:9f796f543dac0ebb90fd654d77cc33441df5f7b3517d66f26e0456f3e2f83d06
+ size 24483658
wandb/run-20210726_001233-17u6inbn/run-17u6inbn.wandb
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:4e7542027cf0f1a8fe825b6101a9e222367928ba7623d6b21687570cfcac54f7
+ size 12299882