gweltou committed
Commit: 43d8a78
Parent: 676f008

Training in progress, step 1000

config.json CHANGED
@@ -43,7 +43,7 @@
   "feat_extract_activation": "gelu",
   "feat_extract_dropout": 0.0,
   "feat_extract_norm": "group",
-  "feat_proj_dropout": 0.0,
+  "feat_proj_dropout": 0.1,
   "feat_proj_layer_norm": true,
   "final_dropout": 0.1,
   "gradient_checkpointing": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9843b40e62cfa4da0ebd4ad705a7cbdd3973e424850b1dd0e607904e4815326e
+oid sha256:5caeafa899763913f44cfec1782804f50ca5604a6952500294656f77d57daaf3
 size 377666024
runs/Jun17_09-19-18_gweltaz-NUC10i7FNK/events.out.tfevents.1718608878.gweltaz-NUC10i7FNK.2354.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:234589d226698feada4d49e991476fe256c23e7307515ef1730e1c6f6d1b499b
-size 7853
+oid sha256:2b4fa3bcddf40efe68688d5306b404d43cff7cd2a2f18f41ee86e496e6100ade
+size 9581
runs/Jun17_10-42-14_gweltaz-NUC10i7FNK/events.out.tfevents.1718613848.gweltaz-NUC10i7FNK.3033.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46c42881ed415364877855bcaca0f00bc3f03cc37dbab8bec48f8309f0bb2bde
+size 6125
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bd3eb3b4127f818055fe54c6072af58f96c33156262764a1ade8415c411a912b
+oid sha256:26bb3daecab0e28f5552b8727fb71b74406ab4b349ad973a293bee412482e931
 size 4475