BioMike committed on
Commit 048ae3a
1 Parent(s): 530d8ad

Upload folder using huggingface_hub

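The commit message says the folder was pushed with huggingface_hub. A minimal sketch of that kind of upload, with the repo id and local path as hypothetical placeholders rather than values from this commit:

# Sketch only: push a local checkpoint folder to the Hub with huggingface_hub.
# folder_path and repo_id are placeholders, not taken from this commit.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="./checkpoint",              # local folder holding config + weights
    repo_id="your-org/your-gliner-model",    # hypothetical target repository
    commit_message="Upload folder using huggingface_hub",
)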
gliner_config.json CHANGED
@@ -93,16 +93,17 @@
   },
   "ent_token": "<<ENT>>",
   "entity_linking": null,
-  "eval_every": 2000,
+  "eval_every": 4000,
   "fine_tune": true,
-  "freeze_token_rep": false,
+  "freeze_labels_encoder": true,
+  "freeze_token_rep": true,
   "fuse_layers": false,
   "has_rnn": true,
   "hidden_size": 768,
   "label_smoothing": 0,
-  "labels_encoder": "BAAI/bge-small-en-v1.5",
+  "labels_encoder": "BAAI/bge-base-en-v1.5",
   "labels_encoder_config": {
-    "_name_or_path": "BAAI/bge-small-en-v1.5",
+    "_name_or_path": "BAAI/bge-base-en-v1.5",
     "add_cross_attention": false,
     "architectures": [
       "BertModel"
@@ -124,14 +125,15 @@
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
+    "gradient_checkpointing": false,
     "hidden_act": "gelu",
     "hidden_dropout_prob": 0.1,
-    "hidden_size": 384,
+    "hidden_size": 768,
     "id2label": {
       "0": "LABEL_0"
     },
     "initializer_range": 0.02,
-    "intermediate_size": 1536,
+    "intermediate_size": 3072,
     "is_decoder": false,
     "is_encoder_decoder": false,
     "label2id": {
@@ -184,7 +186,7 @@
   "loss_gamma": 2,
   "loss_reduction": "sum",
   "lr_encoder": "1e-5",
-  "lr_others": "5e-5",
+  "lr_others": "4e-5",
   "max_grad_norm": 10.0,
   "max_len": 768,
   "max_neg_type_ratio": 5,
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b7d96341caa260fc0defa1a5772d59ee92083519801d7c16351652d69d77fa14
-size 1871270621
+oid sha256:7d73af055e9515182f8d016db2d04fc493b3a7c7a5deedc090c19271a581d71f
+size 2474339415
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f095f5fb2ff0efe06f1f81a0105f971492ce232fd5e4976cbd88747981a97e8f
-size 950390010
+oid sha256:2b6f53507c79452abbde53428edb349783c27583a4a616c404b526bb2e813946
+size 1253695538
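The weight file grows by roughly 300 MB, which lines up with swapping the labels encoder from bge-small to bge-base stored as fp32. A back-of-the-envelope check, assuming approximate published parameter counts (about 33M for bge-small-en-v1.5 and 109M for bge-base-en-v1.5):

# Rough estimate only; parameter counts are approximate, not read from this repo.
extra_params = 109_000_000 - 33_000_000   # bge-base minus bge-small
extra_bytes = extra_params * 4            # fp32, 4 bytes per weight
print(extra_bytes)                        # ~304 MB vs. 1253695538 - 950390010 ≈ 303 MB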
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b11ed70319d07efe212e384137239ec6805b6350023784aa998107f6e0ef723
+oid sha256:cf34449043f04302c6bb4d2f004c142d51f43d7840ce0bb53878a0c4b8924df8
 size 14244
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:43c6a5740ed5cabec84a062e3e61a7c21e18017a77aaf7708eb6bf7f9eb315f6
+oid sha256:1b1094cbb6a2c31c7b30bd40b4f554fa44434de4a1082e70ab3211446b12306d
 size 1064
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff