---
mode: eval
push_to_hub: false
gold_eval: none
## model & dataset settings (costa's checkpoints)
model_name: vwxyzjn/EleutherAI_pythia-1b-deduped__dpo__tldr
model_revision: dpo__55513__1707379566
ref_model_name: vwxyzjn/EleutherAI_pythia-1b-deduped__sft__tldr
ref_model_revision: sft__55513__1706646024
dataset_name: vwxyzjn/summarize_from_feedback_oai_preprocessing_1706381144
tokenizer_name: EleutherAI/pythia-1b-deduped
prompt_field: query
eval_split: validation
max_prompt_length: 512
max_target_length: 169
max_length: 638
## Hugging Face Hub settings
push_to_hub_organization: mnoukhov
## training hyperparameters
eval_steps: 0.2
save_steps: 0.2
beta: 0.5
max_steps: -1
num_train_epochs: 2
load_in_8bit: false
bf16: true
fp16: false
learning_rate: 1.0e-5  # decimal point required so YAML 1.1 parsers (e.g. PyYAML) read a float, not the string "1e-5"
use_peft: false
lora_all_linear: true
lora_r: 8
lora_alpha: 32
lora_dropout: 0.05
gradient_accumulation_steps: 4
per_device_train_batch_size: 4
per_device_eval_batch_size: 4
warmup_steps: 150