---
library_name: transformers
base_model: openchat/openchat-3.5-0106
datasets:
- berkeley-nest/Nectar
license: apache-2.0
---
Training hyperparameters (DPO fine-tune of openchat-3.5-0106 with a LoRA adapter):

    max_steps = 1000
    learning_rate = 5e-7
    label_smoothing = 0.2  # somewhere between 0 and 0.5
    warmup_ratio = 0.1
    dpo_beta = 0.01
    use_rslora = False
    use_loftq = False
    lora_rank = 16
    lora_alpha = 16
    lora_dropout = 0.05
    load_separate_reference_model = False
    max_seq_length = 2048
    eval_steps = 200
    train_split = 0.008
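
For reference, below is a minimal sketch of how these values could be wired into TRL's `DPOTrainer` with a PEFT `LoraConfig`. This is not the exact training script: argument names (`DPOConfig`, `processing_class`, `eval_strategy`) vary across `trl`/`transformers` releases, the output directory and seed are placeholders, and the best-vs-worst pairing of Nectar's ranked answers (plus reading `train_split = 0.008` as a held-out eval slice) is an assumption.

```python
from datasets import load_dataset
from peft import LoraConfig
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import DPOConfig, DPOTrainer

model_name = "openchat/openchat-3.5-0106"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# LoRA adapter settings from the list above; rsLoRA and LoftQ stay disabled.
peft_config = LoraConfig(
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    use_rslora=False,
    task_type="CAUSAL_LM",
)

def to_dpo_format(example):
    # Assumes Nectar's "answers" field is a list of {"answer", "rank"} records,
    # rank 1 being best. Pairing best vs. worst is an illustrative choice, not
    # necessarily how the author built preference pairs.
    ranked = sorted(example["answers"], key=lambda a: a["rank"])
    return {
        "prompt": example["prompt"],
        "chosen": ranked[0]["answer"],
        "rejected": ranked[-1]["answer"],
    }

raw = load_dataset("berkeley-nest/Nectar", split="train")
dataset = raw.map(to_dpo_format, remove_columns=raw.column_names)
# Interpretation of train_split = 0.008: hold out a small slice for eval.
splits = dataset.train_test_split(test_size=0.008, seed=42)

training_args = DPOConfig(
    output_dir="openchat-nectar-dpo",  # placeholder output directory
    max_steps=1000,
    learning_rate=5e-7,
    warmup_ratio=0.1,
    beta=0.01,              # dpo_beta
    label_smoothing=0.2,
    max_length=2048,        # max_seq_length
    eval_strategy="steps",
    eval_steps=200,
)

# Passing peft_config makes DPOTrainer train a LoRA adapter and use the frozen
# base weights as the implicit reference, matching
# load_separate_reference_model = False.
trainer = DPOTrainer(
    model=model,
    args=training_args,
    train_dataset=splits["train"],
    eval_dataset=splits["test"],
    processing_class=tokenizer,
    peft_config=peft_config,
)
trainer.train()
```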