mtasic85 committed
Commit 026e389
1 Parent(s): 5121df2

pretrain mode

Files changed (1)
  1. scripts/pretrain-model.yaml +5 -5
scripts/pretrain-model.yaml CHANGED
@@ -21,11 +21,11 @@ model_config:
   mlp_class_name: "LLaMAMLP"
   intermediate_size: 4096
   rope_base: 500000
-  rope_adjustments:
-    factor: 32.0
-    low_freq_factor: 1.0
-    high_freq_factor: 4.0
-    original_max_seq_len: 8192
+  # rope_adjustments:
+  #   factor: 32.0
+  #   low_freq_factor: 1.0
+  #   high_freq_factor: 4.0
+  #   original_max_seq_len: 8192
 
 # Directory in which to save checkpoints and logs. If running in a Lightning Studio Job, look for it in
 # /teamspace/jobs/<job-name>/share. (type: <class 'Path'>, default: out/pretrain)
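
The commit comments out the rope_adjustments block, so this pretraining run uses plain RoPE with rope_base: 500000 rather than the Llama-3.1-style long-context frequency scaling those keys enable. As a rough illustration of what the disabled keys would otherwise do, the sketch below rescales low-frequency RoPE components by factor, leaves high-frequency ones unchanged, and blends smoothly in between; the function name and pure-Python form are assumptions for clarity, not litgpt's actual implementation.

    import math

    def adjust_rope_freqs(inv_freqs, factor=32.0, low_freq_factor=1.0,
                          high_freq_factor=4.0, original_max_seq_len=8192):
        # Illustrative Llama-3.1-style RoPE frequency adjustment, using the same
        # hyperparameter names as the commented-out YAML keys above.
        low_freq_wavelen = original_max_seq_len / low_freq_factor
        high_freq_wavelen = original_max_seq_len / high_freq_factor
        adjusted = []
        for freq in inv_freqs:
            wavelen = 2 * math.pi / freq
            if wavelen > low_freq_wavelen:
                # long wavelengths (low frequencies): stretch by `factor`
                adjusted.append(freq / factor)
            elif wavelen < high_freq_wavelen:
                # short wavelengths (high frequencies): leave unchanged
                adjusted.append(freq)
            else:
                # mid band: interpolate smoothly between scaled and unscaled
                smooth = (original_max_seq_len / wavelen - low_freq_factor) / (
                    high_freq_factor - low_freq_factor
                )
                adjusted.append((1 - smooth) * freq / factor + smooth * freq)
        return adjusted

With the block commented out, none of this scaling is applied and positions are encoded with the unmodified rope_base frequencies.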