#!/usr/bin/env bash
# Distil a student Whisper model from the NbAiLab/nb-whisper-large teacher on the
# Norwegian NCC speech corpus, evaluating on the Norwegian Fleurs validation split.
TOKENIZERS_PARALLELISM=false python3 run_distillation_nodes.py \
  --model_name_or_path "./nb-distil-large-init" \
  --teacher_model_name_or_path "NbAiLab/nb-whisper-large" \
  --train_dataset_name "NbAiLab/annotated_distil_raw_ncc_speech_v7_compact8_large" \
  --train_dataset_config_name "no" \
  --train_split_name "train" \
  --eval_dataset_name "NbAiLab/annotated_distil_raw_ncc_speech_v7_compact8_large" \
  --eval_dataset_config_name "no" \
  --eval_split_name "validation_norwegian_fleurs" \
  --eval_steps 10 \
  --save_steps 1000 \
  --warmup_steps 100 \
  --learning_rate 0.0001 \
  --lr_scheduler_type "linear" \
  --logging_steps 25 \
  --save_total_limit 1 \
  --max_steps 10000 \
  --wer_threshold 10 \
  --per_device_train_batch_size 16 \
  --per_device_eval_batch_size 16 \
  --dataloader_num_workers 32 \
  --dtype "bfloat16" \
  --output_dir "./" \
  --do_train \
  --do_eval \
  --use_scan \
  --gradient_checkpointing \
  --overwrite_output_dir \
  --predict_with_generate \
  --freeze_encoder \
  --streaming \
  --use_auth_token \
  --report_to "wandb" \
  --wandb_project "nb-distil-whisper-large-test3" \
  --hub_model_id "NbAiLab/nb-distil-whisper-large-flax1-no" \
  --push_to_hub
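
# Usage (a minimal sketch; the filename run_distillation.sh is an assumption,
# not taken from the repo). The --use_auth_token, --push_to_hub, and
# --report_to "wandb" flags assume prior authentication:
#   huggingface-cli login     # Hub credentials for dataset access and model push
#   wandb login               # Weights & Biases credentials for logging
#   bash run_distillation.sh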