ZeroCool94 committed on
Commit 837357f
1 Parent(s): 1808282

Upload 4 files

maskgit.160000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:871579412b077c582830ea80076fb0ba66f40015142238b3c1e94a604a8d27b6
+ size 804561724
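
The .pt checkpoints are stored as Git LFS pointers, so the repository itself only carries each file's sha256 oid and byte size. A minimal sketch (plain standard-library Python; the local file name is assumed, the oid and size are copied from the pointer above) for checking a downloaded checkpoint against its pointer:

    import hashlib
    import os

    def verify_lfs_object(path, expected_oid, expected_size):
        """Compare a downloaded file against the oid/size from its LFS pointer."""
        sha = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                sha.update(chunk)
        return sha.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

    # Values copied from the pointer above.
    ok = verify_lfs_object(
        "maskgit.160000.pt",
        "871579412b077c582830ea80076fb0ba66f40015142238b3c1e94a604a8d27b6",
        804561724,
    )
    print("checksum matches:", ok)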
maskgit.160000.pt.yaml ADDED
@@ -0,0 +1,75 @@
+ total_params: 66889174
+ image_size: 128
+ num_tokens: 8192
+ num_train_steps: -1
+ num_epochs: 20
+ dim: 64
+ channels: 4
+ batch_size: 1
+ lr: 0.0001
+ gradient_accumulation_steps: 100
+ save_results_every: 500
+ save_model_every: 5000
+ vq_codebook_size: 8192
+ vq_codebook_dim: 8192
+ lr_scheduler: cosine_with_restarts
+ lr_warmup_steps: 100
+ seq_len: 8192
+ depth: 4
+ dim_head: 64
+ heads: 8
+ ff_mult: 4
+ t5_name: t5-small
+ mixed_precision: 'no'
+ cond_image_size: null
+ validation_prompt: a girl|a cat|a dog
+ timesteps: 18
+ optimizer: Adam
+ only_save_last_checkpoint: false
+ validation_image_scale: 1.0
+ no_center_crop: false
+ no_flip: false
+ dataset_save_path: E:\cached_datasets\INE2
+ clear_previous_experiments: false
+ max_grad_norm: null
+ seed: 42
+ valid_frac: 0.05
+ use_ema: false
+ ema_beta: 0.995
+ ema_update_after_step: 1
+ ema_update_every: 1
+ apply_grad_penalty_every: 4
+ image_column: image
+ caption_column: caption
+ log_with: wandb
+ use_8bit_adam: false
+ results_dir: results\Muse-v0.10
+ logging_dir: null
+ vae_path: results\Muse-v0.10\vae.1456000.pt
+ dataset_name: null
+ hf_split_name: null
+ streaming: false
+ train_data_dir: E:\dataset
+ checkpoint_limit: null
+ cond_drop_prob: 0.5
+ scheduler_power: 1.0
+ num_cycles: 200
+ resume_path: results\Muse-v0.10\maskgit.143000.pt
+ taming_model_path: null
+ taming_config_path: null
+ weight_decay: 0.0001
+ cache_path: null
+ no_cache: true
+ link: false
+ latest_checkpoint: true
+ do_not_save_config: false
+ use_l2_recon_loss: false
+ debug: false
+ config_path: null
+ attention_type: ein
+ random_crop: true
+ project_name: muse_maskgit
+ run_name: null
+ wandb_user: sygil
+ layers: 4
+ discr_layers: 4
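
The companion .yaml is a flat key/value dump of the training arguments used for this MaskGit checkpoint. As a rough illustration (assuming only that PyYAML is installed; the file name matches the checkpoint above), it can be read back like any other YAML file to recover the hyperparameters:

    import yaml  # PyYAML

    with open("maskgit.160000.pt.yaml", "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)  # plain dict of the keys listed above

    # A few of the values recorded for this run.
    print(cfg["image_size"])                            # 128
    print(cfg["vq_codebook_size"])                      # 8192
    print(cfg["depth"], cfg["heads"], cfg["dim_head"])  # 4 8 64
    print(cfg["resume_path"])                           # results\Muse-v0.10\maskgit.143000.pt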
vae.14120000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:edce56ad8b9f51fbf872d29b23e7028784863643b1e448738f81dd4f0fc9a5fc
+ size 577198907
vae.14120000.pt.yaml ADDED
@@ -0,0 +1,65 @@
+ total_params: 10067428
+ image_size: 256
+ num_train_steps: -1
+ num_epochs: 50
+ batch_size: 1
+ lr: 0.0001
+ lr_warmup_steps: 1
+ lr_scheduler: constant
+ gradient_accumulation_steps: 2000
+ save_results_every: 500
+ save_model_every: 5000
+ dim: 32
+ vq_codebook_size: 8192
+ vq_codebook_dim: 8192
+ seq_len: 1024
+ channels: 3
+ layers: 4
+ discr_layers: 4
+ scheduler_power: 1.0
+ num_cycles: 200
+ only_save_last_checkpoint: false
+ validation_image_scale: 1.0
+ no_center_crop: false
+ no_flip: false
+ random_crop: true
+ dataset_save_path: E:/cached_datasets/INE
+ clear_previous_experiments: false
+ max_grad_norm: null
+ discr_max_grad_norm: null
+ seed: 42
+ valid_frac: 0.05
+ use_ema: false
+ ema_beta: 0.995
+ ema_update_after_step: 1
+ ema_update_every: 1
+ apply_grad_penalty_every: 4
+ image_column: image
+ caption_column: caption
+ log_with: wandb
+ mixed_precision: 'no'
+ use_8bit_adam: false
+ results_dir: results\Muse-v0.9_test
+ logging_dir: null
+ resume_path: results\Muse-v0.9_test\vae.14020000.pt
+ dataset_name: null
+ streaming: false
+ train_data_dir: F:/Hydrus Files
+ checkpoint_limit: null
+ cond_drop_prob: 0.5
+ taming_model_path: null
+ taming_config_path: null
+ optimizer: Adam
+ weight_decay: 0.0001
+ cache_path: null
+ no_cache: true
+ latest_checkpoint: true
+ do_not_save_config: false
+ use_l2_recon_loss: false
+ debug: false
+ config_path: null
+ webdataset: null
+ project_name: muse_vae
+ run_name: null
+ wandb_user: null
+ hf_split_name: train
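
The internal layout of the VAE checkpoint itself is not documented here, so the following is only a sketch under the assumption that vae.14120000.pt can be deserialized with torch.load and is (or wraps) a state_dict whose tensors can be counted; the result should be in the same ballpark as the total_params value recorded in the YAML above.

    import torch

    obj = torch.load("vae.14120000.pt", map_location="cpu")
    # Assumption: the checkpoint is a dict, either a raw state_dict or a
    # wrapper with a "model" key; adjust the key if the real layout differs.
    state = obj["model"] if isinstance(obj, dict) and "model" in obj else obj
    n_params = sum(t.numel() for t in state.values() if torch.is_tensor(t))
    print("parameter count:", n_params)  # YAML above records total_params: 10067428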