kanjileon-7b-vqvae-hf / config.json
{
  "_name_or_path": "leloy/Anole-7b-v0.1-vqvae-hf",
  "architectures": [
    "ChameleonVQVAE"
  ],
  "attn_resolutions": [],
  "attn_type": "vanilla",
  "base_channels": 128,
  "ch": 128,
  "ch_mult": [
    1,
    1,
    2,
    2,
    4
  ],
  "channel_multiplier": [
    1,
    1,
    2,
    2,
    4
  ],
  "double_latent": false,
  "double_z": false,
  "dropout": 0.0,
  "embed_dim": 256,
  "in_channels": 3,
  "initializer_range": 0.02,
  "latent_channels": 256,
  "model_type": "chameleon_vqgan",
  "n_embed": 8192,
  "num_embeddings": 8192,
  "num_res_blocks": 2,
  "out_ch": 3,
  "out_channels": 3,
  "quant_state_dims": [
    32,
    32
  ],
  "resolution": 128,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.44.0.dev0",
  "z_channels": 256
}
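
Below is a minimal sketch of instantiating the VQ-VAE this config describes, assuming the repo id recorded in "_name_or_path" above and the Chameleon VQ-VAE classes bundled with transformers (their import paths are internal and may move between versions). Weight loading is omitted: depending on the transformers version, ChameleonVQVAE may not expose from_pretrained, in which case the safetensors state dict would need to be loaded manually.

```python
import torch
from transformers.models.chameleon.configuration_chameleon import ChameleonVQVAEConfig
from transformers.models.chameleon.modeling_chameleon import ChameleonVQVAE

# Repo id taken from "_name_or_path"; substitute this repo's own id if it differs.
config = ChameleonVQVAEConfig.from_pretrained("leloy/Anole-7b-v0.1-vqvae-hf")
model = ChameleonVQVAE(config).to(torch.bfloat16).eval()  # randomly initialized here

# Encode one RGB image at the configured resolution. The encoder halves the
# spatial size once per channel_multiplier level except the last, so the code
# grid is resolution / 2**(len(channel_multiplier) - 1) per side, with each
# position quantized against the 8192-entry codebook ("num_embeddings").
pixels = torch.randn(1, config.in_channels, config.resolution, config.resolution,
                     dtype=torch.bfloat16)
with torch.no_grad():
    quant, emb_loss, indices = model.encode(pixels)  # indices: discrete image tokens
```

Note that the Chameleon integration in upstream transformers of this era ships only the encoder side of the VQGAN (for conditioning the language model on images); decoding image tokens back to pixels relies on forks such as the Anole one that add the decoder.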