moondream1 / config.json
{
  "_name_or_path": "vikhyatk/moondream1",
  "architectures": [
    "Moondream"
  ],
  "auto_map": {
    "AutoConfig": "vikhyatk/moondream1--configuration_moondream.MoondreamConfig",
    "AutoModelForCausalLM": "vikhyatk/moondream1--moondream.Moondream"
  },
  "custom_pipelines": {
    "visual-question-answering": {
      "default": {
        "model": {
          "pt": [
            "vikhyatk/moondream1",
            "main"
          ]
        }
      },
      "impl": "MyPipe.VQA",
      "pt": [
        "AutoModelForCausalLM"
      ],
      "tf": [],
      "type": "multimodal"
    }
  },
  "model_type": "moondream1",
  "phi_config": {
    "model_type": "phi-msft"
  },
  "torch_dtype": "float32",
  "transformers_version": "4.35.2"
}
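
The "auto_map" and "custom_pipelines" entries tell transformers to load the model class and the visual-question-answering pipeline implementation (the VQA class in MyPipe.py, per the "impl" field) from code files shipped in the repo rather than from the library itself. A minimal usage sketch, assuming this config and MyPipe.py live together in a Hub repo referred to below by the placeholder REPO_ID; trust_remote_code=True is required because both the model class and the pipeline come from remote code:

```python
from transformers import AutoModelForCausalLM, pipeline

# Placeholder: replace with the repo id that actually hosts this config and MyPipe.py.
REPO_ID = "vikhyatk/moondream1"

# auto_map: AutoModelForCausalLM resolves to the remote Moondream class
# defined in moondream.py inside the repo.
model = AutoModelForCausalLM.from_pretrained(REPO_ID, trust_remote_code=True)

# custom_pipelines: the "visual-question-answering" task is served by the
# VQA implementation registered under "impl" above.
vqa = pipeline(
    "visual-question-answering",
    model=REPO_ID,
    trust_remote_code=True,
)
```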