{
  "model": "3rd-Degree-Burn/Llama-Squared-8B",
  "base_model": "",
  "revision": "f30737e92b3a3fa0ef2a3f3ade487cc94ad34400",
  "precision": "bfloat16",
  "params": 8.03,
  "architectures": "LlamaForCausalLM",
  "weight_type": "Original",
  "status": "RUNNING",
  "submitted_time": "2024-10-08T13:15:12Z",
  "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
  "job_id": "9420334",
  "job_start_time": "2024-10-08T13:15:40.972382",
  "use_chat_template": true,
  "sender": "3rd-Degree-Burn"
}
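
This record is an evaluation request in the style of the Open LLM Leaderboard requests repository: one JSON file per submission, tracking the model's Hub repo id, precision, and job lifecycle. Below is a minimal sketch of how such a record might be loaded and sanity-checked in Python; the file name, the set of allowed status values, and the helper name are assumptions for illustration, not part of the record itself:

```python
import json
from datetime import datetime

# Assumed lifecycle states, inferred from the "status" field above.
VALID_STATUSES = {"PENDING", "RUNNING", "FINISHED", "FAILED"}

def load_request(path: str) -> dict:
    """Load an eval request record and run basic sanity checks (illustrative helper)."""
    with open(path, encoding="utf-8") as f:
        request = json.load(f)

    # "model" doubles as the Hub repo id, e.g. "3rd-Degree-Burn/Llama-Squared-8B".
    assert "/" in request["model"], "expected a Hub repo id like 'org/name'"

    # "status" should be one of the known lifecycle states.
    assert request["status"] in VALID_STATUSES, f"unknown status: {request['status']}"

    # "submitted_time" is ISO 8601 with a trailing 'Z' (UTC); this parses it.
    datetime.fromisoformat(request["submitted_time"].replace("Z", "+00:00"))

    return request

# Hypothetical file name; the requests repo stores one such JSON file per submission.
record = load_request("Llama-Squared-8B_eval_request_bfloat16_Original.json")
print(record["model"], record["status"], record["params"], "B params")
```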