tanyuzhou committed
Commit 0b203f0
1 Parent(s): d290873

Update app.py

Files changed (1)
app.py +5 -1
app.py CHANGED
@@ -6,8 +6,12 @@ from transformers import TextStreamer
 
 import spaces
 
+quantization_config = BitsAndBytesConfig(
+    load_in_4bit=True, bnb_4bit_compute_dtype=torch.float16
+)
+
 # Load model and tokenizer
-model = AutoModelForCausalLM.from_pretrained("Rorical/0-roleplay", return_dict=True, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("Rorical/0-roleplay", return_dict=True, trust_remote_code=True, quantization_config=quantization_config)
 tokenizer = AutoTokenizer.from_pretrained("Rorical/0-roleplay", trust_remote_code=True)
 tokenizer.chat_template = "{% for message in messages %}{{'' + ((message['role'] + '\n') if message['role'] != '' else '') + message['content'] + '' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '星野\n' }}{% endif %}"
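For context, a minimal sketch of how the model-loading section reads after this change. It assumes `torch` and `BitsAndBytesConfig` are imported near the top of app.py (the hunk above does not show those import lines) and that the `bitsandbytes` package is installed in the Space; this is an illustration, not the full file.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Store the model weights in 4-bit precision; matmuls are computed in float16.
quantization_config = BitsAndBytesConfig(
    load_in_4bit=True, bnb_4bit_compute_dtype=torch.float16
)

# Load model and tokenizer, passing the quantization config so the
# checkpoint is quantized on load instead of kept in full precision.
model = AutoModelForCausalLM.from_pretrained(
    "Rorical/0-roleplay",
    return_dict=True,
    trust_remote_code=True,
    quantization_config=quantization_config,
)
tokenizer = AutoTokenizer.from_pretrained("Rorical/0-roleplay", trust_remote_code=True)
```

Loading in 4-bit cuts the model's GPU memory footprint to roughly a quarter of the float16 size, which is the usual reason for a change like this on a shared Space GPU.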