Commit: Add usage (Browse files)

README.md — CHANGED
```diff
@@ -94,6 +94,20 @@ python3 zipnn_decompress_path.py --path .
 ```

 Now just run the local version of the model.
+
+# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+messages = [
+    {"role": "user", "content": "Who are you?"},
+]
+pipe = pipeline("text-generation", model="PATH_TO_MODEL")  # "." if in directory
+pipe(messages)
+
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("PATH_TO_MODEL")  # "." if in directory
+model = AutoModelForCausalLM.from_pretrained("PATH_TO_MODEL")  # "." if in directory

 ## Prompt Template
```