Update hf_benchmark_example.py
hf_benchmark_example.py (+2 −2)
@@ -4,10 +4,10 @@ You need a file called "sample.txt" (default path) with text to take tokens for
 You can use our attached "sample.txt" file with one of Deci's blogs as a prompt.

 # Run this and record tokens per second (652 tokens per second on A10 for DeciLM-6b)
-python
+python hf_benchmark_example.py --model Deci/DeciLM-6b

 # Run this and record tokens per second (136 tokens per second on A10 for meta-llama/Llama-2-7b-hf), CUDA OOM above batch size 8
-python
+python hf_benchmark_example.py --model meta-llama/Llama-2-7b-hf --batch_size 8
 """

 import json
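The commands above run the script end to end and report generation throughput. For orientation, the following is a minimal sketch of that kind of measurement, not the actual script: the --model and --batch_size arguments and the sample.txt prompt file come from the docstring, while everything else (fp16 weights, max_new_tokens=128, the truncation length, and the timing loop) is assumed and may differ from the real hf_benchmark_example.py.

# Minimal sketch of a tokens-per-second benchmark (assumed structure,
# not the actual hf_benchmark_example.py).
import argparse
import time

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--model", required=True)               # e.g. Deci/DeciLM-6b
    parser.add_argument("--batch_size", type=int, default=1)
    parser.add_argument("--sample_file", default="sample.txt")  # prompt text source
    args = parser.parse_args()

    tokenizer = AutoTokenizer.from_pretrained(args.model, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        args.model, torch_dtype=torch.float16, trust_remote_code=True
    ).to("cuda")

    with open(args.sample_file) as f:
        prompt = f.read()

    # Repeat the same prompt across the batch; truncate so long prompts fit the context.
    inputs = tokenizer(
        [prompt] * args.batch_size,
        return_tensors="pt",
        truncation=True,
        max_length=1024,
    ).to("cuda")

    start = time.perf_counter()
    outputs = model.generate(**inputs, max_new_tokens=128)
    torch.cuda.synchronize()
    elapsed = time.perf_counter() - start

    # Count only newly generated tokens across the whole batch.
    new_tokens = (outputs.shape[-1] - inputs["input_ids"].shape[-1]) * args.batch_size
    print(f"{new_tokens / elapsed:.1f} tokens per second")


if __name__ == "__main__":
    main()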