Commit 2b8f4a6
Parent(s): 48d2515
feat: explicitly pass the HF token
src/distilabel_dataset_generator/pipelines/sft.py (CHANGED)
@@ -1,3 +1,5 @@
+import os
+
 import pandas as pd
 from distilabel.llms import InferenceEndpointsLLM
 from distilabel.pipeline import Pipeline
@@ -139,6 +141,7 @@ def get_pipeline(num_turns, num_rows, system_prompt):
         llm=InferenceEndpointsLLM(
             model_id=MODEL,
             tokenizer_id=MODEL,
+            api_key=os.environ["HF_TOKEN"],
             magpie_pre_query_template="llama3",
             generation_kwargs={
                 "temperature": 0.8,  # it's the best value for Llama 3.1 70B Instruct
@@ -168,6 +171,7 @@ def get_pipeline(num_turns, num_rows, system_prompt):
 def get_prompt_generation_step():
     generate_description = TextGeneration(
         llm=InferenceEndpointsLLM(
+            api_key=os.environ["HF_TOKEN"],
             model_id=MODEL,
             tokenizer_id=MODEL,
             generation_kwargs={
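For context, a minimal sketch of how the LLM ends up being instantiated after this change. It assumes HF_TOKEN is exported in the environment (e.g. export HF_TOKEN=hf_xxx) and that MODEL is the module-level constant defined in sft.py; the example value below is an assumption based on the "Llama 3.1 70B Instruct" comment in the diff, not taken from the file.

import os

from distilabel.llms import InferenceEndpointsLLM

# Assumed value of the MODEL constant from sft.py (hypothetical here).
MODEL = "meta-llama/Meta-Llama-3.1-70B-Instruct"

# The Hugging Face token is now read from the environment and passed
# explicitly to the inference endpoints client.
llm = InferenceEndpointsLLM(
    model_id=MODEL,
    tokenizer_id=MODEL,
    api_key=os.environ["HF_TOKEN"],  # raises KeyError if HF_TOKEN is not set
    magpie_pre_query_template="llama3",
    generation_kwargs={"temperature": 0.8},
)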