style
README.md CHANGED

@@ -7,7 +7,7 @@ sdk: gradio
 sdk_version: 4.19.2
 app_file: app.py
 pinned: true
-
+fullWidth: true
 ---

 An example chatbot using [Gradio](https://gradio.app), [`huggingface_hub`](https://huggingface.co/docs/huggingface_hub/v0.22.2/en/index), and the [Hugging Face Inference API](https://huggingface.co/docs/api-inference/index).
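For reference, the visible portion of the Space's front matter after this change would read roughly as follows (fields earlier in the file, such as the title and emoji, are not shown in this hunk and are omitted here); fullWidth: true presumably asks Spaces to render the Gradio app in a full-width frame:

# (unchanged fields above sdk: gradio are omitted)
sdk: gradio
sdk_version: 4.19.2
app_file: app.py
pinned: true
fullWidth: true
---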
app.py CHANGED

@@ -3,6 +3,7 @@ from openai import OpenAI
 import os
 from typing import List
 import logging
+import urllib.request

 # add logging info to console
 logging.basicConfig(level=logging.INFO)
@@ -10,13 +11,12 @@ logging.basicConfig(level=logging.INFO)

 BASE_URL = "https://api.together.xyz/v1"
 DEFAULT_API_KEY = os.getenv("TOGETHER_API_KEY")
-import urllib.request
 URIAL_VERSION = "inst_1k_v4.help"

-
-urial_prompt = urllib.request.urlopen(
-urial_prompt = urial_prompt.replace("```", '"""')
-
+URIAL_URL = f"https://raw.githubusercontent.com/Re-Align/URIAL/main/urial_prompts/{URIAL_VERSION}.txt"
+urial_prompt = urllib.request.urlopen(URIAL_URL).read().decode('utf-8')
+urial_prompt = urial_prompt.replace("```", '"""')  # new version of URIAL uses """ instead of ```
+STOP_STRS = ['"""', '# Query:', '# Answer:']

 def urial_template(urial_prompt, history, message):
     current_prompt = urial_prompt + "\n"
@@ -78,7 +78,7 @@ def respond(
     model_name,
     together_api_key
 ):
-    global
+    global STOP_STRS, urial_prompt
     rp = 1.0
     prompt = urial_template(urial_prompt, history, message)
     if model_name == "Llama-3-8B":
@@ -115,14 +115,14 @@ def respond(
         max_tokens=max_tokens,
         top_p=top_p,
         repetition_penalty=rp,
-        stop=
+        stop=STOP_STRS, api_key=api_key)

     response = ""
     for msg in request:
         # print(msg.choices[0].delta.keys())
         token = msg.choices[0].delta["content"]
         should_stop = False
-        for _stop in
+        for _stop in STOP_STRS:
             if _stop in response + token:
                 should_stop = True
                 break
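As a rough, self-contained sketch of what the app.py changes do: the URIAL prompt is downloaded once at import time, its ``` fences are normalized to """, and STOP_STRS serves both as API-level stop sequences and as a client-side check that cuts the streamed response at the first stop marker. URIAL_VERSION, URIAL_URL, STOP_STRS, and the inner truncation loop are taken from the diff above; truncate_stream and the example token list are hypothetical stand-ins for the streamed chat-completion deltas (msg.choices[0].delta["content"]) that app.py actually iterates over.

import urllib.request

# These names mirror the diff above.
URIAL_VERSION = "inst_1k_v4.help"
URIAL_URL = f"https://raw.githubusercontent.com/Re-Align/URIAL/main/urial_prompts/{URIAL_VERSION}.txt"

# Fetch the URIAL in-context prompt once and normalize its code fences,
# since newer URIAL prompt files use """ instead of ```.
urial_prompt = urllib.request.urlopen(URIAL_URL).read().decode("utf-8")
urial_prompt = urial_prompt.replace("```", '"""')

# Markers that signal the end of an answer in the URIAL prompt format.
STOP_STRS = ['"""', '# Query:', '# Answer:']

def truncate_stream(tokens, stop_strs=STOP_STRS):
    # `tokens` stands in for the streamed deltas app.py receives from the
    # Together-hosted chat-completion endpoint.
    response = ""
    for token in tokens:
        should_stop = False
        for _stop in stop_strs:
            if _stop in response + token:
                should_stop = True
                break
        if should_stop:
            break
        response += token
    return response.strip()

# Hypothetical token stream: output is cut before the stray "# Query:" marker.
print(truncate_stream(["Hi", " there", "!", "\n\n", "# Query:", " ..."]))
# -> Hi there!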