djstrong committed on
Commit 7a002d0
1 Parent(s): 74748ac
Files changed (1)
  1. app.py +30 -0
app.py CHANGED
@@ -9,6 +9,8 @@ import torch
 import spaces
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextIteratorStreamer
+from huggingface_hub import HfApi
+from datetime import datetime

 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)

@@ -21,6 +23,9 @@ EMOJI = os.environ.get("EMOJI")
 DESCRIPTION = os.environ.get("DESCRIPTION")

 DISCORD_WEBHOOK = os.environ.get("DISCORD_WEBHOOK")
+TOKEN = os.environ.get("TOKEN")
+
+

 def send_discord(i,o):
     url = DISCORD_WEBHOOK
@@ -111,6 +116,31 @@ def predict(message, history, system_prompt, temperature, max_new_tokens, top_k,
     send_discord(instruction, "".join(outputs))


+    api = HfApi()
+    day=datetime.now().strftime("%Y-%m-%d")
+    timestamp=datetime.now().timestamp()
+    dd={
+        'message': message,
+        'history': history,
+        'system_prompt':system_prompt,
+        'temperature':temperature,
+        'max_new_tokens':max_new_tokens,
+        'top_k':top_k,
+        'repetition_penalty':repetition_penalty,
+        'top_p':top_p,
+        'instruction':instruction,
+        'output':"".join(outputs)
+    }
+    api.upload_file(
+        path_or_fileobj=json.dumps(dd, indent=2, ensure_ascii=False).encode('utf-8'),
+        path_in_repo=f"{day}/{timestamp}.json",
+        repo_id="speakleash/bielik-logs",
+        repo_type="dataset",
+        commit_message=f"X",
+        token=TOKEN,
+        run_as_future=True
+    )
+
 # Load model
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 quantization_config = BitsAndBytesConfig(
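
Note on the change: the added block writes each chat turn as a JSON file to the speakleash/bielik-logs dataset repo, and because run_as_future=True makes HfApi.upload_file return a concurrent.futures.Future immediately, the upload does not block the response in predict. Below is a minimal standalone sketch of that pattern; the repo id and the TOKEN environment variable are taken from the diff, while the payload and variable names here are illustrative and not part of the commit.

import json
import os
from datetime import datetime

from huggingface_hub import HfApi

api = HfApi()

# Stand-in for the real log record built inside predict().
payload = {"message": "hello", "output": "world"}

day = datetime.now().strftime("%Y-%m-%d")
timestamp = datetime.now().timestamp()

# run_as_future=True returns a concurrent.futures.Future right away,
# so the caller is not blocked while the log file is pushed to the dataset repo.
future = api.upload_file(
    path_or_fileobj=json.dumps(payload, indent=2, ensure_ascii=False).encode("utf-8"),
    path_in_repo=f"{day}/{timestamp}.json",
    repo_id="speakleash/bielik-logs",
    repo_type="dataset",
    token=os.environ.get("TOKEN"),
    run_as_future=True,
)

# Fire-and-forget keeps latency out of the chat handler; call future.result()
# only if upload errors should be surfaced instead of silently ignored.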