gregH committed on
Commit
6b44825
1 Parent(s): 3b92e26

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -163,7 +163,7 @@ def gradient_cuff_reject(message,with_defense, sample_times,perturb_times,thresh
     labels=get_labels(responses)
     results=[None for _ in range(perturb_times+1)]
     results[0]=sum(labels)/len(labels)
-    if results[0]>0.5:
+    if results[0]>=0.5:
         return (True,1-results[0],None)
 
     # second-stage rejection
@@ -216,7 +216,8 @@ def chat(message, history, with_defense,threshold):
     messages = tok.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
     # Tokenize the messages string
     input_ids = tok([messages], return_tensors="pt")["input_ids"]
-    response= "[Gradient Cuff Checking: "+reject_information + "]\n"+ chat_engine(input_ids)
+    #response= "[Gradient Cuff Checking: "+reject_information + "]\n"+ chat_engine(input_ids)
+    response=chat_engine(input_ids)
     response=response.split(" ")
 
     # Initialize an empty string to store the generated text
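Note: the sketch below illustrates the first-stage rejection check this commit adjusts. It assumes labels is the 0/1 refusal list produced by get_labels(responses) in app.py; the example values are placeholders, not taken from the repository. The comparison changes from > to >=, so a refusal rate of exactly 0.5 now also triggers an immediate rejection. The second hunk simply drops the "[Gradient Cuff Checking: ...]" prefix, so the reply returned by chat_engine(input_ids) is emitted without the defense banner.

def first_stage_reject(labels, perturb_times):
    # Fraction of refusals among the sampled responses to the unperturbed query.
    results = [None for _ in range(perturb_times + 1)]
    results[0] = sum(labels) / len(labels)
    # Behaviour after this commit: >= instead of >, so a rate of exactly 0.5 is rejected too.
    if results[0] >= 0.5:
        return (True, 1 - results[0], None)
    return (False, 1 - results[0], results)

# Example: 5 refusals out of 10 samples -> rejected under the new >= check.
print(first_stage_reject([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], perturb_times=2))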