rohankaran committed
Commit 4962c42
Parent: 13d0c78

Activate additional models in guardrails_models.py


This change uncomments previously disabled entries in guardrails_models.py. Re-enabling them in the get_all_models function makes the additional language models "gpt3.5-turbo-1106", "Llama-2-70b-chat-hf", and "Mixtral-8x7B-Instruct-v0.1" available again, along with their Llama Guard and NeMo Guardrails variants, alongside the already active Gemini-Pro entries.

Files changed (1)
  guardrails_models.py +40 -40
guardrails_models.py CHANGED
@@ -393,50 +393,50 @@ def gemini_pro_nemoguardrails(
 
 def get_all_models():
     return [
-        # {
-        #     "name": "gpt3.5-turbo-1106",
-        #     "model": gpt35_turbo,
-        # },
-        # {
-        #     "name": "Llama-2-70b-chat-hf",
-        #     "model": llama70B,
-        # },
-        # {
-        #     "name": "Mixtral-8x7B-Instruct-v0.1",
-        #     "model": mixtral7x8,
-        # },
+        {
+            "name": "gpt3.5-turbo-1106",
+            "model": gpt35_turbo,
+        },
+        {
+            "name": "Llama-2-70b-chat-hf",
+            "model": llama70B,
+        },
+        {
+            "name": "Mixtral-8x7B-Instruct-v0.1",
+            "model": mixtral7x8,
+        },
         {
             "name": "Gemini-Pro",
             "model": gemini_pro,
         },
-        # {
-        #     "name": "gpt3.5-turbo-1106 + Llama Guard",
-        #     "model": gpt35_turbo_llamaguard,
-        # },
-        # {
-        #     "name": "Llama-2-70b-chat-hf + Llama Guard",
-        #     "model": llama70B_llamaguard,
-        # },
-        # {
-        #     "name": "Mixtral-8x7B-Instruct-v0.1 + Llama Guard",
-        #     "model": mixtral7x8_llamaguard,
-        # },
-        # {
-        #     "name": "Gemini-Pro + Llama Guard",
-        #     "model": gemini_pro_llamaguard,
-        # },
-        # {
-        #     "name": "gpt3.5-turbo-1106 + NeMo Guardrails",
-        #     "model": gpt35_turbo_nemoguardrails,
-        # },
-        # {
-        #     "name": "Llama-2-70b-chat-hf + NeMo Guardrails",
-        #     "model": llama70B_nemoguardrails,
-        # },
-        # {
-        #     "name": "Mixtral-8x7B-Instruct-v0.1 + NeMo Guardrails",
-        #     "model": mixtral7x8_nemoguardrails,
-        # },
+        {
+            "name": "gpt3.5-turbo-1106 + Llama Guard",
+            "model": gpt35_turbo_llamaguard,
+        },
+        {
+            "name": "Llama-2-70b-chat-hf + Llama Guard",
+            "model": llama70B_llamaguard,
+        },
+        {
+            "name": "Mixtral-8x7B-Instruct-v0.1 + Llama Guard",
+            "model": mixtral7x8_llamaguard,
+        },
+        {
+            "name": "Gemini-Pro + Llama Guard",
+            "model": gemini_pro_llamaguard,
+        },
+        {
+            "name": "gpt3.5-turbo-1106 + NeMo Guardrails",
+            "model": gpt35_turbo_nemoguardrails,
+        },
+        {
+            "name": "Llama-2-70b-chat-hf + NeMo Guardrails",
+            "model": llama70B_nemoguardrails,
+        },
+        {
+            "name": "Mixtral-8x7B-Instruct-v0.1 + NeMo Guardrails",
+            "model": mixtral7x8_nemoguardrails,
+        },
         {
             "name": "Gemini-Pro + NeMo Guardrails",
             "model": gemini_pro_nemoguardrails,