"""Gradio tool for renaming a model on the Open LLM Leaderboard: it rewrites
the matching files in the open-llm-leaderboard requests, results and details
datasets and opens a pull request on each of them."""

import json

import gradio as gr
from huggingface_hub import (CommitOperationAdd, CommitOperationDelete,
                             HfFileSystem, create_commit, login)

# Filesystem-style client for listing and reading files in Hub datasets.
fs = HfFileSystem()


def remove_from(text, from_model, to_model):
  """Replace every occurrence of the old model name with the new one."""
  return text.replace(from_model, to_model)


def return_operation_requests(from_model, to_model):
  """Build commit operations that rename the model's eval request files."""
  # List every request file for this model under its org's folder.
  paths = [i['name'] for i in fs.ls(path=f'datasets/open-llm-leaderboard/requests/{from_model.split("/")[0]}') if from_model in i['name']]
  operations = []

  for path_for in paths:
    request = json.loads(fs.read_text(path_for))
    request['model'] = to_model
    will_write = json.dumps(request)

    # Strip the "datasets/open-llm-leaderboard/requests/" prefix to get the
    # in-repo path, then add the renamed file and delete the old one.
    operations.extend([CommitOperationAdd(path_in_repo="/".join(remove_from(path_for, from_model, to_model).split("/")[3:]), path_or_fileobj=will_write.encode()),
                       CommitOperationDelete(path_in_repo="/".join(path_for.split("/")[3:]))])

  return operations


def return_operation_results(from_model, to_model):
  """Build commit operations that rename the model's results files."""
  # List every results file stored under results/<from_model>.
  paths = [i['name'] for i in fs.ls(path=f'datasets/open-llm-leaderboard/results/{from_model}') if from_model in i['name']]
  operations = []

  for path_for in paths:
    result = json.loads(fs.read_text(path_for))
    result['config_general']['model_name'] = to_model
    will_write = json.dumps(result, indent=2)

    # Add the file under the renamed path and delete the old one.
    operations.extend([CommitOperationAdd(path_in_repo="/".join(remove_from(path_for, from_model, to_model).split("/")[3:]), path_or_fileobj=will_write.encode()),
                       CommitOperationDelete(path_in_repo="/".join(path_for.split("/")[3:]))])

  return operations



def model_name_to_details(model_name):
  """Map "org/model" to its details dataset path (details_org__model)."""
  org, name = model_name.split('/', 1)
  return f"datasets/open-llm-leaderboard/details_{org}__{name}"


def return_operation_details(from_model, to_model):
  """Build commit operations that update the model's details dataset in place."""
  details_path = model_name_to_details(from_model)
  # Pick up the results_*.json files; their config records the model name.
  paths = [i['name'] for i in fs.ls(path=details_path) if ("results" in i['name'] and ".json" in i['name'])]
  operations = []

  for path_for in paths:
    details = json.loads(fs.read_text(path_for))
    details['config_general']['model_name'] = to_model
    will_write = json.dumps(details, indent=2)

    operations.append(CommitOperationAdd(path_in_repo="/".join(path_for.split("/")[3:]), path_or_fileobj=will_write.encode()))

  # Rewrite the dataset card once, outside the loop, so the commit does not
  # contain duplicate operations on README.md.
  readme_file = fs.read_text(details_path + "/README.md").replace(from_model, to_model)
  operations.append(CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=readme_file.encode()))

  return operations

def commit(operations_requests, operations_results, operations_details, details_path, from_model, to_model):
  """Open a renaming pull request on each of the three leaderboard datasets and return the PR URLs."""
  message = f"Renaming Model {from_model} to {to_model}"
  request_commit = create_commit(repo_id="open-llm-leaderboard/requests", operations=operations_requests, commit_message=message, repo_type="dataset", create_pr=True).pr_url
  result_commit = create_commit(repo_id="open-llm-leaderboard/results", operations=operations_results, commit_message=message, repo_type="dataset", create_pr=True).pr_url
  # details_path starts with "datasets/", so strip that prefix to get the repo_id.
  details_commit = create_commit(repo_id="/".join(details_path.split("/")[1:]), operations=operations_details, commit_message=message, repo_type="dataset", create_pr=True).pr_url
  return request_commit, result_commit, details_commit


def commit_gradio(from_model, to_model, hf_token):
  try:
    login(hf_token)
    urls = commit(return_operation_requests(from_model, to_model),
                  return_operation_results(from_model, to_model),
                  return_operation_details(from_model, to_model),
                  model_name_to_details(from_model), from_model, to_model)
    # Join the three PR URLs so they fit the single text output below.
    return "\n".join(urls)
  except Exception as e:
    return str(e)


demo = gr.Interface(fn=commit_gradio,
                    inputs=[gr.Textbox(label="From model"), gr.Textbox(label="To model"), gr.Textbox(label="Hugging Face token", type="password")],
                    outputs="text")

demo.launch(debug=True)
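
# A minimal sketch of the same flow without the UI; "org/old-name",
# "org/new-name" and the "hf_..." token are hypothetical placeholders:
#
#   print(commit_gradio("org/old-name", "org/new-name", "hf_..."))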