aminabbasi committed
Commit 75b616c
Parent(s): f1ca906

Upload 16 files

demo-leaderboard/Aya-23_35B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Aya-23-35B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.2107
+     },
+     "1-shot": {
+       "acc": 0.1047
+     },
+     "5-shot": {
+       "acc": 0.22690000000000002
+     }
+   }
+ }
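Each uploaded file follows the same schema: a `config` block with the evaluation dtype and model name, and a `results` block keyed by shot setting with an `acc` score. A minimal sketch of reading one of these entries with the standard-library `json` module (the path is the file added above; the printed values are taken from it):

```python
import json

# Load one leaderboard entry added in this commit.
with open("demo-leaderboard/Aya-23_35B.json") as f:
    entry = json.load(f)

print(entry["config"]["model_name"])      # "Aya-23-35B"
print(entry["results"]["0-shot"]["acc"])  # 0.2107
```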
demo-leaderboard/Aya-23_8B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Aya-23-8B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.39640000000000003
+     },
+     "1-shot": {
+       "acc": 0.4142
+     },
+     "5-shot": {
+       "acc": 0.2702
+     }
+   }
+ }
demo-leaderboard/Gemma_1.1_it_7B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Gemma-1.1-it-7B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.4307
+     },
+     "1-shot": {
+       "acc": 0.4068
+     },
+     "5-shot": {
+       "acc": 0.2757
+     }
+   }
+ }
demo-leaderboard/Llama-3.1_Instruct_70B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Llama-3.1-Instruct-70B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.7034
+     },
+     "1-shot": {
+       "acc": 0.6783
+     },
+     "5-shot": {
+       "acc": 0.7040000000000001
+     }
+   }
+ }
demo-leaderboard/Llama-3.1_Instruct_8B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Llama-3.1-Instruct-8B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.45890000000000003
+     },
+     "1-shot": {
+       "acc": 0.41359999999999997
+     },
+     "5-shot": {
+       "acc": 0.3578
+     }
+   }
+ }
demo-leaderboard/Llama-3_Instruct_70B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Llama-3-Instruct-70B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.1954
+     },
+     "1-shot": {
+       "acc": 0.0931
+     },
+     "5-shot": {
+       "acc": 0.005
+     }
+   }
+ }
demo-leaderboard/Llama-3_Instruct_8B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Llama-3-Instruct-8B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.33880000000000005
+     },
+     "1-shot": {
+       "acc": 0.1066
+     },
+     "5-shot": {
+       "acc": 0.34490000000000004
+     }
+   }
+ }
demo-leaderboard/PersianLLaMA_13B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PersianLLaMA-13B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.20129999999999998
+     },
+     "1-shot": {
+       "acc": 0.1852
+     },
+     "5-shot": {
+       "acc": 0.1989
+     }
+   }
+ }
demo-leaderboard/PersianMind_7B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PersianMind-7B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.3578
+     },
+     "1-shot": {
+       "acc": 0.35960000000000003
+     },
+     "5-shot": {
+       "acc": 0.2463
+     }
+   }
+ }
demo-leaderboard/PsychoLexLLaMA-average_70B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PsychoLexLLaMA-average-70B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.6584
+     },
+     "1-shot": {
+       "acc": 0.5306000000000001
+     },
+     "5-shot": {
+       "acc": 0.6966
+     }
+   }
+ }
demo-leaderboard/PsychoLexLLaMA-average_8B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PsychoLexLLaMA-average-8B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.4852
+     },
+     "1-shot": {
+       "acc": 0.41969999999999996
+     },
+     "5-shot": {
+       "acc": 0.4705
+     }
+   }
+ }
demo-leaderboard/PsychoLexLLaMA-pretrain-sft_70B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PsychoLexLLaMA-pretrain-sft-70B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.6779000000000001
+     },
+     "1-shot": {
+       "acc": 0.4534
+     },
+     "5-shot": {
+       "acc": 0.6807
+     }
+   }
+ }
demo-leaderboard/PsychoLexLLaMA-pretrain-sft_8B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "PsychoLexLLaMA-pretrain-sft-8B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.473
+     },
+     "1-shot": {
+       "acc": 0.4313
+     },
+     "5-shot": {
+       "acc": 0.4661
+     }
+   }
+ }
demo-leaderboard/Qwen2_Instruct_72B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Qwen2-Instruct-72B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.31370000000000003
+     },
+     "1-shot": {
+       "acc": 0.0582
+     },
+     "5-shot": {
+       "acc": 0.503
+     }
+   }
+ }
demo-leaderboard/Qwen2_Instruct_7B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "Qwen2-Instruct-7B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.0355
+     },
+     "1-shot": {
+       "acc": 0.061799999999999994
+     },
+     "5-shot": {
+       "acc": 0.0863
+     }
+   }
+ }
demo-leaderboard/c4ai-command-r-v01_35B.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "config": {
+     "model_dtype": "torch.bfloat16",
+     "model_name": "c4ai-command-r-v01-35B"
+   },
+   "results": {
+     "0-shot": {
+       "acc": 0.35960000000000003
+     },
+     "1-shot": {
+       "acc": 0.2175
+     },
+     "5-shot": {
+       "acc": 0.462
+     }
+   }
+ }
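All 16 entries added in this commit share the same layout, so a leaderboard table can be built by reading every JSON file in `demo-leaderboard/` and combining the per-shot accuracies. A minimal sketch under that assumption; the `load_entries` helper and the simple mean over shot settings are illustrative, not the leaderboard's actual aggregation code:

```python
import json
from pathlib import Path

def load_entries(results_dir: str = "demo-leaderboard"):
    """Yield (model_name, {shot_setting: acc}) for every result file in the directory."""
    for path in sorted(Path(results_dir).glob("*.json")):
        data = json.loads(path.read_text())
        accs = {shot: scores["acc"] for shot, scores in data["results"].items()}
        yield data["config"]["model_name"], accs

# Illustrative aggregation: mean accuracy over the 0-, 1-, and 5-shot settings.
for name, accs in load_entries():
    mean_acc = sum(accs.values()) / len(accs)
    print(f"{name}: {mean_acc:.4f}")
```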