Update status of Weyaxi/Nous-Hermes-2-SUS-Chat-34B-Slerp_eval_request_False_bfloat16_Original to FINISHED
Weyaxi/Nous-Hermes-2-SUS-Chat-34B-Slerp_eval_request_False_bfloat16_Original.json
CHANGED
@@ -8,10 +8,24 @@
     "architectures": "LlamaForCausalLM",
     "weight_type": "Original",
     "main_language": "English",
-    "status": "
+    "status": "FINISHED",
     "submitted_time": "2024-06-16T05:01:18Z",
     "model_type": "🤝 : base merges and moerges",
     "source": "leaderboard",
     "job_id": 821,
-    "job_start_time": "2024-06-16T06-32-30.997295"
+    "job_start_time": "2024-06-16T06-32-30.997295",
+    "eval_version": "1.1.0",
+    "result_metrics": {
+        "enem_challenge": 0.7389783065080476,
+        "bluex": 0.6801112656467315,
+        "oab_exams": 0.5608200455580865,
+        "assin2_rte": 0.9231939503620477,
+        "assin2_sts": 0.808342331307072,
+        "faquad_nli": 0.79766439567281,
+        "hatebr_offensive": 0.8489313135505292,
+        "portuguese_hate_speech": 0.6411463077286375,
+        "tweetsentbr": 0.7481822026239117
+    },
+    "result_metrics_average": 0.7497077909953193,
+    "result_metrics_npm": 0.6213927575767522
 }
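For reference, the reported `result_metrics_average` matches the unweighted arithmetic mean of the nine task scores added in this commit. Below is a minimal Python sketch that checks this; the dictionary is copied from the diff above, and the snippet is only an illustration, not part of the leaderboard code.

```python
# Sketch: confirm that "result_metrics_average" is the plain mean
# of the nine per-task scores added in this commit.
result_metrics = {
    "enem_challenge": 0.7389783065080476,
    "bluex": 0.6801112656467315,
    "oab_exams": 0.5608200455580865,
    "assin2_rte": 0.9231939503620477,
    "assin2_sts": 0.808342331307072,
    "faquad_nli": 0.79766439567281,
    "hatebr_offensive": 0.8489313135505292,
    "portuguese_hate_speech": 0.6411463077286375,
    "tweetsentbr": 0.7481822026239117,
}

average = sum(result_metrics.values()) / len(result_metrics)
print(average)  # ≈ 0.7497077909953193, the reported "result_metrics_average"
```

The `result_metrics_npm` value is not recomputed here, since the per-task normalization baselines it depends on are not part of this file.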