[
  {
    "model_name": "KFUPM-JRCAI/Qwen3.5-9B-Claude-4.6-HighIQ-INSTRUCT-HERETIC-UNCENSORED-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T12:19:26.492909+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_KFUPM-JRCAI/Qwen3.5-9B-Claude-4.6-HighIQ-INSTRUCT-HERETIC-UNCENSORED-int4-ov"
  },
  {
    "model_name": "KFUPM-JRCAI/Qwen3-4B-Instruct-2507-int4-ov",
    "status": "Completed",
    "avg_tps": 12.820031905785585,
    "quality_overall": 43.33,
    "timestamp": "2026-03-12T11:56:36.069177+00:00",
    "mlqa_f1": 78.33333333333334,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 50.0,
    "aramath_acc": 90.0,
    "arabench_dialect_acc": 50.0,
    "ajgt_acc": 80.0,
    "sadid_sacrebleu": 2.011344093710465,
    "xquad_f1": 59.583333333333336,
    "arabic_mmlu_acc": 40.0,
    "arasum_rougeLsum": 23.33,
    "arabic_exams_acc": 50.0,
    "arabench_mt_sacrebleu": 2.787100692195652,
    "kind_acc": 21.43,
    "hard_acc": 100.0,
    "athar_sacrebleu": 2.5089122510739075,
    "score": 46.49,
    "request_id": "recovered_KFUPM-JRCAI/Qwen3-4B-Instruct-2507-int4-ov"
  },
  {
    "model_name": "OpenVINO/Qwen2.5-7B-Instruct-int4-ov",
    "status": "Completed",
    "avg_tps": 8.759901780348558,
    "quality_overall": 40.32,
    "timestamp": "2026-03-12T10:13:31.495298+00:00",
    "mlqa_f1": 66.66666666666666,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 30.0,
    "aramath_acc": 90.0,
    "arabench_dialect_acc": 66.67,
    "ajgt_acc": 60.0,
    "sadid_sacrebleu": 1.6653146689964888,
    "xquad_f1": 53.23809523809524,
    "arabic_mmlu_acc": 30.0,
    "arasum_rougeLsum": 25.0,
    "arabic_exams_acc": 60.0,
    "arabench_mt_sacrebleu": 3.945059568269401,
    "kind_acc": 14.29,
    "hard_acc": 100.0,
    "athar_sacrebleu": 3.2645630347395977,
    "score": 37.07,
    "request_id": "recovered_OpenVINO/Qwen2.5-7B-Instruct-int4-ov"
  },
  {
    "model_name": "OpenVINO/gemma-2-9b-it-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T09:49:26.387381+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/gemma-2-9b-it-int4-ov"
  },
  {
    "model_name": "KFUPM-JRCAI/AceGPT-v2-8B-Chat-int4-ov",
    "status": "CompletedWithErrors",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T09:44:58.016714+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_KFUPM-JRCAI/AceGPT-v2-8B-Chat-int4-ov"
  },
  {
    "model_name": "OpenVINO/Phi-4-mini-instruct-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T09:16:05.301013+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/Phi-4-mini-instruct-int4-ov"
  },
  {
    "model_name": "OpenVINO/falcon-7b-instruct-int4-cw-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T09:12:48.062866+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/falcon-7b-instruct-int4-cw-ov"
  },
  {
    "model_name": "OpenVINO/gpt-oss-20b-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T09:12:37.013971+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/gpt-oss-20b-int4-ov"
  },
  {
    "model_name": "millexplore/Qwen3-4B-Instruct-2507-openvino",
    "status": "CompletedWithErrors",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-12T08:49:16.978144+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_millexplore/Qwen3-4B-Instruct-2507-openvino"
  },
  {
    "model_name": "OpenVINO/Qwen2.5-1.5B-Instruct-fp16-ov",
    "status": "Completed",
    "avg_tps": 16.800915198448376,
    "quality_overall": 36.39,
    "timestamp": "2026-03-11T13:39:47.002379+00:00",
    "mlqa_f1": 48.33333333333333,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 40.0,
    "aramath_acc": 40.0,
    "arabench_dialect_acc": 33.33,
    "ajgt_acc": 80.0,
    "sadid_sacrebleu": 3.4954168390612934,
    "xquad_f1": 55.07142857142857,
    "arabic_mmlu_acc": 60.0,
    "arasum_rougeLsum": 20.0,
    "arabic_exams_acc": 60.0,
    "arabench_mt_sacrebleu": 2.8270850624848434,
    "kind_acc": 0.0,
    "hard_acc": 100.0,
    "athar_sacrebleu": 2.754073786461146,
    "score": 48.77,
    "request_id": "recovered_OpenVINO/Qwen2.5-1.5B-Instruct-fp16-ov"
  },
  {
    "model_name": "OpenVINO/Qwen3-4B-int4-ov",
    "status": "Completed",
    "avg_tps": 6.02189702865405,
    "quality_overall": 5.86,
    "timestamp": "2026-03-11T12:54:27.203466+00:00",
    "mlqa_f1": 0.0,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 0.0,
    "aramath_acc": 0.0,
    "arabench_dialect_acc": 0.0,
    "ajgt_acc": 50.0,
    "sadid_sacrebleu": 0.48203182081601337,
    "xquad_f1": 0.0,
    "arabic_mmlu_acc": 0.0,
    "arasum_rougeLsum": 0.0,
    "arabic_exams_acc": 10.0,
    "arabench_mt_sacrebleu": 0.3394439679118444,
    "kind_acc": 7.14,
    "hard_acc": 20.0,
    "athar_sacrebleu": 0.0,
    "score": 11.72,
    "request_id": "recovered_OpenVINO/Qwen3-4B-int4-ov"
  },
  {
    "model_name": "OpenVINO/Qwen3-0.6B-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-11T12:36:14.361125+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/Qwen3-0.6B-int4-ov"
  },
  {
    "model_name": "KFUPM-JRCAI/jais-family-6p7b-chat-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-11T12:10:46.627002+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_KFUPM-JRCAI/jais-family-6p7b-chat-int4-ov"
  },
  {
    "model_name": "KFUPM-JRCAI/ALLaM-7B-Instruct-preview-int4-ov",
    "status": "Completed",
    "avg_tps": 5.808571665714949,
    "quality_overall": 38.14,
    "timestamp": "2026-03-11T12:10:31.197162+00:00",
    "mlqa_f1": 60.83333333333333,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 40.0,
    "aramath_acc": 50.0,
    "arabench_dialect_acc": 16.67,
    "ajgt_acc": 70.0,
    "sadid_sacrebleu": 5.3482246360356696,
    "xquad_f1": 63.82783882783882,
    "arabic_mmlu_acc": 50.0,
    "arasum_rougeLsum": 23.33,
    "arabic_exams_acc": 70.0,
    "arabench_mt_sacrebleu": 2.9402573301233126,
    "kind_acc": 14.29,
    "hard_acc": 100.0,
    "athar_sacrebleu": 4.793400075020296,
    "score": 29.36,
    "request_id": "recovered_KFUPM-JRCAI/ALLaM-7B-Instruct-preview-int4-ov"
  },
  {
    "model_name": "OpenVINO/gemma-7b-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-11T10:49:30.895820+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/gemma-7b-int4-ov"
  },
  {
    "model_name": "OpenVINO/Mistral-7B-Instruct-v0.3-int4-cw-ov",
    "status": "Completed",
    "avg_tps": 13.131784715323613,
    "quality_overall": 25.67,
    "timestamp": "2026-03-11T10:33:56.243306+00:00",
    "mlqa_f1": 39.58333333333333,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 40.0,
    "aramath_acc": 30.0,
    "arabench_dialect_acc": 16.67,
    "ajgt_acc": 60.0,
    "sadid_sacrebleu": 2.6837669482199527,
    "xquad_f1": 11.307692307692308,
    "arabic_mmlu_acc": 20.0,
    "arasum_rougeLsum": 0.0,
    "arabic_exams_acc": 50.0,
    "arabench_mt_sacrebleu": 2.4571228583303166,
    "kind_acc": 7.14,
    "hard_acc": 100.0,
    "athar_sacrebleu": 5.156716889677759,
    "score": 36.21,
    "request_id": "recovered_OpenVINO/Mistral-7B-Instruct-v0.3-int4-cw-ov"
  },
  {
    "model_name": "OpenVINO/Qwen2-0.5B-Instruct-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-11T09:28:41.854998+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null,
    "request_id": "recovered_OpenVINO/Qwen2-0.5B-Instruct-int4-ov"
  },
  {
    "model_name": "OpenVINO/Phi-3.5-mini-instruct-int4-cw-ov",
    "status": "Completed",
    "avg_tps": 13.45539261390423,
    "quality_overall": 19.57,
    "timestamp": "2026-03-10T15:23:23.405996+00:00",
    "mlqa_f1": 45.05050505050505,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 20.0,
    "aramath_acc": 10.0,
    "arabench_dialect_acc": 16.67,
    "ajgt_acc": 60.0,
    "sadid_sacrebleu": 0.601106550693938,
    "xquad_f1": 12.366447278211984,
    "arabic_mmlu_acc": 10.0,
    "arasum_rougeLsum": 0.0,
    "arabic_exams_acc": 30.0,
    "arabench_mt_sacrebleu": 0.7997641010436032,
    "kind_acc": 7.14,
    "hard_acc": 80.0,
    "athar_sacrebleu": 0.8638370218116239,
    "score": 32.01,
    "request_id": "recovered_OpenVINO/Phi-3.5-mini-instruct-int4-cw-ov"
  },
  {
    "model_name": "OpenVINO/Qwen2.5-1.5B-Instruct-int4-ov",
    "status": "Completed",
    "avg_tps": 25.70370738934248,
    "quality_overall": 29.65,
    "timestamp": "2026-03-10T14:44:07.319502+00:00",
    "mlqa_f1": 26.111111111111114,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 20.0,
    "aramath_acc": 30.0,
    "arabench_dialect_acc": 50.0,
    "ajgt_acc": 80.0,
    "sadid_sacrebleu": 2.4081636804211675,
    "xquad_f1": 45.57142857142857,
    "arabic_mmlu_acc": 30.0,
    "arasum_rougeLsum": 20.0,
    "arabic_exams_acc": 40.0,
    "arabench_mt_sacrebleu": 2.8044414465956473,
    "kind_acc": 7.14,
    "hard_acc": 90.0,
    "athar_sacrebleu": 0.729756798756686,
    "score": 54.45,
    "request_id": "recovered_OpenVINO/Qwen2.5-1.5B-Instruct-int4-ov"
  },
  {
    "model_name": "KFUPM-JRCAI/Llama-3.2-3B-Instruct-int4-ov",
    "status": "Completed",
    "avg_tps": 16.290223677953083,
    "quality_overall": 18.93,
    "timestamp": "2026-03-16T08:58:09.287519+00:00",
    "mlqa_f1": 40.0,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": 0.0,
    "ht_arabic_mmlu_acc": 10.0,
    "aramath_acc": 40.0,
    "arabench_dialect_acc": 33.33,
    "ajgt_acc": 40.0,
    "sadid_sacrebleu": 2.4248479779029766,
    "xquad_f1": 8.571428571428571,
    "arabic_mmlu_acc": 20.0,
    "arasum_rougeLsum": 10.0,
    "arabic_exams_acc": 30.0,
    "arabench_mt_sacrebleu": 2.3772856946608285,
    "kind_acc": 7.14,
    "hard_acc": 40.0,
    "athar_sacrebleu": 0.03719441572165816,
    "score": 43.51
  },
  {
    "model_name": "KFUPM-JRCAI/Mistral-Nemo-Instruct-2407-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-16T07:49:15.135760+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null
  },
  {
    "model_name": "KFUPM-JRCAI/Zamba2-2.7B-instruct-int4-ov",
    "status": "Failed",
    "avg_tps": null,
    "quality_overall": null,
    "timestamp": "2026-03-16T07:40:30.416126+00:00",
    "mlqa_f1": null,
    "iwslt2017-en-ar_sacrebleu": null,
    "xlsum_rougeLsum": null,
    "ht_arabic_mmlu_acc": null,
    "aramath_acc": null,
    "arabench_dialect_acc": null,
    "ajgt_acc": null,
    "sadid_sacrebleu": null,
    "xquad_f1": null,
    "arabic_mmlu_acc": null,
    "arasum_rougeLsum": null,
    "arabic_exams_acc": null,
    "arabench_mt_sacrebleu": null,
    "kind_acc": null,
    "hard_acc": null,
    "athar_sacrebleu": null,
    "score": null
  }
]