Rank,Model Name,Total Time,GPU Util Time,Reasoning & Math Mean Score,Reasoning & Math Avg. Rank
1,google_gemma-3-12b-it,15h 45m,14h 8m,0.6266,1
2,Qwen_Qwen3-8B,15h 31m,13h 44m,0.6214,2
3,Qwen_Qwen3-14B (8bit),29h 45m,17h 29m,0.586,3
4,Qwen_Qwen3-4B,5h 51m,5h 3m,0.5712,4
5,Qwen_Qwen2.5-7B-Instruct,9h 36m,8h 33m,0.5541,5
6,openchat_openchat-3.6-8b-20240522,7h 51m,6h 59m,0.5505,6
7,Qwen_Qwen2.5-14B-Instruct (8bit),52h 44m,29h 32m,0.5488,7
8,mistralai_Ministral-8B-Instruct-2410,10h 46m,9h 27m,0.5446,8
9,01-ai_Yi-1.5-9B-Chat,13h 54m,12h 15m,0.5399,9
10,deepseek-ai_DeepSeek-R1-0528-Qwen3-8B,17h 57m,15h 30m,0.5387,10
11,google_gemma-3-4b-it,4h 51m,3h 50m,0.5374,11
12,meta-llama_Llama-3.1-8B-Instruct,12h 19m,10h 52m,0.5366,12
13,meta-llama_Meta-Llama-3-8B-Instruct,6h 30m,5h 46m,0.5286,13
14,Qwen_Qwen2-7B-Instruct,11h 30m,10h 11m,0.5285,14
15,Qwen_Qwen2.5-7B-Instruct-1M,11h 17m,10h 10m,0.5245,15
16,01-ai_Yi-1.5-9B,11h 43m,10h 26m,0.5206,16
17,NousResearch_Hermes-2-Pro-Mistral-7B,8h 27m,7h 28m,0.5184,17
18,Qwen_Qwen2.5-Math-7B,27h 21m,24h 38m,0.501,18
19,01-ai_Yi-1.5-6B-Chat,8h 4m,7h 1m,0.5006,19
20,Qwen_Qwen2.5-Math-7B-Instruct,5h 37m,4h 57m,0.4997,20
21,deepseek-ai_DeepSeek-R1-Distill-Qwen-7B,6h 28m,5h 43m,0.4841,21
22,mistralai_Mistral-7B-Instruct-v0.3,8h 38m,7h 41m,0.4704,22
23,meta-llama_Llama-3.2-3B-Instruct,7h 12m,5h 57m,0.4688,23
24,01-ai_Yi-1.5-6B,4h 28m,3h 54m,0.4495,24
25,Qwen_Qwen3-1.7B,4h 25m,3h 36m,0.4493,25
26,deepseek-ai_DeepSeek-R1-Distill-Llama-8B,11h 46m,10h 36m,0.4469,26
27,deepseek-ai_deepseek-llm-7b-chat,10h 6m,9h 8m,0.4244,27
28,meta-llama_Llama-2-13b-chat-hf,17h 8m,15h 37m,0.4143,28
29,Qwen_Qwen2.5-Math-1.5B-Instruct,3h 25m,2h 39m,0.4085,29
30,deepseek-ai_DeepSeek-R1-Distill-Qwen-1.5B,3h 40m,2h 52m,0.4009,30
31,Qwen_Qwen2.5-1.5B-Instruct,3h 20m,2h 36m,0.3874,31
32,Qwen_Qwen2.5-3B-Instruct,7h 48m,6h 30m,0.3823,32
33,meta-llama_Llama-2-13b-hf,19h 21m,17h 38m,0.3719,33
34,deepseek-ai_deepseek-math-7b-rl,8h 2m,7h 12m,0.3702,34
35,meta-llama_Llama-2-7b-chat-hf,6h 57m,6h 7m,0.3674,35
36,Qwen_Qwen3-0.6B,3h 45m,2h 53m,0.3494,36
37,meta-llama_Llama-3.2-1B-Instruct,3h 30m,2h 35m,0.345,37
38,deepseek-ai_deepseek-llm-7b-base,7h 11m,6h 26m,0.3377,38
39,meta-llama_Llama-2-7b-hf,5h 42m,4h 59m,0.3361,39
40,google_gemma-3-1b-it,6h 50m,4h 52m,0.3312,40
41,Qwen_Qwen2.5-0.5B-Instruct,2h 34m,1h 48m,0.2914,41