{
  "timestamp": "2025-04-01T10:30:15.307581",
  "models": {
    "Qwen/Qwen2.5-72B-Instruct": [
      {
        "provider": "sambanova",
        "total_time": 21.616381883621216,
        "success_rate": 1.0,
        "average_time": 4.323276376724243
      },
      {
        "provider": "together",
        "total_time": 21.84441828727722,
        "success_rate": 1.0,
        "average_time": 4.368883657455444
      },
      {
        "provider": "nebius",
        "total_time": 22.003292322158813,
        "success_rate": 1.0,
        "average_time": 4.400658464431762
      },
      {
        "provider": "fireworks-ai",
        "total_time": 22.086440563201904,
        "success_rate": 1.0,
        "average_time": 4.417288112640381
      },
      {
        "provider": "novita",
        "total_time": 22.16641402244568,
        "success_rate": 1.0,
        "average_time": 4.433282804489136
      },
      {
        "provider": "hf-inference",
        "total_time": 22.41838788986206,
        "success_rate": 1.0,
        "average_time": 4.483677577972412
      },
      {
        "provider": "hyperbolic",
        "total_time": 23.555410146713257,
        "success_rate": 1.0,
        "average_time": 4.711082029342651
      }
    ],
    "meta-llama/Llama-3.3-70B-Instruct": [
      {
        "provider": "novita",
        "total_time": 28.36034393310547,
        "success_rate": 1.0,
        "average_time": 5.672068786621094
      },
      {
        "provider": "fireworks-ai",
        "total_time": 31.595482110977173,
        "success_rate": 1.0,
        "average_time": 6.319096422195434
      },
      {
        "provider": "sambanova",
        "total_time": 31.845455646514893,
        "success_rate": 1.0,
        "average_time": 6.369091129302978
      },
      {
        "provider": "nebius",
        "total_time": 31.963874578475952,
        "success_rate": 1.0,
        "average_time": 6.39277491569519
      },
      {
        "provider": "hyperbolic",
        "total_time": 35.02063775062561,
        "success_rate": 1.0,
        "average_time": 7.004127550125122
      },
      {
        "provider": "together",
        "total_time": 36.88544726371765,
        "success_rate": 1.0,
        "average_time": 7.3770894527435305
      },
      {
        "provider": "hf-inference",
        "total_time": 37.26896572113037,
        "success_rate": 1.0,
        "average_time": 7.453793144226074
      },
      {
        "provider": "cerebras",
        "total_time": 37.70701003074646,
        "success_rate": 1.0,
        "average_time": 7.541402006149292
      }
    ],
    "deepseek-ai/DeepSeek-R1-Distill-Llama-70B": null,
    "Qwen/QwQ-32B": [
      {
        "provider": "sambanova",
        "total_time": 25.050092935562134,
        "success_rate": 1.0,
        "average_time": 5.010018587112427
      },
      {
        "provider": "novita",
        "total_time": 25.061633110046387,
        "success_rate": 1.0,
        "average_time": 5.012326622009278
      },
      {
        "provider": "hyperbolic",
        "total_time": 25.363604307174683,
        "success_rate": 1.0,
        "average_time": 5.072720861434936
      },
      {
        "provider": "nebius",
        "total_time": 25.37495517730713,
        "success_rate": 1.0,
        "average_time": 5.074991035461426
      },
      {
        "provider": "hf-inference",
        "total_time": 25.41055965423584,
        "success_rate": 1.0,
        "average_time": 5.082111930847168
      },
      {
        "provider": "fireworks-ai",
        "total_time": 25.595581769943237,
        "success_rate": 1.0,
        "average_time": 5.119116353988647
      }
    ],
    "mistralai/Mistral-Small-24B-Instruct-2501": null
  }
}
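
The file above is a plain JSON snapshot of one benchmark run: a timestamp plus, for each model, a list of provider timing records (total_time and average_time in seconds, success_rate as a fraction of requests that succeeded), with null for models that produced no usable data. As a minimal sketch of how such a file might be consumed, the Python snippet below loads it and ranks the providers for each model by average response time; the filename results.json is an assumption for illustration, not something documented by the Space.

import json

# Load the benchmark snapshot. The path "results.json" is an assumed
# location; point it at wherever the results file is actually stored.
with open("results.json") as f:
    results = json.load(f)

print(f"Benchmark run: {results['timestamp']}")

for model, runs in results["models"].items():
    # A null entry means no timing data was recorded for that model in this run.
    if runs is None:
        print(f"\n{model}: no data")
        continue

    # Rank providers by average response time (seconds per request), fastest first.
    ranked = sorted(runs, key=lambda r: r["average_time"])
    print(f"\n{model}")
    for rank, r in enumerate(ranked, start=1):
        print(
            f"  {rank}. {r['provider']:<15} "
            f"avg {r['average_time']:.2f}s  "
            f"total {r['total_time']:.2f}s  "
            f"success {r['success_rate']:.0%}"
        )

Sorting on average_time rather than total_time makes no difference here, since every provider was given the same number of requests, but it keeps the ranking meaningful if run counts ever differ between providers.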