Gemini 3 and other "thinking" models require reasoning blocks with thought_signature to be preserved in subsequent requests when using tool calls. This enables the model to resume its chain of thought.

Changes:
- Add ReasoningContent struct for reasoning blocks
- Add reasoning field to ChatMessage and ChatResponse
- Parse reasoning from OpenRouter responses
- Preserve reasoning when building assistant messages with tool calls

Reference: https://openrouter.ai/docs/use-cases/reasoning-tokens
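As a rough illustration of the change described above, here is a minimal sketch in Rust with serde. The commit itself only names ReasoningContent, ChatMessage, and ChatResponse; the language choice, field layouts, and the helper function below are assumptions for illustration, not the actual implementation.

```rust
use serde::{Deserialize, Serialize};

/// One reasoning block returned by a "thinking" model.
/// `thought_signature` must be echoed back unchanged in follow-up
/// requests so the model can resume its chain of thought.
/// (Sketch only; the real struct in the commit may differ.)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningContent {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thought_signature: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    pub role: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    /// Reasoning blocks preserved across turns; omitted from the JSON
    /// entirely when absent, so requests to non-thinking models are
    /// unchanged.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<Vec<ReasoningContent>>,
}

/// Hypothetical helper (not from the source): build the assistant
/// message that carries tool calls, copying the reasoning blocks out of
/// the model's response so the next request preserves them verbatim.
pub fn assistant_message_with_reasoning(
    content: Option<String>,
    reasoning: Option<Vec<ReasoningContent>>,
) -> ChatMessage {
    ChatMessage { role: "assistant".into(), content, reasoning }
}
```

The essential behavior is simply that the reasoning blocks, including thought_signature, pass through untouched from response to follow-up request; per the OpenRouter reasoning-tokens docs referenced above, dropping them breaks the model's ability to resume its thought across tool-call turns.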
{
|
|
"generated_at": "2025-12-19T07:20:20Z",
|
|
"total_models": 350,
|
|
"models_with_benchmarks": 158,
|
|
"categories": [
|
|
"code",
|
|
"math",
|
|
"reasoning",
|
|
"tool_calling",
|
|
"long_context",
|
|
"general"
|
|
],
|
|
"families": {
|
|
"gemini-flash": {
|
|
"latest": "google/gemini-3-flash-preview",
|
|
"members": [
|
|
"google/gemini-3-flash-preview",
|
|
"google/gemini-2.5-flash-image",
|
|
"google/gemini-2.5-flash-preview-09-2025",
|
|
"google/gemini-2.5-flash-image-preview",
|
|
"google/gemini-2.5-flash",
|
|
"google/gemini-2.0-flash-001",
|
|
"google/gemini-2.0-flash-exp:free"
|
|
],
|
|
"tier": "fast"
|
|
},
|
|
"mistral-small": {
|
|
"latest": "mistralai/mistral-small-24b-instruct-2501",
|
|
"members": [
|
|
"mistralai/mistral-small-24b-instruct-2501",
|
|
"mistralai/mistral-small-3.2-24b-instruct",
|
|
"mistralai/mistral-small-3.1-24b-instruct:free",
|
|
"mistralai/mistral-small-3.1-24b-instruct",
|
|
"mistralai/mistral-small-creative"
|
|
],
|
|
"tier": "fast"
|
|
},
|
|
"mistral-large": {
|
|
"latest": "mistralai/mistral-large-2512",
|
|
"members": [
|
|
"mistralai/mistral-large-2512",
|
|
"mistralai/mistral-large-2411",
|
|
"mistralai/mistral-large-2407",
|
|
"mistralai/mistral-large"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"claude-opus": {
|
|
"latest": "anthropic/claude-opus-4.5",
|
|
"members": [
|
|
"anthropic/claude-opus-4.5",
|
|
"anthropic/claude-opus-4.1",
|
|
"anthropic/claude-opus-4",
|
|
"anthropic/claude-3-opus"
|
|
],
|
|
"tier": "flagship"
|
|
},
|
|
"gemini-pro": {
|
|
"latest": "google/gemini-3-pro-image-preview",
|
|
"members": [
|
|
"google/gemini-3-pro-image-preview",
|
|
"google/gemini-3-pro-preview",
|
|
"google/gemini-2.5-pro",
|
|
"google/gemini-2.5-pro-preview",
|
|
"google/gemini-2.5-pro-preview-05-06"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"claude-haiku": {
|
|
"latest": "anthropic/claude-haiku-4.5",
|
|
"members": [
|
|
"anthropic/claude-haiku-4.5",
|
|
"anthropic/claude-3.5-haiku",
|
|
"anthropic/claude-3-haiku"
|
|
],
|
|
"tier": "fast"
|
|
},
|
|
"claude-sonnet": {
|
|
"latest": "anthropic/claude-sonnet-4.5",
|
|
"members": [
|
|
"anthropic/claude-sonnet-4.5",
|
|
"anthropic/claude-sonnet-4",
|
|
"anthropic/claude-3.7-sonnet",
|
|
"anthropic/claude-3.5-sonnet"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"deepseek-chat": {
|
|
"latest": "deepseek/deepseek-chat-v3-0324",
|
|
"members": [
|
|
"deepseek/deepseek-chat-v3-0324",
|
|
"deepseek/deepseek-chat-v3.1",
|
|
"deepseek/deepseek-chat"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"mistral-medium": {
|
|
"latest": "mistralai/mistral-medium-3.1",
|
|
"members": [
|
|
"mistralai/mistral-medium-3.1",
|
|
"mistralai/mistral-medium-3"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"gpt-4": {
|
|
"latest": "openai/gpt-4.1",
|
|
"members": [
|
|
"openai/gpt-4.1",
|
|
"openai/gpt-4o",
|
|
"openai/gpt-4-turbo",
|
|
"openai/gpt-4-turbo-preview"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"gpt-4-mini": {
|
|
"latest": "openai/gpt-4.1-mini",
|
|
"members": [
|
|
"openai/gpt-4.1-mini",
|
|
"openai/gpt-4o-mini"
|
|
],
|
|
"tier": "fast"
|
|
},
|
|
"qwq": {
|
|
"latest": "qwen/qwq-32b",
|
|
"members": [
|
|
"qwen/qwq-32b"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"o3-mini": {
|
|
"latest": "openai/o3-mini-high",
|
|
"members": [
|
|
"openai/o3-mini-high",
|
|
"openai/o3-mini"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"deepseek-r1": {
|
|
"latest": "deepseek/deepseek-r1",
|
|
"members": [
|
|
"deepseek/deepseek-r1"
|
|
],
|
|
"tier": "flagship"
|
|
},
|
|
"o1": {
|
|
"latest": "openai/o1",
|
|
"members": [
|
|
"openai/o1"
|
|
],
|
|
"tier": "flagship"
|
|
},
|
|
"llama-3-70b": {
|
|
"latest": "meta-llama/llama-3.3-70b-instruct:free",
|
|
"members": [
|
|
"meta-llama/llama-3.3-70b-instruct:free",
|
|
"meta-llama/llama-3.3-70b-instruct"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"llama-3-90b": {
|
|
"latest": "meta-llama/llama-3.2-90b-vision-instruct",
|
|
"members": [
|
|
"meta-llama/llama-3.2-90b-vision-instruct"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"qwen-72b": {
|
|
"latest": "qwen/qwen-2.5-72b-instruct",
|
|
"members": [
|
|
"qwen/qwen-2.5-72b-instruct"
|
|
],
|
|
"tier": "mid"
|
|
},
|
|
"llama-3-405b": {
|
|
"latest": "meta-llama/llama-3.1-405b",
|
|
"members": [
|
|
"meta-llama/llama-3.1-405b",
|
|
"meta-llama/llama-3.1-405b-instruct:free",
|
|
"meta-llama/llama-3.1-405b-instruct"
|
|
],
|
|
"tier": "flagship"
|
|
}
|
|
},
|
|
"aliases": {
|
|
"google/gemini-2.5-flash-image": "google/gemini-3-flash-preview",
|
|
"gemini-2.5-flash-image": "google/gemini-3-flash-preview",
|
|
"google/gemini-2.5-flash-preview-09-2025": "google/gemini-3-flash-preview",
|
|
"gemini-2.5-flash-preview-09-2025": "google/gemini-3-flash-preview",
|
|
"google/gemini-2.5-flash-image-preview": "google/gemini-3-flash-preview",
|
|
"gemini-2.5-flash-image-preview": "google/gemini-3-flash-preview",
|
|
"google/gemini-2.5-flash": "google/gemini-3-flash-preview",
|
|
"gemini-2.5-flash": "google/gemini-3-flash-preview",
|
|
"google/gemini-2.0-flash-001": "google/gemini-3-flash-preview",
|
|
"gemini-2.0-flash-001": "google/gemini-3-flash-preview",
|
|
"google/gemini-2.0-flash-exp:free": "google/gemini-3-flash-preview",
|
|
"gemini-2.0-flash-exp:free": "google/gemini-3-flash-preview",
|
|
"gemini-flash": "google/gemini-3-flash-preview",
|
|
"mistralai/mistral-small-3.2-24b-instruct": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistral-small-3.2-24b-instruct": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistralai/mistral-small-3.1-24b-instruct:free": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistral-small-3.1-24b-instruct:free": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistralai/mistral-small-3.1-24b-instruct": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistral-small-3.1-24b-instruct": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistralai/mistral-small-creative": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistral-small-creative": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistral-small": "mistralai/mistral-small-24b-instruct-2501",
|
|
"mistralai/mistral-large-2411": "mistralai/mistral-large-2512",
|
|
"mistral-large-2411": "mistralai/mistral-large-2512",
|
|
"mistralai/mistral-large-2407": "mistralai/mistral-large-2512",
|
|
"mistral-large-2407": "mistralai/mistral-large-2512",
|
|
"mistralai/mistral-large": "mistralai/mistral-large-2512",
|
|
"mistral-large": "mistralai/mistral-large-2512",
|
|
"anthropic/claude-opus-4.1": "anthropic/claude-opus-4.5",
|
|
"claude-opus-4.1": "anthropic/claude-opus-4.5",
|
|
"anthropic/claude-opus-4": "anthropic/claude-opus-4.5",
|
|
"claude-opus-4": "anthropic/claude-opus-4.5",
|
|
"anthropic/claude-3-opus": "anthropic/claude-opus-4.5",
|
|
"claude-3-opus": "anthropic/claude-opus-4.5",
|
|
"claude-opus": "anthropic/claude-opus-4.5",
|
|
"opus": "anthropic/claude-opus-4.5",
|
|
"claude opus": "anthropic/claude-opus-4.5",
|
|
"google/gemini-3-pro-preview": "google/gemini-3-pro-image-preview",
|
|
"gemini-3-pro-preview": "google/gemini-3-pro-image-preview",
|
|
"google/gemini-2.5-pro": "google/gemini-3-pro-image-preview",
|
|
"gemini-2.5-pro": "google/gemini-3-pro-image-preview",
|
|
"google/gemini-2.5-pro-preview": "google/gemini-3-pro-image-preview",
|
|
"gemini-2.5-pro-preview": "google/gemini-3-pro-image-preview",
|
|
"google/gemini-2.5-pro-preview-05-06": "google/gemini-3-pro-image-preview",
|
|
"gemini-2.5-pro-preview-05-06": "google/gemini-3-pro-image-preview",
|
|
"gemini-pro": "google/gemini-3-pro-image-preview",
|
|
"anthropic/claude-3.5-haiku": "anthropic/claude-haiku-4.5",
|
|
"claude-3.5-haiku": "anthropic/claude-haiku-4.5",
|
|
"anthropic/claude-3-haiku": "anthropic/claude-haiku-4.5",
|
|
"claude-3-haiku": "anthropic/claude-haiku-4.5",
|
|
"claude-haiku": "anthropic/claude-haiku-4.5",
|
|
"haiku": "anthropic/claude-haiku-4.5",
|
|
"claude haiku": "anthropic/claude-haiku-4.5",
|
|
"anthropic/claude-sonnet-4": "anthropic/claude-sonnet-4.5",
|
|
"claude-sonnet-4": "anthropic/claude-sonnet-4.5",
|
|
"anthropic/claude-3.7-sonnet": "anthropic/claude-sonnet-4.5",
|
|
"claude-3.7-sonnet": "anthropic/claude-sonnet-4.5",
|
|
"anthropic/claude-3.5-sonnet": "anthropic/claude-sonnet-4.5",
|
|
"claude-3.5-sonnet": "anthropic/claude-sonnet-4.5",
|
|
"claude-sonnet": "anthropic/claude-sonnet-4.5",
|
|
"sonnet": "anthropic/claude-sonnet-4.5",
|
|
"claude sonnet": "anthropic/claude-sonnet-4.5",
|
|
"deepseek/deepseek-chat-v3.1": "deepseek/deepseek-chat-v3-0324",
|
|
"deepseek-chat-v3.1": "deepseek/deepseek-chat-v3-0324",
|
|
"deepseek/deepseek-chat": "deepseek/deepseek-chat-v3-0324",
|
|
"deepseek-chat": "deepseek/deepseek-chat-v3-0324",
|
|
"mistralai/mistral-medium-3": "mistralai/mistral-medium-3.1",
|
|
"mistral-medium-3": "mistralai/mistral-medium-3.1",
|
|
"mistral-medium": "mistralai/mistral-medium-3.1",
|
|
"openai/gpt-4o": "openai/gpt-4.1",
|
|
"gpt-4o": "openai/gpt-4.1",
|
|
"openai/gpt-4-turbo": "openai/gpt-4.1",
|
|
"gpt-4-turbo": "openai/gpt-4.1",
|
|
"openai/gpt-4-turbo-preview": "openai/gpt-4.1",
|
|
"gpt-4-turbo-preview": "openai/gpt-4.1",
|
|
"gpt-4": "openai/gpt-4.1",
|
|
"gpt4": "openai/gpt-4.1",
|
|
"openai/gpt-4o-mini": "openai/gpt-4.1-mini",
|
|
"gpt-4o-mini": "openai/gpt-4.1-mini",
|
|
"gpt-4-mini": "openai/gpt-4.1-mini",
|
|
"gpt4-mini": "openai/gpt-4.1-mini",
|
|
"qwq": "qwen/qwq-32b",
|
|
"openai/o3-mini": "openai/o3-mini-high",
|
|
"o3-mini": "openai/o3-mini-high",
|
|
"deepseek-r1": "deepseek/deepseek-r1",
|
|
"o1": "openai/o1",
|
|
"meta-llama/llama-3.3-70b-instruct": "meta-llama/llama-3.3-70b-instruct:free",
|
|
"llama-3.3-70b-instruct": "meta-llama/llama-3.3-70b-instruct:free",
|
|
"llama-3-70b": "meta-llama/llama-3.3-70b-instruct:free",
|
|
"llama-3-90b": "meta-llama/llama-3.2-90b-vision-instruct",
|
|
"qwen-72b": "qwen/qwen-2.5-72b-instruct",
|
|
"meta-llama/llama-3.1-405b-instruct:free": "meta-llama/llama-3.1-405b",
|
|
"llama-3.1-405b-instruct:free": "meta-llama/llama-3.1-405b",
|
|
"meta-llama/llama-3.1-405b-instruct": "meta-llama/llama-3.1-405b",
|
|
"llama-3.1-405b-instruct": "meta-llama/llama-3.1-405b",
|
|
"llama-3-405b": "meta-llama/llama-3.1-405b"
|
|
},
|
|
"models": [
|
|
{
|
|
"id": "google/gemini-3-flash-preview",
|
|
"name": "Google: Gemini 3 Flash Preview",
|
|
"context_length": 1048576,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000005",
|
|
"completion": "0.000003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"audio": "0.000001",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000005"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.78
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.997
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.904
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.78,
|
|
"math": 0.997,
|
|
"reasoning": 0.904
|
|
}
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-small-creative",
|
|
"name": "Mistral: Mistral Small Creative",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "allenai/olmo-3.1-32b-think:free",
|
|
"name": "AllenAI: Olmo 3.1 32B Think (free)",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "xiaomi/mimo-v2-flash:free",
|
|
"name": "Xiaomi: MiMo-V2-Flash (free)",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.734
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.941
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.837,
|
|
"mmlu-pro": 0.849
|
|
},
|
|
"tool_calling": {
|
|
"tau-bench": 0.803
|
|
},
|
|
"long_context": {
|
|
"longbench-v2": 0.606
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.734,
|
|
"math": 0.941,
|
|
"reasoning": 0.843,
|
|
"tool_calling": 0.803,
|
|
"long_context": 0.606
|
|
}
|
|
},
|
|
{
|
|
"id": "nvidia/nemotron-3-nano-30b-a3b:free",
|
|
"name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
|
|
"context_length": 256000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.388
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.992
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.75,
|
|
"mmlu-pro": 0.783
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.388,
|
|
"math": 0.992,
|
|
"reasoning": 0.7665
|
|
}
|
|
},
|
|
{
|
|
"id": "nvidia/nemotron-3-nano-30b-a3b",
|
|
"name": "NVIDIA: Nemotron 3 Nano 30B A3B",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000006",
|
|
"completion": "0.00000024",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.388
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.992
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.75,
|
|
"mmlu-pro": 0.783
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.388,
|
|
"math": 0.992,
|
|
"reasoning": 0.7665
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.2-chat",
|
|
"name": "OpenAI: GPT-5.2 Chat",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000175",
|
|
"completion": "0.000014",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000175"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.2-pro",
|
|
"name": "OpenAI: GPT-5.2 Pro",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000021",
|
|
"completion": "0.000168",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"math": {
|
|
"aime-2025": 1.0
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.932
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"math": 1.0,
|
|
"reasoning": 0.932
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.2",
|
|
"name": "OpenAI: GPT-5.2",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000175",
|
|
"completion": "0.000014",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000175"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.8
|
|
},
|
|
"math": {
|
|
"aime-2025": 1.0
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.924
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.8,
|
|
"math": 1.0,
|
|
"reasoning": 0.924
|
|
}
|
|
},
|
|
{
|
|
"id": "mistralai/devstral-2512:free",
|
|
"name": "Mistral: Devstral 2 2512 (free)",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/devstral-2512",
|
|
"name": "Mistral: Devstral 2 2512",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000015",
|
|
"completion": "0.0000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "relace/relace-search",
|
|
"name": "Relace: Relace Search",
|
|
"context_length": 256000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000001",
|
|
"completion": "0.000003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "z-ai/glm-4.6v",
|
|
"name": "Z.AI: GLM 4.6V",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000003",
|
|
"completion": "0.0000009",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000005"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.68
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.939
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.81
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.68,
|
|
"math": 0.939,
|
|
"reasoning": 0.81
|
|
}
|
|
},
|
|
{
|
|
"id": "nex-agi/deepseek-v3.1-nex-n1:free",
|
|
"name": "Nex AGI: DeepSeek V3.1 Nex N1 (free)",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "DeepSeek",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0",
|
|
"input_cache_write": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.66,
|
|
"livecodebench": 0.564,
|
|
"aider-polyglot": 0.684,
|
|
"codeforces": 0.697
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.498,
|
|
"aime-2024": 0.663
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.749,
|
|
"mmlu-pro": 0.837
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.6512,
|
|
"math": 0.5805,
|
|
"reasoning": 0.793
|
|
}
|
|
},
|
|
{
|
|
"id": "essentialai/rnj-1-instruct",
|
|
"name": "EssentialAI: Rnj 1 Instruct",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000015",
|
|
"completion": "0.00000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openrouter/bodybuilder",
|
|
"name": "Body Builder (beta)",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Router",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "-1",
|
|
"completion": "-1"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.1-codex-max",
|
|
"name": "OpenAI: GPT-5.1-Codex-Max",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000125",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000125"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.737
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.737
|
|
}
|
|
},
|
|
{
|
|
"id": "amazon/nova-2-lite-v1",
|
|
"name": "Amazon: Nova 2 Lite",
|
|
"context_length": 1000000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"video",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Nova",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000003",
|
|
"completion": "0.0000025",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/ministral-14b-2512",
|
|
"name": "Mistral: Ministral 3 14B 2512",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/ministral-8b-2512",
|
|
"name": "Mistral: Ministral 3 8B 2512",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000015",
|
|
"completion": "0.00000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/ministral-3b-2512",
|
|
"name": "Mistral: Ministral 3 3B 2512",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-large-2512",
|
|
"name": "Mistral: Mistral Large 3 2512",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000005",
|
|
"completion": "0.0000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "arcee-ai/trinity-mini:free",
|
|
"name": "Arcee AI: Trinity Mini (free)",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "arcee-ai/trinity-mini",
|
|
"name": "Arcee AI: Trinity Mini",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000045",
|
|
"completion": "0.00000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "deepseek/deepseek-v3.2-speciale",
|
|
"name": "DeepSeek: DeepSeek V3.2 Speciale",
|
|
"context_length": 163840,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "DeepSeek",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000027",
|
|
"completion": "0.00000041",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.731,
|
|
"codeforces": 0.9
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.96
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.8155,
|
|
"math": 0.96
|
|
}
|
|
},
|
|
{
|
|
"id": "deepseek/deepseek-v3.2",
|
|
"name": "DeepSeek: DeepSeek V3.2",
|
|
"context_length": 163840,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "DeepSeek",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000024",
|
|
"completion": "0.00000038",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000011",
|
|
"input_cache_write": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.731,
|
|
"codeforces": 0.9
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.96
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.8155,
|
|
"math": 0.96
|
|
}
|
|
},
|
|
{
|
|
"id": "prime-intellect/intellect-3",
|
|
"name": "Prime Intellect: INTELLECT-3",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000011",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "tngtech/tng-r1t-chimera:free",
|
|
"name": "TNG: R1T Chimera (free)",
|
|
"context_length": 163840,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "tngtech/tng-r1t-chimera",
|
|
"name": "TNG: R1T Chimera",
|
|
"context_length": 163840,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000003",
|
|
"completion": "0.0000012",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "anthropic/claude-opus-4.5",
|
|
"name": "Anthropic: Claude Opus 4.5",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000005",
|
|
"completion": "0.000025",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000005",
|
|
"input_cache_write": "0.00000625"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.725
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.755
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.796
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.725,
|
|
"math": 0.755,
|
|
"reasoning": 0.796
|
|
}
|
|
},
|
|
{
|
|
"id": "allenai/olmo-3-32b-think:free",
|
|
"name": "AllenAI: Olmo 3 32B Think (free)",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "allenai/olmo-3-7b-instruct",
|
|
"name": "AllenAI: Olmo 3 7B Instruct",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "allenai/olmo-3-7b-think",
|
|
"name": "AllenAI: Olmo 3 7B Think",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000012",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "google/gemini-3-pro-image-preview",
|
|
"name": "Google: Nano Banana Pro (Gemini 3 Pro Image Preview)",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text+image->text+image",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000002",
|
|
"completion": "0.000012",
|
|
"request": "0",
|
|
"image": "0.067",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "x-ai/grok-4.1-fast",
|
|
"name": "xAI: Grok 4.1 Fast",
|
|
"context_length": 2000000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Grok",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000005",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000005"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"livecodebench": 0.79
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.917
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.875
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.79,
|
|
"math": 0.917,
|
|
"reasoning": 0.875
|
|
}
|
|
},
|
|
{
|
|
"id": "google/gemini-3-pro-preview",
|
|
"name": "Google: Gemini 3 Pro Preview",
|
|
"context_length": 1048576,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000002",
|
|
"completion": "0.000012",
|
|
"request": "0",
|
|
"image": "0.008256",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000002",
|
|
"input_cache_write": "0.000002375"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.762
|
|
},
|
|
"math": {
|
|
"aime-2025": 1.0
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.919
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.762,
|
|
"math": 1.0,
|
|
"reasoning": 0.919
|
|
}
|
|
},
|
|
{
|
|
"id": "deepcogito/cogito-v2.1-671b",
|
|
"name": "Deep Cogito: Cogito v2.1 671B",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000125",
|
|
"completion": "0.00000125",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.1",
|
|
"name": "OpenAI: GPT-5.1",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000125",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000125"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.763
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.94
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.881
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.763,
|
|
"math": 0.94,
|
|
"reasoning": 0.881
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.1-chat",
|
|
"name": "OpenAI: GPT-5.1 Chat",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000125",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000125"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.1-codex",
|
|
"name": "OpenAI: GPT-5.1-Codex",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000125",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000125"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.737
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.737
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-5.1-codex-mini",
|
|
"name": "OpenAI: GPT-5.1-Codex-Mini",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000025",
|
|
"completion": "0.000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000025"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.737
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.737
|
|
}
|
|
},
|
|
{
|
|
"id": "kwaipilot/kat-coder-pro:free",
|
|
"name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
|
|
"context_length": 256000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "moonshotai/kimi-k2-thinking",
|
|
"name": "MoonshotAI: Kimi K2 Thinking",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000045",
|
|
"completion": "0.00000235",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.713
|
|
},
|
|
"math": {
|
|
"aime-2025": 1.0
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.845,
|
|
"mmlu-pro": 0.846
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.713,
|
|
"math": 1.0,
|
|
"reasoning": 0.8455
|
|
}
|
|
},
|
|
{
|
|
"id": "amazon/nova-premier-v1",
|
|
"name": "Amazon: Nova Premier 1.0",
|
|
"context_length": 1000000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Nova",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000025",
|
|
"completion": "0.0000125",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000625"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "perplexity/sonar-pro-search",
|
|
"name": "Perplexity: Sonar Pro Search",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000003",
|
|
"completion": "0.000015",
|
|
"request": "0.018",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/voxtral-small-24b-2507",
|
|
"name": "Mistral: Voxtral Small 24B 2507",
|
|
"context_length": 32000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"audio": "0.0001",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-oss-safeguard-20b",
|
|
"name": "OpenAI: gpt-oss-safeguard-20b",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000075",
|
|
"completion": "0.0000003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000037"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "nvidia/nemotron-nano-12b-v2-vl:free",
|
|
"name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "nvidia/nemotron-nano-12b-v2-vl",
|
|
"name": "NVIDIA: Nemotron Nano 12B 2 VL",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "minimax/minimax-m2",
|
|
"name": "MiniMax: MiniMax M2",
|
|
"context_length": 196608,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000003"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.694,
|
|
"livecodebench": 0.83
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.78
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.78,
|
|
"mmlu-pro": 0.82
|
|
},
|
|
"tool_calling": {
|
|
"tau-bench": 0.772
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.762,
|
|
"math": 0.78,
|
|
"reasoning": 0.8,
|
|
"tool_calling": 0.772
|
|
}
|
|
},
|
|
{
|
|
"id": "qwen/qwen3-vl-32b-instruct",
|
|
"name": "Qwen: Qwen3 VL 32B Instruct",
|
|
"context_length": 262144,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Qwen",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000005",
|
|
"completion": "0.0000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"math": {
|
|
"aime-2025": 0.662
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.689,
|
|
"mmlu-pro": 0.786,
|
|
"mmlu": 0.864
|
|
},
|
|
"general": {
|
|
"ifeval": 0.847
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"math": 0.662,
|
|
"reasoning": 0.7797,
|
|
"general": 0.847
|
|
}
|
|
},
|
|
{
|
|
"id": "liquid/lfm2-8b-a1b",
|
|
"name": "LiquidAI/LFM2-8B-A1B",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000005",
|
|
"completion": "0.0000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "liquid/lfm-2.2-6b",
|
|
"name": "LiquidAI/LFM2-2.6B",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000005",
|
|
"completion": "0.0000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "ibm-granite/granite-4.0-h-micro",
|
|
"name": "IBM: Granite 4.0 Micro",
|
|
"context_length": 131000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000017",
|
|
"completion": "0.00000011",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "deepcogito/cogito-v2-preview-llama-405b",
|
|
"name": "Deep Cogito: Cogito V2 Preview Llama 405B",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000035",
|
|
"completion": "0.0000035",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-5-image-mini",
|
|
"name": "OpenAI: GPT-5 Image Mini",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text+image",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000025",
|
|
"completion": "0.000002",
|
|
"request": "0",
|
|
"image": "0.0000025",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000025"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "anthropic/claude-haiku-4.5",
|
|
"name": "Anthropic: Claude Haiku 4.5",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000001",
|
|
"completion": "0.000005",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000001",
|
|
"input_cache_write": "0.00000125"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "qwen/qwen3-vl-8b-thinking",
|
|
"name": "Qwen: Qwen3 VL 8B Thinking",
|
|
"context_length": 256000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Qwen3",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000018",
|
|
"completion": "0.0000021",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"math": {
|
|
"aime-2025": 0.803
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.699,
|
|
"mmlu-pro": 0.773,
|
|
"mmlu": 0.852
|
|
},
|
|
"general": {
|
|
"ifeval": 0.832
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"math": 0.803,
|
|
"reasoning": 0.7747,
|
|
"general": 0.832
|
|
}
|
|
},
|
|
{
|
|
"id": "qwen/qwen3-vl-8b-instruct",
|
|
"name": "Qwen: Qwen3 VL 8B Instruct",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Qwen3",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000064",
|
|
"completion": "0.0000004",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0"
|
|
},
|
|
"benchmarks": {
|
|
"math": {
|
|
"aime-2025": 0.459
|
|
},
|
|
"reasoning": {
|
|
"mmlu-pro": 0.716,
|
|
"mmlu": 0.807
|
|
},
|
|
"general": {
|
|
"ifeval": 0.837
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"math": 0.459,
|
|
"reasoning": 0.7615,
|
|
"general": 0.837
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-5-image",
|
|
"name": "OpenAI: GPT-5 Image",
|
|
"context_length": 400000,
|
|
"architecture": {
|
|
"modality": "text+image->text+image",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00001",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0.00001",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000125"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/o3-deep-research",
|
|
"name": "OpenAI: o3 Deep Research",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00001",
|
|
"completion": "0.00004",
|
|
"request": "0",
|
|
"image": "0.00765",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000025"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/o4-mini-deep-research",
|
|
"name": "OpenAI: o4 Mini Deep Research",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"file",
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000002",
|
|
"completion": "0.000008",
|
|
"request": "0",
|
|
"image": "0.00153",
|
|
"web_search": "0.01",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000005"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.681,
|
|
"aider-polyglot": 0.689
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.927,
|
|
"aime-2024": 0.934
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.814
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.685,
|
|
"math": 0.9305,
|
|
"reasoning": 0.814
|
|
}
|
|
},
|
|
{
|
|
"id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
|
|
"name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000004",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"mbpp": 0.913
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.584,
|
|
"math-500": 0.966
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.6667
|
|
},
|
|
"general": {
|
|
"arena-hard": 0.883,
|
|
"mt-bench": 0.917
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.913,
|
|
"math": 0.775,
|
|
"reasoning": 0.6667,
|
|
"general": 0.9
|
|
}
|
|
},
|
|
{
|
|
"id": "baidu/ernie-4.5-21b-a3b-thinking",
|
|
"name": "Baidu: ERNIE 4.5 21B A3B Thinking",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000056",
|
|
"completion": "0.000000224",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "google/gemini-2.5-flash-image",
|
|
"name": "Google: Gemini 2.5 Flash Image (Nano Banana)",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text+image->text+image",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000003",
|
|
"completion": "0.0000025",
|
|
"request": "0",
|
|
"image": "0.001238",
|
|
"web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "qwen/qwen3-vl-30b-a3b-thinking",
      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000016",
        "completion": "0.0000008",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.831
        },
        "reasoning": {
          "gpqa": 0.744,
          "mmlu-pro": 0.805,
          "mmlu": 0.876
        },
        "general": {
          "ifeval": 0.817
        }
      },
      "category_scores": {
        "math": 0.831,
        "reasoning": 0.8083,
        "general": 0.817
      }
    },
    {
      "id": "qwen/qwen3-vl-30b-a3b-instruct",
      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
      "context_length": 262144,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.693
        },
        "reasoning": {
          "gpqa": 0.704,
          "mmlu-pro": 0.778,
          "mmlu": 0.85
        },
        "general": {
          "ifeval": 0.858
        }
      },
      "category_scores": {
        "math": 0.693,
        "reasoning": 0.7773,
        "general": 0.858
      }
    },
    {
      "id": "openai/gpt-5-pro",
      "name": "OpenAI: GPT-5 Pro",
      "context_length": 400000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000015",
        "completion": "0.00012",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "z-ai/glm-4.6",
      "name": "Z.AI: GLM 4.6",
      "context_length": 204800,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000039",
        "completion": "0.0000019",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.68
        },
        "math": {
          "aime-2025": 0.939
        },
        "reasoning": {
          "gpqa": 0.81
        }
      },
      "category_scores": {
        "code": 0.68,
        "math": 0.939,
        "reasoning": 0.81
      }
    },
    {
      "id": "z-ai/glm-4.6:exacto",
      "name": "Z.AI: GLM 4.6 (exacto)",
      "context_length": 204800,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000044",
        "completion": "0.00000176",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.68
        },
        "math": {
          "aime-2025": 0.939
        },
        "reasoning": {
          "gpqa": 0.81
        }
      },
      "category_scores": {
        "code": 0.68,
        "math": 0.939,
        "reasoning": 0.81
      }
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "context_length": 1000000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000003",
        "input_cache_write": "0.00000375"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.727
        },
        "math": {
          "aime-2025": 0.705
        },
        "reasoning": {
          "gpqa": 0.754
        }
      },
      "category_scores": {
        "code": 0.727,
        "math": 0.705,
        "reasoning": 0.754
      }
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-v3.1"
      },
      "pricing": {
        "prompt": "0.00000021",
        "completion": "0.00000032",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.678,
          "livecodebench": 0.741,
          "aider-polyglot": 0.745,
          "codeforces": 0.707
        },
        "math": {
          "aime-2025": 0.893
        },
        "reasoning": {
          "gpqa": 0.799,
          "mmlu-pro": 0.85
        }
      },
      "category_scores": {
        "code": 0.7177,
        "math": 0.893,
        "reasoning": 0.8245
      }
    },
    {
      "id": "thedrummer/cydonia-24b-v4.1",
      "name": "TheDrummer: Cydonia 24B V4.1",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "relace/relace-apply-3",
      "name": "Relace: Relace Apply 3",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000085",
        "completion": "0.00000125",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "file",
          "text",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000025",
        "request": "0",
        "image": "0.001238",
        "audio": "0.000001",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000075",
        "input_cache_write": "0.0000003833"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "qwen/qwen3-vl-235b-a22b-thinking",
      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
      "context_length": 262144,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.897
        },
        "reasoning": {
          "mmlu-pro": 0.838,
          "mmlu": 0.906
        },
        "general": {
          "ifeval": 0.882
        }
      },
      "category_scores": {
        "math": 0.897,
        "reasoning": 0.872,
        "general": 0.882
      }
    },
    {
      "id": "qwen/qwen3-vl-235b-a22b-instruct",
      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
      "context_length": 262144,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.747
        },
        "reasoning": {
          "mmlu-pro": 0.818,
          "mmlu": 0.888
        },
        "general": {
          "ifeval": 0.878
        }
      },
      "category_scores": {
        "math": 0.747,
        "reasoning": 0.853,
        "general": 0.878
      }
    },
    {
      "id": "qwen/qwen3-max",
      "name": "Qwen: Qwen3 Max",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000012",
        "completion": "0.000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000024"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-coder-plus",
      "name": "Qwen: Qwen3 Coder Plus",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000001",
        "completion": "0.000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000001"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-5-codex",
      "name": "OpenAI: GPT-5 Codex",
      "context_length": 400000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000125"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.745
        }
      },
      "category_scores": {
        "code": 0.745
      }
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-v3.1"
      },
      "pricing": {
        "prompt": "0.00000021",
        "completion": "0.00000079",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000168"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.66,
          "livecodebench": 0.564,
          "aider-polyglot": 0.684,
          "codeforces": 0.697
        },
        "math": {
          "aime-2025": 0.498,
          "aime-2024": 0.663
        },
        "reasoning": {
          "gpqa": 0.749,
          "mmlu-pro": 0.837
        }
      },
      "category_scores": {
        "code": 0.6512,
        "math": 0.5805,
        "reasoning": 0.793
      }
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-v3.1"
      },
      "pricing": {
        "prompt": "0.00000021",
        "completion": "0.00000079",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000168"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.66,
          "livecodebench": 0.564,
          "aider-polyglot": 0.684,
          "codeforces": 0.697
        },
        "math": {
          "aime-2025": 0.498,
          "aime-2024": 0.663
        },
        "reasoning": {
          "gpqa": 0.749,
          "mmlu-pro": 0.837
        }
      },
      "category_scores": {
        "code": 0.6512,
        "math": 0.5805,
        "reasoning": 0.793
      }
    },
    {
      "id": "x-ai/grok-4-fast",
      "name": "xAI: Grok 4 Fast",
      "context_length": 2000000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000005"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.8
        },
        "math": {
          "aime-2025": 0.92
        },
        "reasoning": {
          "gpqa": 0.857
        }
      },
      "category_scores": {
        "code": 0.8,
        "math": 0.92,
        "reasoning": 0.857
      }
    },
    {
      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
      "name": "Tongyi DeepResearch 30B A3B (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "alibaba/tongyi-deepresearch-30b-a3b",
      "name": "Tongyi DeepResearch 30B A3B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000009",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-coder-flash",
      "name": "Qwen: Qwen3 Coder Flash",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000008"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "opengvlab/internvl3-78b",
      "name": "OpenGVLab: InternVL3 78B",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.00000039",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-next-80b-a3b-thinking",
      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000012",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.878
        },
        "reasoning": {
          "gpqa": 0.772,
          "mmlu-pro": 0.827
        },
        "general": {
          "ifeval": 0.889
        }
      },
      "category_scores": {
        "math": 0.878,
        "reasoning": 0.7995,
        "general": 0.889
      }
    },
    {
      "id": "qwen/qwen3-next-80b-a3b-instruct",
      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000009",
        "completion": "0.0000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "aider-polyglot": 0.498
        },
        "math": {
          "aime-2025": 0.695
        },
        "reasoning": {
          "gpqa": 0.729,
          "mmlu-pro": 0.806
        },
        "general": {
          "ifeval": 0.876
        }
      },
      "category_scores": {
        "code": 0.498,
        "math": 0.695,
        "reasoning": 0.7675,
        "general": 0.876
      }
    },
    {
      "id": "meituan/longcat-flash-chat",
      "name": "Meituan: LongCat Flash Chat",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.00000075",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-plus-2025-07-28",
      "name": "Qwen: Qwen Plus 0728",
      "context_length": 1000000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-plus-2025-07-28:thinking",
      "name": "Qwen: Qwen Plus 0728 (thinking)",
      "context_length": 1000000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000004",
        "completion": "0.00000016",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.711
        },
        "math": {
          "aime-2025": 0.721,
          "math-500": 0.978
        },
        "reasoning": {
          "gpqa": 0.64
        },
        "general": {
          "ifeval": 0.903
        }
      },
      "category_scores": {
        "code": 0.711,
        "math": 0.8495,
        "reasoning": 0.64,
        "general": 0.903
      }
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000039",
        "completion": "0.0000019",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.945
        },
        "math": {
          "aime-2024": 0.72,
          "math": 0.891
        },
        "reasoning": {
          "gpqa": 0.758,
          "mmlu-pro": 0.825,
          "mmlu": 0.902
        }
      },
      "category_scores": {
        "code": 0.945,
        "math": 0.8055,
        "reasoning": 0.8283
      }
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000006",
        "completion": "0.0000025",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.945
        },
        "math": {
          "aime-2024": 0.72,
          "math": 0.891
        },
        "reasoning": {
          "gpqa": 0.758,
          "mmlu-pro": 0.825,
          "mmlu": 0.902
        }
      },
      "category_scores": {
        "code": 0.945,
        "math": 0.8055,
        "reasoning": 0.8283
      }
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000088",
        "completion": "0.00000088",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "context_length": 32767,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama4",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000018",
        "completion": "0.00000059",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "stepfun-ai/step3",
      "name": "StepFun: Step3",
      "context_length": 65536,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000057",
        "completion": "0.00000142",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-30b-a3b-thinking-2507",
      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000051",
        "completion": "0.00000034",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.626
        },
        "math": {
          "aime-2025": 0.709,
          "aime-2024": 0.804
        },
        "reasoning": {
          "gpqa": 0.658
        },
        "tool_calling": {
          "bfcl": 0.691
        },
        "general": {
          "arena-hard": 0.91
        }
      },
      "category_scores": {
        "code": 0.626,
        "math": 0.7565,
        "reasoning": 0.658,
        "tool_calling": 0.691,
        "general": 0.91
      }
    },
    {
      "id": "x-ai/grok-code-fast-1",
      "name": "xAI: Grok Code Fast 1",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000002"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.708
        }
      },
      "category_scores": {
        "code": 0.708
      }
    },
    {
      "id": "nousresearch/hermes-4-70b",
      "name": "Nous: Hermes 4 70B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000011",
        "completion": "0.00000038",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemini-2.5-flash-image-preview",
      "name": "Google: Gemini 2.5 Flash Image Preview (Nano Banana)",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text+image",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "image",
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000025",
        "request": "0",
        "image": "0.001238",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "context_length": 8192,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-v3.1"
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.00000075",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-4o-audio-preview",
      "name": "OpenAI: GPT-4o Audio",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "audio",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "audio": "0.00004",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "context_length": 120000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000056",
        "completion": "0.000000224",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "context_length": 30000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000112",
        "completion": "0.000000448",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "z-ai/glm-4.5v",
      "name": "Z.AI: GLM 4.5V",
      "context_length": 65536,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000048",
        "completion": "0.00000144",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000088",
        "input_cache_write": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.642,
          "livecodebench": 0.729
        },
        "math": {
          "aime-2024": 0.91,
          "math-500": 0.982
        },
        "reasoning": {
          "gpqa": 0.791,
          "mmlu-pro": 0.846
        }
      },
      "category_scores": {
        "code": 0.6855,
        "math": 0.946,
        "reasoning": 0.8185
      }
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000008",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-5-chat",
      "name": "OpenAI: GPT-5 Chat",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "file",
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000125"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-5",
      "name": "OpenAI: GPT-5",
      "context_length": 400000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000125"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.8
        },
        "math": {
          "aime-2025": 1.0
        },
        "reasoning": {
          "gpqa": 0.924
        }
      },
      "category_scores": {
        "code": 0.8,
        "math": 1.0,
        "reasoning": 0.924
      }
    },
    {
      "id": "openai/gpt-5-mini",
      "name": "OpenAI: GPT-5 Mini",
      "context_length": 400000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000025",
        "completion": "0.000002",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000025"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.911
        },
        "reasoning": {
          "gpqa": 0.823
        }
      },
      "category_scores": {
        "math": 0.911,
        "reasoning": 0.823
      }
    },
    {
      "id": "openai/gpt-5-nano",
      "name": "OpenAI: GPT-5 Nano",
      "context_length": 400000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000005"
      },
      "benchmarks": {
        "math": {
          "aime-2025": 0.852
        },
        "reasoning": {
          "gpqa": 0.712
        }
      },
      "category_scores": {
        "math": 0.852,
        "reasoning": 0.712
      }
    },
    {
      "id": "openai/gpt-oss-120b:free",
      "name": "OpenAI: gpt-oss-120b (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "codeforces": 0.821
        },
        "reasoning": {
          "gpqa": 0.801,
          "mmlu": 0.9
        }
      },
      "category_scores": {
        "code": 0.821,
        "reasoning": 0.8505
      }
    },
    {
      "id": "openai/gpt-oss-120b",
      "name": "OpenAI: gpt-oss-120b",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000039",
        "completion": "0.00000019",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "codeforces": 0.821
        },
        "reasoning": {
          "gpqa": 0.801,
          "mmlu": 0.9
        }
      },
      "category_scores": {
        "code": 0.821,
        "reasoning": 0.8505
      }
    },
    {
      "id": "openai/gpt-oss-120b:exacto",
      "name": "OpenAI: gpt-oss-120b (exacto)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000039",
        "completion": "0.00000019",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "codeforces": 0.821
        },
        "reasoning": {
          "gpqa": 0.801,
          "mmlu": 0.9
        }
      },
      "category_scores": {
        "code": 0.821,
        "reasoning": 0.8505
      }
    },
    {
      "id": "openai/gpt-oss-20b:free",
      "name": "OpenAI: gpt-oss-20b (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "codeforces": 0.7433
        },
        "reasoning": {
          "gpqa": 0.715,
          "mmlu": 0.853
        }
      },
      "category_scores": {
        "code": 0.7433,
        "reasoning": 0.784
      }
    },
    {
      "id": "openai/gpt-oss-20b",
      "name": "OpenAI: gpt-oss-20b",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000014",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "codeforces": 0.7433
        },
        "reasoning": {
          "gpqa": 0.715,
          "mmlu": 0.853
        }
      },
      "category_scores": {
        "code": 0.7433,
        "reasoning": 0.784
      }
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000015",
        "completion": "0.000075",
        "request": "0",
        "image": "0.024",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000015",
        "input_cache_write": "0.00001875"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.725
        },
        "math": {
          "aime-2025": 0.755
        },
        "reasoning": {
          "gpqa": 0.796
        }
      },
      "category_scores": {
        "code": 0.725,
        "math": 0.755,
        "reasoning": 0.796
      }
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000009",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-coder-30b-a3b-instruct",
      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000025",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-30b-a3b-instruct-2507",
      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000008",
        "completion": "0.00000033",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.626
        },
        "math": {
          "aime-2025": 0.709,
          "aime-2024": 0.804
        },
        "reasoning": {
          "gpqa": 0.658
        },
        "tool_calling": {
          "bfcl": 0.691
        },
        "general": {
          "arena-hard": 0.91
        }
      },
      "category_scores": {
        "code": 0.626,
        "math": 0.7565,
        "reasoning": 0.658,
        "tool_calling": 0.691,
        "general": 0.91
      }
    },
    {
      "id": "z-ai/glm-4.5",
      "name": "Z.AI: GLM 4.5",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000035",
        "completion": "0.00000155",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.642,
          "livecodebench": 0.729
        },
        "math": {
          "aime-2024": 0.91,
          "math-500": 0.982
        },
        "reasoning": {
          "gpqa": 0.791,
          "mmlu-pro": 0.846
        }
      },
      "category_scores": {
        "code": 0.6855,
        "math": 0.946,
        "reasoning": 0.8185
      }
    },
    {
      "id": "z-ai/glm-4.5-air:free",
      "name": "Z.AI: GLM 4.5 Air (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.642,
          "livecodebench": 0.729
        },
        "math": {
          "aime-2024": 0.91,
          "math-500": 0.982
        },
        "reasoning": {
          "gpqa": 0.791,
          "mmlu-pro": 0.846
        }
      },
      "category_scores": {
        "code": 0.6855,
        "math": 0.946,
        "reasoning": 0.8185
      }
    },
    {
      "id": "z-ai/glm-4.5-air",
      "name": "Z.AI: GLM 4.5 Air",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000104",
        "completion": "0.00000068",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.642,
          "livecodebench": 0.729
        },
        "math": {
          "aime-2024": 0.91,
          "math-500": 0.982
        },
        "reasoning": {
          "gpqa": 0.791,
          "mmlu-pro": 0.846
        }
      },
      "category_scores": {
        "code": 0.6855,
        "math": 0.946,
        "reasoning": 0.8185
      }
    },
    {
      "id": "qwen/qwen3-235b-a22b-thinking-2507",
      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.00000011",
        "completion": "0.0000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.707,
          "mbpp": 0.814
        },
        "math": {
          "aime-2025": 0.815,
          "aime-2024": 0.857,
          "gsm8k": 0.9439,
          "math": 0.7184
        },
        "reasoning": {
          "gpqa": 0.4747,
          "mmlu-pro": 0.6818,
          "mmlu": 0.8781
        },
        "tool_calling": {
          "bfcl": 0.708
        },
        "general": {
          "arena-hard": 0.956
        }
      },
      "category_scores": {
        "code": 0.7605,
        "math": 0.8336,
        "reasoning": 0.6782,
        "tool_calling": 0.708,
        "general": 0.956
      }
    },
    {
      "id": "z-ai/glm-4-32b",
      "name": "Z.AI: GLM 4 32B ",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-coder:free",
      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
      "context_length": 262000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-coder",
      "name": "Qwen: Qwen3 Coder 480B A35B",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000022",
        "completion": "0.00000095",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.696,
          "aider-polyglot": 0.618
        }
      },
      "category_scores": {
        "code": 0.657
      }
    },
    {
      "id": "qwen/qwen3-coder:exacto",
      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000022",
        "completion": "0.0000018",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "bytedance/ui-tars-1.5-7b",
      "name": "ByteDance: UI-TARS 7B ",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000001",
        "input_cache_write": "0.0000001833"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "qwen/qwen3-235b-a22b-2507",
      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000071",
        "completion": "0.000000463",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.707,
          "mbpp": 0.814
        },
        "math": {
          "aime-2025": 0.815,
          "aime-2024": 0.857,
          "gsm8k": 0.9439,
          "math": 0.7184
        },
        "reasoning": {
          "gpqa": 0.4747,
          "mmlu-pro": 0.6818,
          "mmlu": 0.8781
        },
        "tool_calling": {
          "bfcl": 0.708
        },
        "general": {
          "arena-hard": 0.956
        }
      },
      "category_scores": {
        "code": 0.7605,
        "math": 0.8336,
        "reasoning": 0.6782,
        "tool_calling": 0.708,
        "general": 0.956
      }
    },
    {
      "id": "switchpoint/router",
      "name": "Switchpoint Router",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000085",
        "completion": "0.0000034",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "moonshotai/kimi-k2:free",
      "name": "MoonshotAI: Kimi K2 0711 (free)",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000456",
        "completion": "0.00000184",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.713
        },
        "math": {
          "aime-2025": 1.0
        },
        "reasoning": {
          "gpqa": 0.845,
          "mmlu-pro": 0.846
        }
      },
      "category_scores": {
        "code": 0.713,
        "math": 1.0,
        "reasoning": 0.8455
      }
    },
    {
      "id": "thudm/glm-4.1v-9b-thinking",
      "name": "THUDM: GLM 4.1V 9B Thinking",
      "context_length": 65536,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000028",
        "completion": "0.0000001104",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.616
        }
      },
      "category_scores": {
        "code": 0.616
      }
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000007",
        "completion": "0.00000028",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.536
        }
      },
      "category_scores": {
        "code": 0.536
      }
    },
    {
      "id": "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
      "name": "Venice: Uncensored (free)",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "x-ai/grok-4",
      "name": "xAI: Grok 4",
      "context_length": 256000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000075"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.8
        },
        "math": {
          "aime-2025": 0.92
        },
        "reasoning": {
          "gpqa": 0.857
        }
      },
      "category_scores": {
        "code": 0.8,
        "math": 0.92,
        "reasoning": 0.857
      }
    },
    {
      "id": "google/gemma-3n-e2b-it:free",
      "name": "Google: Gemma 3n 2B (free)",
      "context_length": 8192,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.665,
          "livecodebench": 0.132,
          "mbpp": 0.566
        },
        "math": {
          "aime-2025": 0.067
        },
        "reasoning": {
          "gpqa": 0.248,
          "mmlu-pro": 0.405,
          "mmlu": 0.601
        }
      },
      "category_scores": {
        "code": 0.4543,
        "math": 0.067,
        "reasoning": 0.418
      }
    },
    {
      "id": "tencent/hunyuan-a13b-instruct",
      "name": "Tencent: Hunyuan A13B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000014",
        "completion": "0.00000057",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "tngtech/deepseek-r1t2-chimera:free",
      "name": "TNG: DeepSeek R1T2 Chimera (free)",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "tngtech/deepseek-r1t2-chimera",
      "name": "TNG: DeepSeek R1T2 Chimera",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "morph/morph-v3-large",
      "name": "Morph: Morph V3 Large",
      "context_length": 262144,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000009",
        "completion": "0.0000019",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "morph/morph-v3-fast",
      "name": "Morph: Morph V3 Fast",
      "context_length": 81920,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
|
|
{
|
|
"id": "baidu/ernie-4.5-vl-424b-a47b",
|
|
"name": "Baidu: ERNIE 4.5 VL 424B A47B ",
|
|
"context_length": 123000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"image",
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000336",
|
|
"completion": "0.000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "baidu/ernie-4.5-300b-a47b",
|
|
"name": "Baidu: ERNIE 4.5 300B A47B ",
|
|
"context_length": 123000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000224",
|
|
"completion": "0.00000088",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "inception/mercury",
|
|
"name": "Inception: Mercury",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000025",
|
|
"completion": "0.000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000018",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "math": 0.6942
        },
        "reasoning": {
          "gpqa": 0.4613,
          "mmlu-pro": 0.6906,
          "mmlu": 0.805
        },
        "general": {
          "arena-hard": 0.431
        }
      },
      "category_scores": {
        "math": 0.6942,
        "reasoning": 0.6523,
        "general": 0.431
      }
    },
    {
      "id": "minimax/minimax-m1",
      "name": "MiniMax: MiniMax M1",
      "context_length": 1000000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000022",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.56,
          "livecodebench": 0.65
        },
        "math": {
          "aime-2025": 0.769,
          "aime-2024": 0.86,
          "math-500": 0.968
        },
        "reasoning": {
          "gpqa": 0.7,
          "mmlu-pro": 0.811
        },
        "long_context": {
          "longbench-v2": 0.615
        }
      },
      "category_scores": {
        "code": 0.605,
        "math": 0.8657,
        "reasoning": 0.7555,
        "long_context": 0.615
      }
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "file",
          "image",
          "text",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000025",
        "request": "0",
        "image": "0.001238",
        "audio": "0.000001",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000003",
        "input_cache_write": "0.0000003833"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.604,
          "aider-polyglot": 0.619
        },
        "math": {
          "aime-2025": 0.72,
          "aime-2024": 0.88
        },
        "reasoning": {
          "gpqa": 0.828
        }
      },
      "category_scores": {
        "code": 0.6115,
        "math": 0.8,
        "reasoning": 0.828
      }
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0.00516",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000125",
        "input_cache_write": "0.000001625"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.672,
          "livecodebench": 0.69,
          "aider-polyglot": 0.822
        },
        "math": {
          "aime-2025": 0.88
        },
        "reasoning": {
          "gpqa": 0.864
        }
      },
      "category_scores": {
        "code": 0.728,
        "math": 0.88,
        "reasoning": 0.864
      }
    },
    {
      "id": "moonshotai/kimi-dev-72b",
      "name": "MoonshotAI: Kimi Dev 72B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000029",
        "completion": "0.00000115",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/o3-pro",
      "name": "OpenAI: o3 Pro",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "file",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00002",
        "completion": "0.00008",
        "request": "0",
        "image": "0.0153",
        "web_search": "0.01",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "x-ai/grok-3-mini",
      "name": "xAI: Grok 3 Mini",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000075"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.804
        },
        "math": {
          "aime-2025": 0.908,
          "aime-2024": 0.958
        },
        "reasoning": {
          "gpqa": 0.84
        }
      },
      "category_scores": {
        "code": 0.804,
        "math": 0.933,
        "reasoning": 0.84
      }
    },
    {
      "id": "x-ai/grok-3",
      "name": "xAI: Grok 3",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000075"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.804
        },
        "math": {
          "aime-2025": 0.908,
          "aime-2024": 0.958
        },
        "reasoning": {
          "gpqa": 0.84
        }
      },
      "category_scores": {
        "code": 0.804,
        "math": 0.933,
        "reasoning": 0.84
      }
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "file",
          "image",
          "text",
          "audio"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0.00516",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000031",
        "input_cache_write": "0.000001625"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.672,
          "livecodebench": 0.69,
          "aider-polyglot": 0.822
        },
        "math": {
          "aime-2025": 0.88
        },
        "reasoning": {
          "gpqa": 0.864
        }
      },
      "category_scores": {
        "code": 0.728,
        "math": 0.88,
        "reasoning": 0.864
      }
    },
    {
      "id": "deepseek/deepseek-r1-0528-qwen3-8b",
      "name": "DeepSeek: DeepSeek R1 0528 Qwen3 8B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.446,
          "livecodebench": 0.733,
          "aider-polyglot": 0.716,
          "codeforces": 0.6433
        },
        "math": {
          "aime-2025": 0.875,
          "aime-2024": 0.914
        },
        "reasoning": {
          "gpqa": 0.81,
          "mmlu-pro": 0.85
        }
      },
      "category_scores": {
        "code": 0.6346,
        "math": 0.8945,
        "reasoning": 0.83
      }
    },
    {
      "id": "deepseek/deepseek-r1-0528:free",
      "name": "DeepSeek: R1 0528 (free)",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.446,
          "livecodebench": 0.733,
          "aider-polyglot": 0.716,
          "codeforces": 0.6433
        },
        "math": {
          "aime-2025": 0.875,
          "aime-2024": 0.914
        },
        "reasoning": {
          "gpqa": 0.81,
          "mmlu-pro": 0.85
        }
      },
      "category_scores": {
        "code": 0.6346,
        "math": 0.8945,
        "reasoning": 0.83
      }
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.00000175",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.446,
          "livecodebench": 0.733,
          "aider-polyglot": 0.716,
          "codeforces": 0.6433
        },
        "math": {
          "aime-2025": 0.875,
          "aime-2024": 0.914
        },
        "reasoning": {
          "gpqa": 0.81,
          "mmlu-pro": 0.85
        }
      },
      "category_scores": {
        "code": 0.6346,
        "math": 0.8945,
        "reasoning": 0.83
      }
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000015",
        "completion": "0.000075",
        "request": "0",
        "image": "0.024",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000015",
        "input_cache_write": "0.00001875"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.809
        },
        "reasoning": {
          "gpqa": 0.87
        }
      },
      "category_scores": {
        "code": 0.809,
        "reasoning": 0.87
      }
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "context_length": 1000000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0.0048",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000003",
        "input_cache_write": "0.00000375"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.727
        },
        "math": {
          "aime-2025": 0.705
        },
        "reasoning": {
          "gpqa": 0.754
        }
      },
      "category_scores": {
        "code": 0.727,
        "math": 0.705,
        "reasoning": 0.754
      }
    },
    {
      "id": "mistralai/devstral-small-2505",
      "name": "Mistral: Devstral Small 2505",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemma-3n-e4b-it:free",
      "name": "Google: Gemma 3n 4B (free)",
      "context_length": 8192,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.75,
          "livecodebench": 0.132,
          "mbpp": 0.636
        },
        "math": {
          "aime-2025": 0.116
        },
        "reasoning": {
          "gpqa": 0.237,
          "mmlu-pro": 0.506,
          "mmlu": 0.649
        }
      },
      "category_scores": {
        "code": 0.506,
        "math": 0.116,
        "reasoning": 0.464
      }
    },
    {
      "id": "google/gemma-3n-e4b-it",
      "name": "Google: Gemma 3n 4B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.00000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.75,
          "livecodebench": 0.132,
          "mbpp": 0.636
        },
        "math": {
          "aime-2025": 0.116
        },
        "reasoning": {
          "gpqa": 0.237,
          "mmlu-pro": 0.506,
          "mmlu": 0.649
        }
      },
      "category_scores": {
        "code": 0.506,
        "math": 0.116,
        "reasoning": 0.464
      }
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000015",
        "completion": "0.000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000375"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000125",
        "completion": "0.00001",
        "request": "0",
        "image": "0.00516",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000031",
        "input_cache_write": "0.000001625"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.632,
          "aider-polyglot": 0.765
        },
        "math": {
          "aime-2025": 0.83,
          "aime-2024": 0.92
        },
        "reasoning": {
          "gpqa": 0.83
        }
      },
      "category_scores": {
        "code": 0.6985,
        "math": 0.875,
        "reasoning": 0.83
      }
    },
    {
      "id": "arcee-ai/spotlight",
      "name": "Arcee AI: Spotlight",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000018",
        "completion": "0.00000018",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "arcee-ai/maestro-reasoning",
      "name": "Arcee AI: Maestro Reasoning",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000009",
        "completion": "0.0000033",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000075",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "arcee-ai/coder-large",
      "name": "Arcee AI: Coder Large",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000005",
        "completion": "0.0000008",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "microsoft/phi-4-reasoning-plus",
      "name": "Microsoft: Phi 4 Reasoning Plus",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000007",
        "completion": "0.00000035",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.826
        },
        "math": {
          "math": 0.804
        },
        "reasoning": {
          "gpqa": 0.561,
          "mmlu-pro": 0.704,
          "mmlu": 0.848
        },
        "general": {
          "ifeval": 0.63,
          "arena-hard": 0.754
        }
      },
      "category_scores": {
        "code": 0.826,
        "math": 0.804,
        "reasoning": 0.7043,
        "general": 0.692
      }
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000025",
        "completion": "0.000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-4b:free",
      "name": "Qwen: Qwen3 4B (free)",
      "context_length": 40960,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "deepseek/deepseek-prover-v2",
      "name": "DeepSeek: DeepSeek Prover V2",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000005",
        "completion": "0.00000218",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "meta-llama/llama-guard-4-12b",
      "name": "Meta: Llama Guard 4 12B",
      "context_length": 163840,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000018",
        "completion": "0.00000018",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-30b-a3b",
      "name": "Qwen: Qwen3 30B A3B",
      "context_length": 40960,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000022",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.626
        },
        "math": {
          "aime-2025": 0.709,
          "aime-2024": 0.804
        },
        "reasoning": {
          "gpqa": 0.658
        },
        "tool_calling": {
          "bfcl": 0.691
        },
        "general": {
          "arena-hard": 0.91
        }
      },
      "category_scores": {
        "code": 0.626,
        "math": 0.7565,
        "reasoning": 0.658,
        "tool_calling": 0.691,
        "general": 0.91
      }
    },
    {
      "id": "qwen/qwen3-8b",
      "name": "Qwen: Qwen3 8B",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.000000028",
        "completion": "0.0000001104",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-14b",
      "name": "Qwen: Qwen3 14B",
      "context_length": 40960,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.00000022",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen3-32b",
      "name": "Qwen: Qwen3 32B",
      "context_length": 40960,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.00000008",
        "completion": "0.00000024",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.657,
          "codeforces": 0.659
        },
        "math": {
          "aime-2025": 0.729,
          "aime-2024": 0.814
        },
        "tool_calling": {
          "bfcl": 0.703
        },
        "general": {
          "arena-hard": 0.938
        }
      },
      "category_scores": {
        "code": 0.658,
        "math": 0.7715,
        "tool_calling": 0.703,
        "general": 0.938
      }
    },
    {
      "id": "qwen/qwen3-235b-a22b:free",
      "name": "Qwen: Qwen3 235B A22B (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.707,
          "mbpp": 0.814
        },
        "math": {
          "aime-2025": 0.815,
          "aime-2024": 0.857,
          "gsm8k": 0.9439,
          "math": 0.7184
        },
        "reasoning": {
          "gpqa": 0.4747,
          "mmlu-pro": 0.6818,
          "mmlu": 0.8781
        },
        "tool_calling": {
          "bfcl": 0.708
        },
        "general": {
          "arena-hard": 0.956
        }
      },
      "category_scores": {
        "code": 0.7605,
        "math": 0.8336,
        "reasoning": 0.6782,
        "tool_calling": 0.708,
        "general": 0.956
      }
    },
    {
      "id": "qwen/qwen3-235b-a22b",
      "name": "Qwen: Qwen3 235B A22B",
      "context_length": 40960,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen3",
        "instruct_type": "qwen3"
      },
      "pricing": {
        "prompt": "0.00000018",
        "completion": "0.00000054",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.707,
          "mbpp": 0.814
        },
        "math": {
          "aime-2025": 0.815,
          "aime-2024": 0.857,
          "gsm8k": 0.9439,
          "math": 0.7184
        },
        "reasoning": {
          "gpqa": 0.4747,
          "mmlu-pro": 0.6818,
          "mmlu": 0.8781
        },
        "tool_calling": {
          "bfcl": 0.708
        },
        "general": {
          "arena-hard": 0.956
        }
      },
      "category_scores": {
        "code": 0.7605,
        "math": 0.8336,
        "reasoning": 0.6782,
        "tool_calling": 0.708,
        "general": 0.956
      }
    },
    {
      "id": "tngtech/deepseek-r1t-chimera:free",
      "name": "TNG: DeepSeek R1T Chimera (free)",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "tngtech/deepseek-r1t-chimera",
      "name": "TNG: DeepSeek R1T Chimera",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/o4-mini-high",
      "name": "OpenAI: o4 Mini High",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000011",
        "completion": "0.0000044",
        "request": "0",
        "image": "0.0008415",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000275"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.681,
          "aider-polyglot": 0.689
        },
        "math": {
          "aime-2025": 0.927,
          "aime-2024": 0.934
        },
        "reasoning": {
          "gpqa": 0.814
        }
      },
      "category_scores": {
        "code": 0.685,
        "math": 0.9305,
        "reasoning": 0.814
      }
    },
    {
      "id": "openai/o3",
      "name": "OpenAI: o3",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000008",
        "request": "0",
        "image": "0.00153",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000005"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.691,
          "aider-polyglot": 0.813
        },
        "math": {
          "aime-2025": 0.864,
          "aime-2024": 0.916
        },
        "reasoning": {
          "gpqa": 0.833
        },
        "tool_calling": {
          "tau-bench": 0.63
        }
      },
      "category_scores": {
        "code": 0.752,
        "math": 0.89,
        "reasoning": 0.833,
        "tool_calling": 0.63
      }
    },
    {
      "id": "openai/o4-mini",
      "name": "OpenAI: o4 Mini",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000011",
        "completion": "0.0000044",
        "request": "0",
        "image": "0.0008415",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000275"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.681,
          "aider-polyglot": 0.689
        },
        "math": {
          "aime-2025": 0.927,
          "aime-2024": 0.934
        },
        "reasoning": {
          "gpqa": 0.814
        }
      },
      "category_scores": {
        "code": 0.685,
        "math": 0.9305,
        "reasoning": 0.814
      }
    },
    {
      "id": "qwen/qwen2.5-coder-7b-instruct",
      "name": "Qwen: Qwen2.5 Coder 7B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000009",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "context_length": 1047576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000008",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000005"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.546,
          "aider-polyglot": 0.516
        },
        "math": {
          "aime-2025": 0.464,
          "aime-2024": 0.481
        },
        "reasoning": {
          "gpqa": 0.663,
          "mmlu": 0.902
        },
        "general": {
          "ifeval": 0.874
        }
      },
      "category_scores": {
        "code": 0.531,
        "math": 0.4725,
        "reasoning": 0.7825,
        "general": 0.874
      }
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "context_length": 1047576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000016",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000001"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.236,
          "aider-polyglot": 0.347
        },
        "math": {
          "aime-2025": 0.402,
          "aime-2024": 0.496
        },
        "reasoning": {
          "gpqa": 0.65,
          "mmlu": 0.875
        },
        "general": {
          "ifeval": 0.841
        }
      },
      "category_scores": {
        "code": 0.2915,
        "math": 0.449,
        "reasoning": 0.7625,
        "general": 0.841
      }
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "context_length": 1047576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "image",
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0.01",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000025"
      },
      "benchmarks": {
        "code": {
          "aider-polyglot": 0.098
        },
        "math": {
          "aime-2024": 0.294
        },
        "reasoning": {
          "gpqa": 0.503,
          "mmlu": 0.801
        },
        "general": {
          "ifeval": 0.745
        }
      },
      "category_scores": {
        "code": 0.098,
        "math": 0.294,
        "reasoning": 0.652,
        "general": 0.745
      }
    },
    {
      "id": "eleutherai/llemma_7b",
      "name": "EleutherAI: Llemma 7b",
      "context_length": 4096,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "code-llama"
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "alfredpros/codellama-7b-instruct-solidity",
      "name": "AlfredPros: CodeLLaMa 7B Instruct Solidity",
      "context_length": 4096,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "alpaca"
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "arliai/qwq-32b-arliai-rpr-v1",
      "name": "ArliAI: QwQ 32B RpR v1",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.634
        },
        "math": {
          "aime-2024": 0.795,
          "math-500": 0.906
        },
        "reasoning": {
          "gpqa": 0.652
        },
        "tool_calling": {
          "bfcl": 0.664
        },
        "general": {
          "ifeval": 0.839
        }
      },
      "category_scores": {
        "code": 0.634,
        "math": 0.8505,
        "reasoning": 0.652,
        "tool_calling": 0.664,
        "general": 0.839
      }
    },
    {
      "id": "x-ai/grok-3-mini-beta",
      "name": "xAI: Grok 3 Mini Beta",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000075"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.804
        },
        "math": {
          "aime-2025": 0.908,
          "aime-2024": 0.958
        },
        "reasoning": {
          "gpqa": 0.84
        }
      },
      "category_scores": {
        "code": 0.804,
        "math": 0.933,
        "reasoning": 0.84
      }
    },
    {
      "id": "x-ai/grok-3-beta",
      "name": "xAI: Grok 3 Beta",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Grok",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000075"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.794
        },
        "math": {
          "aime-2025": 0.933,
          "aime-2024": 0.933
        },
        "reasoning": {
          "gpqa": 0.846
        }
      },
      "category_scores": {
        "code": 0.794,
        "math": 0.933,
        "reasoning": 0.846
      }
    },
    {
      "id": "nvidia/llama-3.1-nemotron-ultra-253b-v1",
      "name": "NVIDIA: Llama 3.1 Nemotron Ultra 253B v1",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000006",
        "completion": "0.0000018",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.6631
        },
        "math": {
          "aime-2025": 0.725,
          "math-500": 0.97
        },
        "reasoning": {
          "gpqa": 0.7601
        },
        "general": {
          "ifeval": 0.8945
        }
      },
      "category_scores": {
        "code": 0.6631,
        "math": 0.8475,
        "reasoning": 0.7601,
        "general": 0.8945
      }
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama4",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000006",
        "request": "0",
        "image": "0.0006684",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.434,
          "mbpp": 0.776
        },
        "math": {
          "math": 0.612
        },
        "reasoning": {
          "gpqa": 0.698,
          "mmlu-pro": 0.805,
          "mmlu": 0.855
        }
      },
      "category_scores": {
        "code": 0.605,
        "math": 0.612,
        "reasoning": 0.786
      }
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "context_length": 327680,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama4",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000008",
        "completion": "0.0000003",
        "request": "0",
        "image": "0.0003342",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.328,
          "mbpp": 0.678
        },
        "math": {
          "math": 0.503
        },
        "reasoning": {
          "gpqa": 0.572,
          "mmlu-pro": 0.743,
          "mmlu": 0.796
        }
      },
      "category_scores": {
        "code": 0.503,
        "math": 0.503,
        "reasoning": 0.7037
      }
    },
    {
      "id": "qwen/qwen2.5-vl-32b-instruct",
      "name": "Qwen: Qwen2.5 VL 32B Instruct",
      "context_length": 16384,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.00000022",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.915,
          "mbpp": 0.84
        },
        "math": {
          "math": 0.822
        },
        "reasoning": {
          "gpqa": 0.46,
          "mmlu-pro": 0.688,
          "mmlu": 0.784
        }
      },
      "category_scores": {
        "code": 0.8775,
        "math": 0.822,
        "reasoning": 0.644
      }
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.00000088",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000106"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/o1-pro",
      "name": "OpenAI: o1-pro",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00015",
        "completion": "0.0006",
        "request": "0",
        "image": "0.21675",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "aime-2024": 0.86
        },
        "reasoning": {
          "gpqa": 0.79
        }
      },
      "category_scores": {
        "math": 0.86,
        "reasoning": 0.79
      }
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.8841,
          "mbpp": 0.7471
        },
        "math": {
          "math": 0.693
        },
        "reasoning": {
          "gpqa": 0.4596,
          "mmlu-pro": 0.6676,
          "mmlu": 0.8062
        }
      },
      "category_scores": {
        "code": 0.8156,
        "math": 0.693,
        "reasoning": 0.6445
      }
    },
    {
      "id": "allenai/olmo-2-0325-32b-instruct",
      "name": "AllenAI: Olmo 2 32B Instruct",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.0000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemma-3-4b-it:free",
      "name": "Google: Gemma 3 4B (free)",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.713,
          "livecodebench": 0.126,
          "mbpp": 0.632
        },
        "math": {
          "gsm8k": 0.892,
          "math": 0.756
        },
        "reasoning": {
          "gpqa": 0.308,
          "mmlu-pro": 0.436
        },
        "general": {
          "ifeval": 0.902
        }
      },
      "category_scores": {
        "code": 0.4903,
        "math": 0.824,
        "reasoning": 0.372,
        "general": 0.902
      }
    },
    {
      "id": "google/gemma-3-4b-it",
      "name": "Google: Gemma 3 4B",
      "context_length": 96000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0.00000001703012",
        "completion": "0.0000000681536",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.713,
          "livecodebench": 0.126,
          "mbpp": 0.632
        },
        "math": {
          "gsm8k": 0.892,
          "math": 0.756
        },
        "reasoning": {
          "gpqa": 0.308,
          "mmlu-pro": 0.436
        },
        "general": {
          "ifeval": 0.902
        }
      },
      "category_scores": {
        "code": 0.4903,
        "math": 0.824,
        "reasoning": 0.372,
        "general": 0.902
      }
    },
    {
      "id": "google/gemma-3-12b-it:free",
      "name": "Google: Gemma 3 12B (free)",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.854,
          "livecodebench": 0.246,
          "mbpp": 0.73
        },
        "math": {
          "gsm8k": 0.944,
          "math": 0.838
        },
        "reasoning": {
          "gpqa": 0.409,
          "mmlu-pro": 0.606
        },
        "general": {
          "ifeval": 0.889
        }
      },
      "category_scores": {
        "code": 0.61,
        "math": 0.891,
        "reasoning": 0.5075,
        "general": 0.889
      }
    },
    {
      "id": "google/gemma-3-12b-it",
      "name": "Google: Gemma 3 12B",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.854,
          "livecodebench": 0.246,
          "mbpp": 0.73
        },
        "math": {
          "gsm8k": 0.944,
          "math": 0.838
        },
        "reasoning": {
          "gpqa": 0.409,
          "mmlu-pro": 0.606
        },
        "general": {
          "ifeval": 0.889
        }
      },
      "category_scores": {
        "code": 0.61,
        "math": 0.891,
        "reasoning": 0.5075,
        "general": 0.889
      }
    },
    {
      "id": "cohere/command-a",
      "name": "Cohere: Command A",
      "context_length": 256000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-4o-mini-search-preview",
      "name": "OpenAI: GPT-4o-mini Search Preview",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000006",
        "request": "0.0275",
        "image": "0.000217",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-4o-search-preview",
      "name": "OpenAI: GPT-4o Search Preview",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0.035",
        "image": "0.003613",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemma-3-27b-it:free",
      "name": "Google: Gemma 3 27B (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.878,
          "livecodebench": 0.297,
          "mbpp": 0.744
        },
        "math": {
          "gsm8k": 0.959,
          "math": 0.89
        },
        "reasoning": {
          "gpqa": 0.424,
          "mmlu-pro": 0.675
        },
        "general": {
          "ifeval": 0.904
        }
      },
      "category_scores": {
        "code": 0.6397,
        "math": 0.9245,
        "reasoning": 0.5495,
        "general": 0.904
      }
    },
    {
      "id": "google/gemma-3-27b-it",
      "name": "Google: Gemma 3 27B",
      "context_length": 96000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": "gemma"
      },
      "pricing": {
        "prompt": "0.00000004",
        "completion": "0.00000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.878,
          "livecodebench": 0.297,
          "mbpp": 0.744
        },
        "math": {
          "gsm8k": 0.959,
          "math": 0.89
        },
        "reasoning": {
          "gpqa": 0.424,
          "mmlu-pro": 0.675
        },
        "general": {
          "ifeval": 0.904
        }
      },
      "category_scores": {
        "code": 0.6397,
        "math": 0.9245,
        "reasoning": 0.5495,
        "general": 0.904
      }
    },
    {
      "id": "thedrummer/skyfall-36b-v2",
      "name": "TheDrummer: Skyfall 36B V2",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000055",
        "completion": "0.0000008",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "microsoft/phi-4-multimodal-instruct",
      "name": "Microsoft: Phi 4 Multimodal Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.0000001",
        "request": "0",
        "image": "0.00017685",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.826
        },
        "math": {
          "math": 0.804
        },
        "reasoning": {
          "gpqa": 0.561,
          "mmlu-pro": 0.704,
          "mmlu": 0.848
        },
        "general": {
          "ifeval": 0.63,
          "arena-hard": 0.754
        }
      },
      "category_scores": {
        "code": 0.826,
        "math": 0.804,
        "reasoning": 0.7043,
        "general": 0.692
      }
    },
    {
      "id": "perplexity/sonar-reasoning-pro",
      "name": "Perplexity: Sonar Reasoning Pro",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000008",
        "request": "0",
        "image": "0",
        "web_search": "0.005",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "perplexity/sonar-pro",
      "name": "Perplexity: Sonar Pro",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000015",
        "request": "0",
        "image": "0",
        "web_search": "0.005",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "perplexity/sonar-deep-research",
      "name": "Perplexity: Sonar Deep Research",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000008",
        "request": "0",
        "image": "0",
        "web_search": "0.005",
        "internal_reasoning": "0.000003"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwq-32b",
      "name": "Qwen: QwQ 32B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "qwq"
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.634
        },
        "math": {
          "aime-2024": 0.795,
          "math-500": 0.906
        },
        "reasoning": {
          "gpqa": 0.652
        },
        "tool_calling": {
          "bfcl": 0.664
        },
        "general": {
          "ifeval": 0.839
        }
      },
      "category_scores": {
        "code": 0.634,
        "math": 0.8505,
        "reasoning": 0.652,
        "tool_calling": 0.664,
        "general": 0.839
      }
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file",
          "audio",
          "video"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000075",
        "completion": "0.0000003",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.351
        },
        "math": {
          "math": 0.897
        },
        "reasoning": {
          "gpqa": 0.621,
          "mmlu-pro": 0.764
        }
      },
      "category_scores": {
        "code": 0.351,
        "math": 0.897,
        "reasoning": 0.6925
      }
    },
{
|
|
"id": "anthropic/claude-3.7-sonnet:thinking",
|
|
"name": "Anthropic: Claude 3.7 Sonnet (thinking)",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000003",
|
|
"completion": "0.000015",
|
|
"request": "0",
|
|
"image": "0.0048",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000003",
|
|
"input_cache_write": "0.00000375"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "anthropic/claude-3.7-sonnet",
|
|
"name": "Anthropic: Claude 3.7 Sonnet",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000003",
|
|
"completion": "0.000015",
|
|
"request": "0",
|
|
"image": "0.0048",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000003",
|
|
"input_cache_write": "0.00000375"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-saba",
|
|
"name": "Mistral: Saba",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "meta-llama/llama-guard-3-8b",
|
|
"name": "Llama Guard 3 8B",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "none"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000002",
|
|
"completion": "0.00000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/o3-mini-high",
|
|
"name": "OpenAI: o3 Mini High",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000011",
|
|
"completion": "0.0000044",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000055"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.493,
|
|
"aider-polyglot": 0.667
|
|
},
|
|
"math": {
|
|
"aime-2024": 0.873,
|
|
"math": 0.979
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.772,
|
|
"mmlu": 0.869
|
|
},
|
|
"general": {
|
|
"ifeval": 0.939
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.58,
|
|
"math": 0.926,
|
|
"reasoning": 0.8205,
|
|
"general": 0.939
|
|
}
|
|
},
|
|
{
|
|
"id": "google/gemini-2.0-flash-001",
|
|
"name": "Google: Gemini 2.0 Flash",
|
|
"context_length": 1048576,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000004",
|
|
"request": "0",
|
|
"image": "0.0000258",
|
|
"audio": "0.0000007",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000025",
|
|
"input_cache_write": "0.0000001833"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"livecodebench": 0.351
|
|
},
|
|
"math": {
|
|
"math": 0.897
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.621,
|
|
"mmlu-pro": 0.764
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.351,
|
|
"math": 0.897,
|
|
"reasoning": 0.6925
|
|
}
|
|
},
|
|
{
|
|
"id": "qwen/qwen-vl-plus",
|
|
"name": "Qwen: Qwen VL Plus",
|
|
"context_length": 7500,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Qwen",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000021",
|
|
"completion": "0.00000063",
|
|
"request": "0",
|
|
"image": "0.0002688",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "aion-labs/aion-1.0",
|
|
"name": "AionLabs: Aion-1.0",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000004",
|
|
"completion": "0.000008",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "aion-labs/aion-1.0-mini",
|
|
"name": "AionLabs: Aion-1.0-Mini",
|
|
"context_length": 131072,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000007",
|
|
"completion": "0.0000014",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "aion-labs/aion-rp-llama-3.1-8b",
|
|
"name": "AionLabs: Aion-RP 1.0 (8B)",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
    {
      "id": "qwen/qwen-vl-max",
      "name": "Qwen: Qwen VL Max",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.0000032",
        "request": "0",
        "image": "0.001024",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-turbo",
      "name": "Qwen: Qwen-Turbo",
      "context_length": 1000000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000005",
        "completion": "0.0000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000002"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen2.5-vl-72b-instruct",
      "name": "Qwen: Qwen2.5 VL 72B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000013",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-plus",
      "name": "Qwen: Qwen-Plus",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000016"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-max",
      "name": "Qwen: Qwen-Max ",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000016",
        "completion": "0.0000064",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000064"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/o3-mini",
      "name": "OpenAI: o3 Mini",
      "context_length": 200000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000011",
        "completion": "0.0000044",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000055"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.493,
          "aider-polyglot": 0.667
        },
        "math": {
          "aime-2024": 0.873,
          "math": 0.979
        },
        "reasoning": {
          "gpqa": 0.772,
          "mmlu": 0.869
        },
        "general": {
          "ifeval": 0.939
        }
      },
      "category_scores": {
        "code": 0.58,
        "math": 0.926,
        "reasoning": 0.8205,
        "general": 0.939
      }
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.848
        },
        "math": {
          "math": 0.706
        },
        "reasoning": {
          "gpqa": 0.453,
          "mmlu-pro": 0.663
        },
        "general": {
          "ifeval": 0.829,
          "arena-hard": 0.876,
          "mt-bench": 0.835
        }
      },
      "category_scores": {
        "code": 0.848,
        "math": 0.706,
        "reasoning": 0.558,
        "general": 0.8467
      }
    },
    {
      "id": "deepseek/deepseek-r1-distill-qwen-32b",
      "name": "DeepSeek: R1 Distill Qwen 32B",
      "context_length": 64000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.00000024",
        "completion": "0.00000024",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.572
        },
        "math": {
          "aime-2024": 0.833,
          "math-500": 0.943
        },
        "reasoning": {
          "gpqa": 0.621
        }
      },
      "category_scores": {
        "code": 0.572,
        "math": 0.888,
        "reasoning": 0.621
      }
    },
    {
      "id": "deepseek/deepseek-r1-distill-qwen-14b",
      "name": "DeepSeek: R1 Distill Qwen 14B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.00000012",
        "completion": "0.00000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.531
        },
        "math": {
          "aime-2024": 0.8,
          "math-500": 0.939
        },
        "reasoning": {
          "gpqa": 0.591
        }
      },
      "category_scores": {
        "code": 0.531,
        "math": 0.8695,
        "reasoning": 0.591
      }
    },
    {
      "id": "perplexity/sonar-reasoning",
      "name": "Perplexity: Sonar Reasoning",
      "context_length": 127000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.000001",
        "completion": "0.000005",
        "request": "0.005",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "perplexity/sonar",
      "name": "Perplexity: Sonar",
      "context_length": 127072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000001",
        "completion": "0.000001",
        "request": "0.005",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000013",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.575
        },
        "math": {
          "aime-2024": 0.867,
          "math-500": 0.945
        },
        "reasoning": {
          "gpqa": 0.652
        }
      },
      "category_scores": {
        "code": 0.575,
        "math": 0.906,
        "reasoning": 0.652
      }
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": "deepseek-r1"
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.446,
          "livecodebench": 0.733,
          "aider-polyglot": 0.716,
          "codeforces": 0.6433
        },
        "math": {
          "aime-2025": 0.875,
          "aime-2024": 0.914
        },
        "reasoning": {
          "gpqa": 0.81,
          "mmlu-pro": 0.85
        }
      },
      "category_scores": {
        "code": 0.6346,
        "math": 0.8945,
        "reasoning": 0.83
      }
    },
    {
      "id": "minimax/minimax-01",
      "name": "MiniMax: MiniMax-01",
      "context_length": 1000192,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "microsoft/phi-4",
      "name": "Microsoft: Phi 4",
      "context_length": 16384,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000014",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.826
        },
        "math": {
          "math": 0.804
        },
        "reasoning": {
          "gpqa": 0.561,
          "mmlu-pro": 0.704,
          "mmlu": 0.848
        },
        "general": {
          "ifeval": 0.63,
          "arena-hard": 0.754
        }
      },
      "category_scores": {
        "code": 0.826,
        "math": 0.804,
        "reasoning": 0.7043,
        "general": 0.692
      }
    },
    {
      "id": "sao10k/l3.1-70b-hanami-x1",
      "name": "Sao10K: Llama 3.1 70B Hanami x1",
      "context_length": 16000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000003",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "context_length": 163840,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "DeepSeek",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "sao10k/l3.3-euryale-70b",
      "name": "Sao10K: Llama 3.3 Euryale 70B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000065",
        "completion": "0.00000075",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/o1",
      "name": "OpenAI: o1",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000015",
        "completion": "0.00006",
        "request": "0",
        "image": "0.021675",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.0000075"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.413
        },
        "math": {
          "aime-2024": 0.42,
          "math": 0.855
        },
        "reasoning": {
          "gpqa": 0.733,
          "mmlu": 0.908
        }
      },
      "category_scores": {
        "code": 0.413,
        "math": 0.6375,
        "reasoning": 0.8205
      }
    },
    {
      "id": "cohere/command-r7b-12-2024",
      "name": "Cohere: Command R7B (12-2024)",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Cohere",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000000375",
        "completion": "0.00000015",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "context_length": 1048576,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Gemini",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.351
        },
        "math": {
          "math": 0.897
        },
        "reasoning": {
          "gpqa": 0.621,
          "mmlu-pro": 0.764
        }
      },
      "category_scores": {
        "code": 0.351,
        "math": 0.897,
        "reasoning": 0.6925
      }
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.884
        },
        "math": {
          "math": 0.77
        },
        "reasoning": {
          "gpqa": 0.505,
          "mmlu-pro": 0.689,
          "mmlu": 0.86
        },
        "general": {
          "ifeval": 0.921
        }
      },
      "category_scores": {
        "code": 0.884,
        "math": 0.77,
        "reasoning": 0.6847,
        "general": 0.921
      }
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.00000032",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.884
        },
        "math": {
          "math": 0.77
        },
        "reasoning": {
          "gpqa": 0.505,
          "mmlu-pro": 0.689,
          "mmlu": 0.86
        },
        "general": {
          "ifeval": 0.921
        }
      },
      "category_scores": {
        "code": 0.884,
        "math": 0.77,
        "reasoning": 0.6847,
        "general": 0.921
      }
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "context_length": 300000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Nova",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000006",
        "completion": "0.00000024",
        "request": "0",
        "image": "0.00009",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.854
        },
        "math": {
          "gsm8k": 0.945,
          "math": 0.733
        },
        "reasoning": {
          "gpqa": 0.42,
          "mmlu": 0.805
        },
        "tool_calling": {
          "bfcl": 0.666
        },
        "general": {
          "ifeval": 0.897
        }
      },
      "category_scores": {
        "code": 0.854,
        "math": 0.839,
        "reasoning": 0.6125,
        "tool_calling": 0.666,
        "general": 0.897
      }
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Nova",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000000035",
        "completion": "0.00000014",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.811
        },
        "math": {
          "gsm8k": 0.923,
          "math": 0.693
        },
        "reasoning": {
          "gpqa": 0.4,
          "mmlu": 0.776
        },
        "tool_calling": {
          "bfcl": 0.562
        },
        "general": {
          "ifeval": 0.872
        }
      },
      "category_scores": {
        "code": 0.811,
        "math": 0.808,
        "reasoning": 0.588,
        "tool_calling": 0.562,
        "general": 0.872
      }
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "context_length": 300000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Nova",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.0000032",
        "request": "0",
        "image": "0.0012",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.89
        },
        "math": {
          "gsm8k": 0.948,
          "math": 0.766
        },
        "reasoning": {
          "gpqa": 0.469,
          "mmlu": 0.859
        },
        "tool_calling": {
          "bfcl": 0.684
        },
        "general": {
          "ifeval": 0.921
        }
      },
      "category_scores": {
        "code": 0.89,
        "math": 0.857,
        "reasoning": 0.664,
        "tool_calling": 0.684,
        "general": 0.921
      }
    },
    {
      "id": "openai/gpt-4o-2024-11-20",
      "name": "OpenAI: GPT-4o (2024-11-20)",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0.003613",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000125"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000002",
        "completion": "0.000006",
        "request": "0",
        "image": "0.002888",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-2.5-coder-32b-instruct",
      "name": "Qwen2.5 Coder 32B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.00000003",
        "completion": "0.00000011",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.927,
          "livecodebench": 0.314,
          "mbpp": 0.902
        },
        "math": {
          "gsm8k": 0.911,
          "math": 0.572
        },
        "reasoning": {
          "mmlu-pro": 0.504,
          "mmlu": 0.751,
          "hellaswag": 0.83,
          "winogrande": 0.808
        }
      },
      "category_scores": {
        "code": 0.7143,
        "math": 0.7415,
        "reasoning": 0.7232
      }
    },
    {
      "id": "raifle/sorcererlm-8x22b",
      "name": "SorcererLM 8x22B",
      "context_length": 16000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": "vicuna"
      },
      "pricing": {
        "prompt": "0.0000045",
        "completion": "0.0000045",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "thedrummer/unslopnemo-12b",
      "name": "TheDrummer: UnslopNemo 12B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": "mistral"
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000008",
        "input_cache_write": "0.000001"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000008",
        "completion": "0.000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000008",
        "input_cache_write": "0.000001"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "context_length": 200000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Claude",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000006",
        "completion": "0.00003",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "anthracite-org/magnum-v4-72b",
      "name": "Magnum v4 72B",
      "context_length": 16384,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.000003",
        "completion": "0.000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.348
        },
        "math": {
          "math": 0.545
        },
        "reasoning": {
          "mmlu": 0.65,
          "winogrande": 0.753
        },
        "general": {
          "arena-hard": 0.709,
          "mt-bench": 0.83
        }
      },
      "category_scores": {
        "code": 0.348,
        "math": 0.545,
        "reasoning": 0.7015,
        "general": 0.7695
      }
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000004",
        "completion": "0.00000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "livecodebench": 0.548
        },
        "math": {
          "aime-2025": 0.721,
          "aime-2024": 0.775
        },
        "reasoning": {
          "gpqa": 0.534
        }
      },
      "category_scores": {
        "code": 0.548,
        "math": 0.748,
        "reasoning": 0.534
      }
    },
    {
      "id": "qwen/qwen-2.5-7b-instruct",
      "name": "Qwen: Qwen2.5 7B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.00000004",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.848,
          "livecodebench": 0.287,
          "mbpp": 0.792
        },
        "math": {
          "gsm8k": 0.916,
          "math": 0.755
        },
        "reasoning": {
          "gpqa": 0.364,
          "mmlu-pro": 0.563
        },
        "general": {
          "ifeval": 0.712,
          "arena-hard": 0.52,
          "mt-bench": 0.875
        }
      },
      "category_scores": {
        "code": 0.6423,
        "math": 0.8355,
        "reasoning": 0.4635,
        "general": 0.7023
      }
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.0000012",
        "completion": "0.0000012",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "gsm8k": 0.9143
        },
        "reasoning": {
          "mmlu": 0.802,
          "hellaswag": 0.8558,
          "winogrande": 0.8453
        },
        "general": {
          "mt-bench": 0.0899
        }
      },
      "category_scores": {
        "math": 0.9143,
        "reasoning": 0.8344,
        "general": 0.0899
      }
    },
    {
      "id": "inflection/inflection-3-pi",
      "name": "Inflection: Inflection 3 Pi",
      "context_length": 8000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "inflection/inflection-3-productivity",
      "name": "Inflection: Inflection 3 Productivity",
      "context_length": 8000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "thedrummer/rocinante-12b",
      "name": "TheDrummer: Rocinante 12B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.00000017",
        "completion": "0.00000043",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "meta-llama/llama-3.2-90b-vision-instruct",
      "name": "Meta: Llama 3.2 90B Vision Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000035",
        "completion": "0.0000004",
        "request": "0",
        "image": "0.0005058",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "meta-llama/llama-3.2-11b-vision-instruct",
      "name": "Meta: Llama 3.2 11B Vision Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.000000049",
        "completion": "0.000000049",
        "request": "0",
        "image": "0.00007948",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "meta-llama/llama-3.2-1b-instruct",
      "name": "Meta: Llama 3.2 1B Instruct",
      "context_length": 60000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.000000027",
        "completion": "0.0000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct:free",
      "name": "Meta: Llama 3.2 3B Instruct (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "gsm8k": 0.777,
          "math": 0.48
        },
        "reasoning": {
          "gpqa": 0.328,
          "mmlu": 0.634,
          "hellaswag": 0.698
        },
        "tool_calling": {
          "nexus": 0.343
        },
        "long_context": {
          "infinitebench-en.mc": 0.633
        },
        "general": {
          "ifeval": 0.774
        }
      },
      "category_scores": {
        "math": 0.6285,
        "reasoning": 0.5533,
        "tool_calling": 0.343,
        "long_context": 0.633,
        "general": 0.774
      }
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.00000002",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "math": {
          "gsm8k": 0.777,
          "math": 0.48
        },
        "reasoning": {
          "gpqa": 0.328,
          "mmlu": 0.634,
          "hellaswag": 0.698
        },
        "tool_calling": {
          "nexus": 0.343
        },
        "long_context": {
          "infinitebench-en.mc": 0.633
        },
        "general": {
          "ifeval": 0.774
        }
      },
      "category_scores": {
        "math": 0.6285,
        "reasoning": 0.5533,
        "tool_calling": 0.343,
        "long_context": 0.633,
        "general": 0.774
      }
    },
    {
      "id": "qwen/qwen-2.5-72b-instruct",
      "name": "Qwen2.5 72B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.00000007",
        "completion": "0.00000026",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.866,
          "livecodebench": 0.555,
          "mbpp": 0.882
        },
        "math": {
          "gsm8k": 0.958,
          "math": 0.831
        },
        "reasoning": {
          "gpqa": 0.49,
          "mmlu-pro": 0.711
        },
        "general": {
          "ifeval": 0.841,
          "arena-hard": 0.812,
          "mt-bench": 0.935
        }
      },
      "category_scores": {
        "code": 0.7677,
        "math": 0.8945,
        "reasoning": 0.6005,
        "general": 0.8627
      }
    },
    {
      "id": "neversleep/llama-3.1-lumimaid-8b",
      "name": "NeverSleep: Lumimaid v0.2 8B",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000009",
        "completion": "0.0000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "mistralai/pixtral-12b",
      "name": "Mistral: Pixtral 12B",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000001",
        "request": "0",
        "image": "0.0001445",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.72
        },
        "math": {
          "math": 0.481
        },
        "reasoning": {
          "mmlu": 0.692
        },
        "general": {
          "ifeval": 0.613,
          "mt-bench": 0.768
        }
      },
      "category_scores": {
        "code": 0.72,
        "math": 0.481,
        "reasoning": 0.692,
        "general": 0.6905
      }
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Cohere",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000006",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Cohere",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-2.5-vl-7b-instruct:free",
      "name": "Qwen: Qwen2.5-VL 7B Instruct (free)",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "qwen/qwen-2.5-vl-7b-instruct",
      "name": "Qwen: Qwen2.5-VL 7B Instruct",
      "context_length": 32768,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Qwen",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000002",
        "completion": "0.0000002",
        "request": "0",
        "image": "0.0001445",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "sao10k/l3.1-euryale-70b",
      "name": "Sao10K: Llama 3.1 Euryale 70B v2.2",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000065",
        "completion": "0.00000075",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "context_length": 128000,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Other",
        "instruct_type": "phi3"
      },
      "pricing": {
        "prompt": "0.0000001",
        "completion": "0.0000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nousresearch/hermes-3-llama-3.1-70b",
      "name": "Nous: Hermes 3 70B Instruct",
      "context_length": 65536,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.0000003",
        "completion": "0.0000003",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nousresearch/hermes-3-llama-3.1-405b:free",
      "name": "Nous: Hermes 3 405B Instruct (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "nousresearch/hermes-3-llama-3.1-405b",
      "name": "Nous: Hermes 3 405B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "chatml"
      },
      "pricing": {
        "prompt": "0.000001",
        "completion": "0.000001",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/chatgpt-4o-latest",
      "name": "OpenAI: ChatGPT-4o",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.000005",
        "completion": "0.000015",
        "request": "0",
        "image": "0.007225",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "sao10k/l3-lunaris-8b",
      "name": "Sao10K: Llama 3 8B Lunaris",
      "context_length": 8192,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000004",
        "completion": "0.00000005",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": null,
      "category_scores": null
    },
    {
      "id": "openai/gpt-4o-2024-08-06",
      "name": "OpenAI: GPT-4o (2024-08-06)",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.0000025",
        "completion": "0.00001",
        "request": "0",
        "image": "0.003613",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.00000125"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.332,
          "aider-polyglot": 0.307
        },
        "math": {
          "aime-2024": 0.131
        },
        "reasoning": {
          "gpqa": 0.701,
          "mmlu-pro": 0.747,
          "mmlu": 0.857
        },
        "general": {
          "ifeval": 0.81
        }
      },
      "category_scores": {
        "code": 0.3195,
        "math": 0.131,
        "reasoning": 0.7683,
        "general": 0.81
      }
    },
    {
      "id": "meta-llama/llama-3.1-405b",
      "name": "Meta: Llama 3.1 405B (base)",
      "context_length": 32768,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "none"
      },
      "pricing": {
        "prompt": "0.000004",
        "completion": "0.000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.89
        },
        "math": {
          "gsm8k": 0.968,
          "math": 0.738
        },
        "reasoning": {
          "gpqa": 0.507,
          "mmlu-pro": 0.733,
          "mmlu": 0.873
        },
        "tool_calling": {
          "bfcl": 0.885,
          "nexus": 0.587
        },
        "general": {
          "ifeval": 0.886
        }
      },
      "category_scores": {
        "code": 0.89,
        "math": 0.853,
        "reasoning": 0.7043,
        "tool_calling": 0.736,
        "general": 0.886
      }
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct:free",
      "name": "Meta: Llama 3.1 405B Instruct (free)",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0",
        "completion": "0",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.89
        },
        "math": {
          "gsm8k": 0.968,
          "math": 0.738
        },
        "reasoning": {
          "gpqa": 0.507,
          "mmlu-pro": 0.733,
          "mmlu": 0.873
        },
        "tool_calling": {
          "bfcl": 0.885,
          "nexus": 0.587
        },
        "general": {
          "ifeval": 0.886
        }
      },
      "category_scores": {
        "code": 0.89,
        "math": 0.853,
        "reasoning": 0.7043,
        "tool_calling": 0.736,
        "general": 0.886
      }
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "context_length": 130815,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.0000035",
        "completion": "0.0000035",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.89
        },
        "math": {
          "gsm8k": 0.968,
          "math": 0.738
        },
        "reasoning": {
          "gpqa": 0.507,
          "mmlu-pro": 0.733,
          "mmlu": 0.873
        },
        "tool_calling": {
          "bfcl": 0.885,
          "nexus": 0.587
        },
        "general": {
          "ifeval": 0.886
        }
      },
      "category_scores": {
        "code": 0.89,
        "math": 0.853,
        "reasoning": 0.7043,
        "tool_calling": 0.736,
        "general": 0.886
      }
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.00000003",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.726
        },
        "reasoning": {
          "gpqa": 0.304,
          "mmlu-pro": 0.483,
          "mmlu": 0.694
        },
        "tool_calling": {
          "bfcl": 0.761,
          "nexus": 0.385
        },
        "general": {
          "ifeval": 0.804
        }
      },
      "category_scores": {
        "code": 0.726,
        "reasoning": 0.4937,
        "tool_calling": 0.573,
        "general": 0.804
      }
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Llama3",
        "instruct_type": "llama3"
      },
      "pricing": {
        "prompt": "0.0000004",
        "completion": "0.0000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "code": {
          "humaneval": 0.805
        },
        "reasoning": {
          "gpqa": 0.417,
          "mmlu-pro": 0.664,
          "mmlu": 0.836
        },
        "tool_calling": {
          "bfcl": 0.848,
          "nexus": 0.567
        },
        "general": {
          "ifeval": 0.875
        }
      },
      "category_scores": {
        "code": 0.805,
        "reasoning": 0.639,
        "tool_calling": 0.7075,
        "general": 0.875
      }
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "context_length": 131072,
      "architecture": {
        "modality": "text->text",
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "Mistral",
        "instruct_type": "mistral"
      },
      "pricing": {
        "prompt": "0.00000002",
        "completion": "0.00000004",
        "request": "0",
        "image": "0",
        "web_search": "0",
        "internal_reasoning": "0"
      },
      "benchmarks": {
        "reasoning": {
          "mmlu": 0.68,
          "hellaswag": 0.835,
          "winogrande": 0.768,
          "commonsenseqa": 0.704
        }
      },
      "category_scores": {
        "reasoning": 0.7468
      }
    },
    {
      "id": "openai/gpt-4o-mini-2024-07-18",
      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
      "context_length": 128000,
      "architecture": {
        "modality": "text+image->text",
        "input_modalities": [
          "text",
          "image",
          "file"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "GPT",
        "instruct_type": null
      },
      "pricing": {
        "prompt": "0.00000015",
        "completion": "0.0000006",
        "request": "0",
        "image": "0.007225",
        "web_search": "0",
        "internal_reasoning": "0",
        "input_cache_read": "0.000000075"
      },
      "benchmarks": {
        "code": {
          "swe-bench-verified": 0.087,
          "humaneval": 0.872
        },
        "math": {
          "math": 0.702
        },
        "reasoning": {
          "gpqa": 0.402,
          "mmlu": 0.82
        }
      },
      "category_scores": {
        "code": 0.4795,
        "math": 0.702,
        "reasoning": 0.611
      }
    },
{
|
|
"id": "openai/gpt-4o-mini",
|
|
"name": "OpenAI: GPT-4o-mini",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000015",
|
|
"completion": "0.0000006",
|
|
"request": "0",
|
|
"image": "0.000217",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.000000075"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.087,
|
|
"humaneval": 0.872
|
|
},
|
|
"math": {
|
|
"math": 0.702
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.402,
|
|
"mmlu": 0.82
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.4795,
|
|
"math": 0.702,
|
|
"reasoning": 0.611
|
|
}
|
|
},
|
|
{
|
|
"id": "google/gemma-2-27b-it",
|
|
"name": "Google: Gemma 2 27B",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": "gemma"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000065",
|
|
"completion": "0.00000065",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.518,
|
|
"mbpp": 0.626
|
|
},
|
|
"math": {
|
|
"gsm8k": 0.74,
|
|
"math": 0.423
|
|
},
|
|
"reasoning": {
|
|
"mmlu": 0.752,
|
|
"hellaswag": 0.864,
|
|
"winogrande": 0.837
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.572,
|
|
"math": 0.5815,
|
|
"reasoning": 0.8177
|
|
}
|
|
},
|
|
{
|
|
"id": "google/gemma-2-9b-it",
|
|
"name": "Google: Gemma 2 9B",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Gemini",
|
|
"instruct_type": "gemma"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000003",
|
|
"completion": "0.00000009",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.402,
|
|
"mbpp": 0.524
|
|
},
|
|
"math": {
|
|
"gsm8k": 0.686,
|
|
"math": 0.366
|
|
},
|
|
"reasoning": {
|
|
"mmlu": 0.713,
|
|
"hellaswag": 0.819,
|
|
"winogrande": 0.806
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.463,
|
|
"math": 0.526,
|
|
"reasoning": 0.7793
|
|
}
|
|
},
|
|
{
|
|
"id": "sao10k/l3-euryale-70b",
|
|
"name": "Sao10k: Llama 3 Euryale 70B v2.1",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "llama3"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000148",
|
|
"completion": "0.00000148",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-7b-instruct:free",
|
|
"name": "Mistral: Mistral 7B Instruct (free)",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0",
|
|
"completion": "0",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-7b-instruct",
|
|
"name": "Mistral: Mistral 7B Instruct",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000028",
|
|
"completion": "0.000000054",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "nousresearch/hermes-2-pro-llama-3-8b",
|
|
"name": "NousResearch: Hermes 2 Pro - Llama-3 8B",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "chatml"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000000025",
|
|
"completion": "0.00000008",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-7b-instruct-v0.3",
|
|
"name": "Mistral: Mistral 7B Instruct v0.3",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "microsoft/phi-3-mini-128k-instruct",
|
|
"name": "Microsoft: Phi-3 Mini 128K Instruct",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": "phi3"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000001",
|
|
"completion": "0.0000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "microsoft/phi-3-medium-128k-instruct",
|
|
"name": "Microsoft: Phi-3 Medium 128K Instruct",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Other",
|
|
"instruct_type": "phi3"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000001",
|
|
"completion": "0.000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "meta-llama/llama-guard-2-8b",
|
|
"name": "Meta: LlamaGuard 2 8B",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "none"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4o-2024-05-13",
|
|
"name": "OpenAI: GPT-4o (2024-05-13)",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000005",
|
|
"completion": "0.000015",
|
|
"request": "0",
|
|
"image": "0.007225",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.902
|
|
},
|
|
"math": {
|
|
"math": 0.766
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.536,
|
|
"mmlu-pro": 0.726,
|
|
"mmlu": 0.887
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.902,
|
|
"math": 0.766,
|
|
"reasoning": 0.7163
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-4o",
|
|
"name": "OpenAI: GPT-4o",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000025",
|
|
"completion": "0.00001",
|
|
"request": "0",
|
|
"image": "0.003613",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000125"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.332,
|
|
"aider-polyglot": 0.307
|
|
},
|
|
"math": {
|
|
"aime-2024": 0.131
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.701,
|
|
"mmlu-pro": 0.747,
|
|
"mmlu": 0.857
|
|
},
|
|
"general": {
|
|
"ifeval": 0.81
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.3195,
|
|
"math": 0.131,
|
|
"reasoning": 0.7683,
|
|
"general": 0.81
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-4o:extended",
|
|
"name": "OpenAI: GPT-4o (extended)",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image",
|
|
"file"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000006",
|
|
"completion": "0.000018",
|
|
"request": "0",
|
|
"image": "0.007225",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "meta-llama/llama-3-70b-instruct",
|
|
"name": "Meta: Llama 3 70B Instruct",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "llama3"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000003",
|
|
"completion": "0.0000004",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "meta-llama/llama-3-8b-instruct",
|
|
"name": "Meta: Llama 3 8B Instruct",
|
|
"context_length": 8192,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama3",
|
|
"instruct_type": "llama3"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000003",
|
|
"completion": "0.00000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mixtral-8x22b-instruct",
|
|
"name": "Mistral: Mixtral 8x22B Instruct",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000002",
|
|
"completion": "0.000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "microsoft/wizardlm-2-8x22b",
|
|
"name": "WizardLM-2 8x22B",
|
|
"context_length": 65536,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "vicuna"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000048",
|
|
"completion": "0.00000048",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4-turbo",
|
|
"name": "OpenAI: GPT-4 Turbo",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00001",
|
|
"completion": "0.00003",
|
|
"request": "0",
|
|
"image": "0.01445",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.871
|
|
},
|
|
"math": {
|
|
"math": 0.726
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.48,
|
|
"mmlu": 0.865
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.871,
|
|
"math": 0.726,
|
|
"reasoning": 0.6725
|
|
}
|
|
},
|
|
{
|
|
"id": "anthropic/claude-3-haiku",
|
|
"name": "Anthropic: Claude 3 Haiku",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000025",
|
|
"completion": "0.00000125",
|
|
"request": "0",
|
|
"image": "0.0004",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.00000003",
|
|
"input_cache_write": "0.0000003"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.759
|
|
},
|
|
"math": {
|
|
"gsm8k": 0.889,
|
|
"math": 0.389
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.333,
|
|
"mmlu": 0.752,
|
|
"hellaswag": 0.859
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.759,
|
|
"math": 0.639,
|
|
"reasoning": 0.648
|
|
}
|
|
},
|
|
{
|
|
"id": "anthropic/claude-3-opus",
|
|
"name": "Anthropic: Claude 3 Opus",
|
|
"context_length": 200000,
|
|
"architecture": {
|
|
"modality": "text+image->text",
|
|
"input_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Claude",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000015",
|
|
"completion": "0.000075",
|
|
"request": "0",
|
|
"image": "0.024",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0",
|
|
"input_cache_read": "0.0000015",
|
|
"input_cache_write": "0.00001875"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.849
|
|
},
|
|
"math": {
|
|
"gsm8k": 0.95,
|
|
"math": 0.601
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.504,
|
|
"mmlu-pro": 0.685,
|
|
"mmlu": 0.868,
|
|
"hellaswag": 0.954
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.849,
|
|
"math": 0.7755,
|
|
"reasoning": 0.7528
|
|
}
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-large",
|
|
"name": "Mistral Large",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000002",
|
|
"completion": "0.000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.92
|
|
},
|
|
"math": {
|
|
"gsm8k": 0.93
|
|
},
|
|
"reasoning": {
|
|
"mmlu": 0.84
|
|
},
|
|
"general": {
|
|
"mt-bench": 0.863
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.92,
|
|
"math": 0.93,
|
|
"reasoning": 0.84,
|
|
"general": 0.863
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-3.5-turbo-0613",
|
|
"name": "OpenAI: GPT-3.5 Turbo (older v0613)",
|
|
"context_length": 4095,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000001",
|
|
"completion": "0.000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4-turbo-preview",
|
|
"name": "OpenAI: GPT-4 Turbo Preview",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00001",
|
|
"completion": "0.00003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-tiny",
|
|
"name": "Mistral Tiny",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000025",
|
|
"completion": "0.00000025",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-7b-instruct-v0.2",
|
|
"name": "Mistral: Mistral 7B Instruct v0.2",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000002",
|
|
"completion": "0.0000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mixtral-8x7b-instruct",
|
|
"name": "Mistral: Mixtral 8x7B Instruct",
|
|
"context_length": 32768,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000054",
|
|
"completion": "0.00000054",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "neversleep/noromaid-20b",
|
|
"name": "Noromaid 20B",
|
|
"context_length": 4096,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama2",
|
|
"instruct_type": "alpaca"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000001",
|
|
"completion": "0.00000175",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "alpindale/goliath-120b",
|
|
"name": "Goliath 120B",
|
|
"context_length": 6144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama2",
|
|
"instruct_type": "airoboros"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000006",
|
|
"completion": "0.000008",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openrouter/auto",
|
|
"name": "Auto Router",
|
|
"context_length": 2000000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Router",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "-1",
|
|
"completion": "-1"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4-1106-preview",
|
|
"name": "OpenAI: GPT-4 Turbo (older v1106)",
|
|
"context_length": 128000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00001",
|
|
"completion": "0.00003",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-3.5-turbo-instruct",
|
|
"name": "OpenAI: GPT-3.5 Turbo Instruct",
|
|
"context_length": 4095,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": "chatml"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000015",
|
|
"completion": "0.000002",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mistralai/mistral-7b-instruct-v0.1",
|
|
"name": "Mistral: Mistral 7B Instruct v0.1",
|
|
"context_length": 2824,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Mistral",
|
|
"instruct_type": "mistral"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000011",
|
|
"completion": "0.00000019",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-3.5-turbo-16k",
|
|
"name": "OpenAI: GPT-3.5 Turbo 16k",
|
|
"context_length": 16385,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.000003",
|
|
"completion": "0.000004",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "mancer/weaver",
|
|
"name": "Mancer: Weaver (alpha)",
|
|
"context_length": 8000,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama2",
|
|
"instruct_type": "alpaca"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000075",
|
|
"completion": "0.000001",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "undi95/remm-slerp-l2-13b",
|
|
"name": "ReMM SLERP 13B",
|
|
"context_length": 6144,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama2",
|
|
"instruct_type": "alpaca"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000045",
|
|
"completion": "0.00000065",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "gryphe/mythomax-l2-13b",
|
|
"name": "MythoMax 13B",
|
|
"context_length": 4096,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "Llama2",
|
|
"instruct_type": "alpaca"
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00000006",
|
|
"completion": "0.00000006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4-0314",
|
|
"name": "OpenAI: GPT-4 (older v0314)",
|
|
"context_length": 8191,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00003",
|
|
"completion": "0.00006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": null,
|
|
"category_scores": null
|
|
},
|
|
{
|
|
"id": "openai/gpt-4",
|
|
"name": "OpenAI: GPT-4",
|
|
"context_length": 8191,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.00003",
|
|
"completion": "0.00006",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"swe-bench-verified": 0.546,
|
|
"aider-polyglot": 0.516
|
|
},
|
|
"math": {
|
|
"aime-2025": 0.464,
|
|
"aime-2024": 0.481
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.663,
|
|
"mmlu": 0.902
|
|
},
|
|
"general": {
|
|
"ifeval": 0.874
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.531,
|
|
"math": 0.4725,
|
|
"reasoning": 0.7825,
|
|
"general": 0.874
|
|
}
|
|
},
|
|
{
|
|
"id": "openai/gpt-3.5-turbo",
|
|
"name": "OpenAI: GPT-3.5 Turbo",
|
|
"context_length": 16385,
|
|
"architecture": {
|
|
"modality": "text->text",
|
|
"input_modalities": [
|
|
"text"
|
|
],
|
|
"output_modalities": [
|
|
"text"
|
|
],
|
|
"tokenizer": "GPT",
|
|
"instruct_type": null
|
|
},
|
|
"pricing": {
|
|
"prompt": "0.0000005",
|
|
"completion": "0.0000015",
|
|
"request": "0",
|
|
"image": "0",
|
|
"web_search": "0",
|
|
"internal_reasoning": "0"
|
|
},
|
|
"benchmarks": {
|
|
"code": {
|
|
"humaneval": 0.68
|
|
},
|
|
"math": {
|
|
"math": 0.431
|
|
},
|
|
"reasoning": {
|
|
"gpqa": 0.308,
|
|
"mmlu": 0.698
|
|
}
|
|
},
|
|
"category_scores": {
|
|
"code": 0.68,
|
|
"math": 0.431,
|
|
"reasoning": 0.503
|
|
}
|
|
}
|
|
]
|
|
} |