diff --git a/providers/digitalocean/models/alibaba-qwen3-32b.toml b/providers/digitalocean/models/alibaba-qwen3-32b.toml new file mode 100644 index 000000000..c5ab93f89 --- /dev/null +++ b/providers/digitalocean/models/alibaba-qwen3-32b.toml @@ -0,0 +1,21 @@ +name = "Qwen3-32B" +family = "qwen" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-04" +last_updated = "2025-04" + +[cost] +input = 0.25 +output = 0.55 + +[limit] +context = 131_072 +output = 40_960 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-4.1-opus.toml b/providers/digitalocean/models/anthropic-claude-4.1-opus.toml new file mode 100644 index 000000000..ed2d55687 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-4.1-opus.toml @@ -0,0 +1,23 @@ +name = "Claude Opus 4.1" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-08" +last_updated = "2025-08" + +[cost] +input = 15.00 +output = 75.00 +cache_read = 1.50 +cache_write = 18.75 + +[limit] +context = 200_000 +output = 32_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-4.5-sonnet.toml b/providers/digitalocean/models/anthropic-claude-4.5-sonnet.toml new file mode 100644 index 000000000..d998057a5 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-4.5-sonnet.toml @@ -0,0 +1,27 @@ +name = "Claude Sonnet 4.5" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-09" +last_updated = "2025-09" + +[cost] +input = 3.00 +output = 15.00 +cache_read = 0.30 +cache_write = 3.75 + +[cost.context_over_200k] +input = 6.00 +output = 22.50 + +[limit] +context = 1_000_000 +output = 64_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git 
a/providers/digitalocean/models/anthropic-claude-4.6-sonnet.toml b/providers/digitalocean/models/anthropic-claude-4.6-sonnet.toml new file mode 100644 index 000000000..cea2ec632 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-4.6-sonnet.toml @@ -0,0 +1,27 @@ +name = "Claude Sonnet 4.6" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-02" +last_updated = "2026-02" + +[cost] +input = 3.00 +output = 15.00 +cache_read = 0.30 +cache_write = 3.75 + +[cost.context_over_200k] +input = 6.00 +output = 22.50 + +[limit] +context = 1_000_000 +output = 64_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-haiku-4.5.toml b/providers/digitalocean/models/anthropic-claude-haiku-4.5.toml new file mode 100644 index 000000000..753278ac4 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-haiku-4.5.toml @@ -0,0 +1,23 @@ +name = "Claude Haiku 4.5" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-10" +last_updated = "2025-10" + +[cost] +input = 1.00 +output = 5.00 +cache_read = 0.10 +cache_write = 1.25 + +[limit] +context = 200_000 +output = 64_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-opus-4.5.toml b/providers/digitalocean/models/anthropic-claude-opus-4.5.toml new file mode 100644 index 000000000..456c4011d --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-opus-4.5.toml @@ -0,0 +1,23 @@ +name = "Claude Opus 4.5" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-11" +last_updated = "2025-11" + +[cost] +input = 5.00 +output = 25.00 +cache_read = 0.50 +cache_write = 6.25 + +[limit] +context = 200_000 +output = 64_000 + 
+[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-opus-4.6.toml b/providers/digitalocean/models/anthropic-claude-opus-4.6.toml new file mode 100644 index 000000000..0147861e8 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-opus-4.6.toml @@ -0,0 +1,27 @@ +name = "Claude Opus 4.6" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-02" +last_updated = "2026-02" + +[cost] +input = 5.00 +output = 25.00 +cache_read = 0.50 +cache_write = 6.25 + +[cost.context_over_200k] +input = 10.00 +output = 37.50 + +[limit] +context = 1_000_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-opus-4.toml b/providers/digitalocean/models/anthropic-claude-opus-4.toml new file mode 100644 index 000000000..4b6b173ea --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-opus-4.toml @@ -0,0 +1,23 @@ +name = "Claude Opus 4" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-05" +last_updated = "2025-05" + +[cost] +input = 15.00 +output = 75.00 +cache_read = 1.50 +cache_write = 18.75 + +[limit] +context = 200_000 +output = 32_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/anthropic-claude-sonnet-4.toml b/providers/digitalocean/models/anthropic-claude-sonnet-4.toml new file mode 100644 index 000000000..11af71610 --- /dev/null +++ b/providers/digitalocean/models/anthropic-claude-sonnet-4.toml @@ -0,0 +1,27 @@ +name = "Claude Sonnet 4" +family = "claude" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-05" +last_updated = "2025-05" + +[cost] +input = 3.00 +output = 15.00 +cache_read = 0.30 +cache_write = 
3.75 + +[cost.context_over_200k] +input = 6.00 +output = 22.50 + +[limit] +context = 1_000_000 +output = 64_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/deepseek-r1-distill-llama-70b.toml b/providers/digitalocean/models/deepseek-r1-distill-llama-70b.toml new file mode 100644 index 000000000..01ad62924 --- /dev/null +++ b/providers/digitalocean/models/deepseek-r1-distill-llama-70b.toml @@ -0,0 +1,21 @@ +name = "DeepSeek R1 Distill Llama 70B" +family = "deepseek" +attachment = false +reasoning = true +tool_call = false +temperature = true +open_weights = true +release_date = "2025-01" +last_updated = "2025-01" + +[cost] +input = 0.99 +output = 0.99 + +[limit] +context = 128_000 +output = 32_768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/glm-5.toml b/providers/digitalocean/models/glm-5.toml new file mode 100644 index 000000000..a1253ed05 --- /dev/null +++ b/providers/digitalocean/models/glm-5.toml @@ -0,0 +1,21 @@ +name = "GLM 5" +family = "glm" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-07" +last_updated = "2025-07" + +[cost] +input = 1.00 +output = 3.20 + +[limit] +context = 128_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/kimi-k2.5.toml b/providers/digitalocean/models/kimi-k2.5.toml new file mode 100644 index 000000000..8ce64ce0e --- /dev/null +++ b/providers/digitalocean/models/kimi-k2.5.toml @@ -0,0 +1,21 @@ +name = "Kimi K2.5" +family = "kimi" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-07" +last_updated = "2025-07" + +[cost] +input = 0.50 +output = 2.70 + +[limit] +context = 131_072 +output = 32_768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/llama3-8b-instruct.toml 
b/providers/digitalocean/models/llama3-8b-instruct.toml new file mode 100644 index 000000000..d2743de6f --- /dev/null +++ b/providers/digitalocean/models/llama3-8b-instruct.toml @@ -0,0 +1,21 @@ +name = "Llama 3.1 8B Instruct" +family = "llama" +attachment = false +reasoning = false +tool_call = true +temperature = true +open_weights = true +release_date = "2024-07" +last_updated = "2024-07" + +[cost] +input = 0.20 +output = 0.20 + +[limit] +context = 128_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/llama3.3-70b-instruct.toml b/providers/digitalocean/models/llama3.3-70b-instruct.toml new file mode 100644 index 000000000..c99a3fd73 --- /dev/null +++ b/providers/digitalocean/models/llama3.3-70b-instruct.toml @@ -0,0 +1,21 @@ +name = "Llama 3.3 70B Instruct" +family = "llama" +attachment = false +reasoning = false +tool_call = true +temperature = true +open_weights = true +release_date = "2024-12" +last_updated = "2024-12" + +[cost] +input = 0.65 +output = 0.65 + +[limit] +context = 128_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/minimax-m2.5.toml b/providers/digitalocean/models/minimax-m2.5.toml new file mode 100644 index 000000000..3b1666bd5 --- /dev/null +++ b/providers/digitalocean/models/minimax-m2.5.toml @@ -0,0 +1,22 @@ +name = "MiniMax M2.5" +family = "minimax" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-06" +last_updated = "2025-06" +status = "beta" + +[cost] +input = 0.30 +output = 1.20 + +[limit] +context = 1_048_576 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/mistral-nemo-instruct-2407.toml b/providers/digitalocean/models/mistral-nemo-instruct-2407.toml new file mode 100644 index 000000000..26422a1d2 --- /dev/null +++ 
b/providers/digitalocean/models/mistral-nemo-instruct-2407.toml @@ -0,0 +1,21 @@ +name = "Mistral NeMo" +family = "mistral" +attachment = false +reasoning = false +tool_call = true +temperature = true +open_weights = true +release_date = "2024-07" +last_updated = "2024-07" + +[cost] +input = 0.30 +output = 0.30 + +[limit] +context = 128_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/nvidia-nemotron-3-super-120b.toml b/providers/digitalocean/models/nvidia-nemotron-3-super-120b.toml new file mode 100644 index 000000000..cb0e7a0d6 --- /dev/null +++ b/providers/digitalocean/models/nvidia-nemotron-3-super-120b.toml @@ -0,0 +1,22 @@ +name = "Nemotron-3-Super-120B" +family = "nemotron" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-06" +last_updated = "2025-06" +status = "beta" + +[cost] +input = 0.30 +output = 0.65 + +[limit] +context = 131_072 +output = 32_768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-4.1.toml b/providers/digitalocean/models/openai-gpt-4.1.toml new file mode 100644 index 000000000..53da43caa --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-4.1.toml @@ -0,0 +1,22 @@ +name = "GPT-4.1" +family = "gpt" +attachment = true +reasoning = false +tool_call = true +temperature = true +open_weights = false +release_date = "2025-04" +last_updated = "2025-04" + +[cost] +input = 2.00 +output = 8.00 +cache_read = 0.50 + +[limit] +context = 1_047_576 +output = 32_768 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-4o-mini.toml b/providers/digitalocean/models/openai-gpt-4o-mini.toml new file mode 100644 index 000000000..b8a296b74 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-4o-mini.toml @@ -0,0 +1,22 @@ +name = "GPT-4o mini" +family = "gpt" +attachment = true +reasoning = 
false +tool_call = true +temperature = true +open_weights = false +release_date = "2024-07" +last_updated = "2024-07" + +[cost] +input = 0.15 +output = 0.60 +cache_read = 0.075 + +[limit] +context = 128_000 +output = 16_384 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-4o.toml b/providers/digitalocean/models/openai-gpt-4o.toml new file mode 100644 index 000000000..7fa11c254 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-4o.toml @@ -0,0 +1,22 @@ +name = "GPT-4o" +family = "gpt" +attachment = true +reasoning = false +tool_call = true +temperature = true +open_weights = false +release_date = "2024-05" +last_updated = "2024-05" + +[cost] +input = 2.50 +output = 10.00 +cache_read = 1.25 + +[limit] +context = 128_000 +output = 16_384 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5-2-pro.toml b/providers/digitalocean/models/openai-gpt-5-2-pro.toml new file mode 100644 index 000000000..d79d9561b --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5-2-pro.toml @@ -0,0 +1,21 @@ +name = "GPT-5.2 pro" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-10" +last_updated = "2025-10" + +[cost] +input = 21.00 +output = 168.00 + +[limit] +context = 200_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5-mini.toml b/providers/digitalocean/models/openai-gpt-5-mini.toml new file mode 100644 index 000000000..44e428cc4 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5-mini.toml @@ -0,0 +1,22 @@ +name = "GPT-5 mini" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-06" +last_updated = "2025-06" + +[cost] +input = 0.25 +output = 2.00 +cache_read = 0.025 + +[limit] 
+context = 200_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5-nano.toml b/providers/digitalocean/models/openai-gpt-5-nano.toml new file mode 100644 index 000000000..d28d7e69b --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5-nano.toml @@ -0,0 +1,22 @@ +name = "GPT-5 nano" +family = "gpt" +attachment = true +reasoning = false +tool_call = true +temperature = true +open_weights = false +release_date = "2025-06" +last_updated = "2025-06" + +[cost] +input = 0.05 +output = 0.40 +cache_read = 0.005 + +[limit] +context = 200_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.1-codex-max.toml b/providers/digitalocean/models/openai-gpt-5.1-codex-max.toml new file mode 100644 index 000000000..997cea90b --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.1-codex-max.toml @@ -0,0 +1,22 @@ +name = "GPT-5.1-Codex-Max" +family = "gpt" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-08" +last_updated = "2025-08" + +[cost] +input = 1.25 +output = 10.00 +cache_read = 0.125 + +[limit] +context = 200_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.2.toml b/providers/digitalocean/models/openai-gpt-5.2.toml new file mode 100644 index 000000000..1907f5c97 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.2.toml @@ -0,0 +1,22 @@ +name = "GPT-5.2" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-10" +last_updated = "2025-10" + +[cost] +input = 1.75 +output = 14.00 +cache_read = 0.175 + +[limit] +context = 200_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git 
a/providers/digitalocean/models/openai-gpt-5.3-codex.toml b/providers/digitalocean/models/openai-gpt-5.3-codex.toml new file mode 100644 index 000000000..c08bf147c --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.3-codex.toml @@ -0,0 +1,22 @@ +name = "GPT-5.3-Codex" +family = "gpt" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-12" +last_updated = "2025-12" + +[cost] +input = 1.75 +output = 14.00 +cache_read = 0.175 + +[limit] +context = 400_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.4-mini.toml b/providers/digitalocean/models/openai-gpt-5.4-mini.toml new file mode 100644 index 000000000..afa9a1e1f --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.4-mini.toml @@ -0,0 +1,22 @@ +name = "GPT-5.4 mini" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-03" +last_updated = "2026-03" + +[cost] +input = 0.75 +output = 4.50 +cache_read = 0.075 + +[limit] +context = 1_000_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.4-nano.toml b/providers/digitalocean/models/openai-gpt-5.4-nano.toml new file mode 100644 index 000000000..9415f7da7 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.4-nano.toml @@ -0,0 +1,22 @@ +name = "GPT-5.4 nano" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-03" +last_updated = "2026-03" + +[cost] +input = 0.20 +output = 1.25 +cache_read = 0.02 + +[limit] +context = 1_000_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.4-pro.toml b/providers/digitalocean/models/openai-gpt-5.4-pro.toml new file mode 100644 
index 000000000..07f7efd23 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.4-pro.toml @@ -0,0 +1,21 @@ +name = "GPT-5.4 pro" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-03" +last_updated = "2026-03" + +[cost] +input = 30.00 +output = 180.00 + +[limit] +context = 1_000_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.4.toml b/providers/digitalocean/models/openai-gpt-5.4.toml new file mode 100644 index 000000000..09cd3188e --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.4.toml @@ -0,0 +1,22 @@ +name = "GPT-5.4" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2026-03" +last_updated = "2026-03" + +[cost] +input = 2.50 +output = 15.00 +cache_read = 0.25 + +[limit] +context = 1_000_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-5.toml b/providers/digitalocean/models/openai-gpt-5.toml new file mode 100644 index 000000000..2990ebe77 --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-5.toml @@ -0,0 +1,22 @@ +name = "GPT-5" +family = "gpt" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-06" +last_updated = "2025-06" + +[cost] +input = 1.25 +output = 10.00 +cache_read = 0.125 + +[limit] +context = 200_000 +output = 128_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-image-1.5.toml b/providers/digitalocean/models/openai-gpt-image-1.5.toml new file mode 100644 index 000000000..1abc64eed --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-image-1.5.toml @@ -0,0 +1,22 @@ +name = "GPT Image 1.5" +family = "gpt" +attachment = true +reasoning = 
false +tool_call = false +temperature = false +open_weights = false +release_date = "2026-02" +last_updated = "2026-02" + +[cost] +input = 5.00 +output = 10.00 +cache_read = 1.00 + +[limit] +context = 32_000 +output = 4_096 + +[modalities] +input = ["text", "image"] +output = ["text", "image"] diff --git a/providers/digitalocean/models/openai-gpt-image-1.toml b/providers/digitalocean/models/openai-gpt-image-1.toml new file mode 100644 index 000000000..358f82e4f --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-image-1.toml @@ -0,0 +1,22 @@ +name = "GPT Image 1" +family = "gpt" +attachment = true +reasoning = false +tool_call = false +temperature = false +open_weights = false +release_date = "2025-04" +last_updated = "2025-04" + +[cost] +input = 5.00 +output = 40.00 +cache_read = 1.25 + +[limit] +context = 32_000 +output = 4_096 + +[modalities] +input = ["text", "image"] +output = ["text", "image"] diff --git a/providers/digitalocean/models/openai-gpt-oss-120b.toml b/providers/digitalocean/models/openai-gpt-oss-120b.toml new file mode 100644 index 000000000..dfbba50ea --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-oss-120b.toml @@ -0,0 +1,21 @@ +name = "gpt-oss-120b" +family = "gpt" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-07" +last_updated = "2025-07" + +[cost] +input = 0.10 +output = 0.70 + +[limit] +context = 131_072 +output = 131_072 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-gpt-oss-20b.toml b/providers/digitalocean/models/openai-gpt-oss-20b.toml new file mode 100644 index 000000000..1a307337e --- /dev/null +++ b/providers/digitalocean/models/openai-gpt-oss-20b.toml @@ -0,0 +1,21 @@ +name = "gpt-oss-20b" +family = "gpt" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true +release_date = "2025-07" +last_updated = "2025-07" + +[cost] +input = 0.05 +output = 
0.45 + +[limit] +context = 131_072 +output = 131_072 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-o1.toml b/providers/digitalocean/models/openai-o1.toml new file mode 100644 index 000000000..c561b6d88 --- /dev/null +++ b/providers/digitalocean/models/openai-o1.toml @@ -0,0 +1,22 @@ +name = "o1" +family = "o" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2024-12" +last_updated = "2024-12" + +[cost] +input = 15.00 +output = 60.00 +cache_read = 7.50 + +[limit] +context = 200_000 +output = 100_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-o3-mini.toml b/providers/digitalocean/models/openai-o3-mini.toml new file mode 100644 index 000000000..c68682f04 --- /dev/null +++ b/providers/digitalocean/models/openai-o3-mini.toml @@ -0,0 +1,22 @@ +name = "o3-mini" +family = "o" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-01" +last_updated = "2025-01" + +[cost] +input = 1.10 +output = 4.40 +cache_read = 0.55 + +[limit] +context = 200_000 +output = 100_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/models/openai-o3.toml b/providers/digitalocean/models/openai-o3.toml new file mode 100644 index 000000000..2c3c8aa8f --- /dev/null +++ b/providers/digitalocean/models/openai-o3.toml @@ -0,0 +1,22 @@ +name = "o3" +family = "o" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false +release_date = "2025-04" +last_updated = "2025-04" + +[cost] +input = 2.00 +output = 8.00 +cache_read = 0.50 + +[limit] +context = 200_000 +output = 100_000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/digitalocean/models/trinity-large-thinking.toml b/providers/digitalocean/models/trinity-large-thinking.toml new file 
mode 100644 index 000000000..25b5c4322 --- /dev/null +++ b/providers/digitalocean/models/trinity-large-thinking.toml @@ -0,0 +1,22 @@ +name = "Trinity Large" +attachment = false +reasoning = true +tool_call = false +temperature = true +open_weights = false +release_date = "2025-06" +last_updated = "2025-06" +status = "beta" + +[cost] +input = 0.25 +output = 0.90 +cache_read = 0.06 + +[limit] +context = 128_000 +output = 128_000 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/digitalocean/provider.toml b/providers/digitalocean/provider.toml new file mode 100644 index 000000000..e1d461b94 --- /dev/null +++ b/providers/digitalocean/provider.toml @@ -0,0 +1,5 @@ +name = "DigitalOcean" +npm = "@ai-sdk/openai-compatible" +api = "https://inference.do-ai.run/v1" +env = ["DIGITALOCEAN_API_KEY"] +doc = "https://docs.digitalocean.com/products/gradient-ai-platform/"