LLM Gateway

Models

List all available models

GET
/v1/models

Query Parameters

include_deactivated (optional, string)

Include deactivated models in the response

exclude_deprecated (optional, string)

Exclude deprecated models from the response

Response Body

curl -X GET "https://api.llmgateway.io/v1/models?include_deactivated=false&exclude_deprecated=false"
{
  "data": [
    {
      "id": "string",
      "name": "string",
      "aliases": [
        "string"
      ],
      "created": 0,
      "description": "string",
      "family": "string",
      "architecture": {
        "input_modalities": [
          "text"
        ],
        "output_modalities": [
          "text"
        ],
        "tokenizer": "string"
      },
      "top_provider": {
        "is_moderated": true
      },
      "providers": [
        {
          "providerId": "string",
          "modelName": "string",
          "pricing": {
            "prompt": "string",
            "completion": "string",
            "image": "string"
          },
          "streaming": true,
          "vision": true,
          "cancellation": true,
          "tools": true,
          "parallelToolCalls": true,
          "reasoning": true,
          "stability": "stable"
        }
      ],
      "pricing": {
        "prompt": "string",
        "completion": "string",
        "image": "string",
        "request": "string",
        "input_cache_read": "string",
        "input_cache_write": "string",
        "web_search": "string",
        "internal_reasoning": "string"
      },
      "context_length": 0,
      "per_request_limits": {
        "property1": "string",
        "property2": "string"
      },
      "supported_parameters": [
        "string"
      ],
      "json_output": true,
      "free": true,
      "deprecated_at": "string",
      "deactivated_at": "string",
      "stability": "stable"
    }
  ]
}

How is this guide?

Last updated on