List LLM Models
/v1/models/
List available LLM models using the asynchronous implementation for improved performance.
Returns Model format which extends LLMConfig with additional metadata fields. Legacy LLMConfig fields are marked as deprecated but still available for backward compatibility.
Query Parameters
provider_name: optional string
Returns
List Llm Models
curl https://api.letta.com/v1/models/ \
-H "Authorization: Bearer $LETTA_API_KEY"
[
{
"context_window": 0,
"max_context_window": 0,
"model": "model",
"model_endpoint_type": "openai",
"name": "name",
"provider_type": "anthropic",
"compatibility_type": "gguf",
"display_name": "display_name",
"enable_reasoner": true,
"frequency_penalty": 0,
"handle": "handle",
"max_reasoning_tokens": 0,
"max_tokens": 0,
"model_endpoint": "model_endpoint",
"model_type": "llm",
"model_wrapper": "model_wrapper",
"parallel_tool_calls": true,
"provider_category": "base",
"provider_name": "provider_name",
"put_inner_thoughts_in_kwargs": true,
"reasoning_effort": "minimal",
"temperature": 0,
"tier": "tier",
"verbosity": "low"
}
]

Returns Examples
[
{
"context_window": 0,
"max_context_window": 0,
"model": "model",
"model_endpoint_type": "openai",
"name": "name",
"provider_type": "anthropic",
"compatibility_type": "gguf",
"display_name": "display_name",
"enable_reasoner": true,
"frequency_penalty": 0,
"handle": "handle",
"max_reasoning_tokens": 0,
"max_tokens": 0,
"model_endpoint": "model_endpoint",
"model_type": "llm",
"model_wrapper": "model_wrapper",
"parallel_tool_calls": true,
"provider_category": "base",
"provider_name": "provider_name",
"put_inner_thoughts_in_kwargs": true,
"reasoning_effort": "minimal",
"temperature": 0,
"tier": "tier",
"verbosity": "low"
}
]