from openapi import SDK

# Example: register a new database-sourced model in the LLM registry.
# The nested payload sections are named up front for readability.
registry_auth = {
    "type": "api-key",
    "env_var": "CUSTOM_MODEL_API_KEY",
}
registry_capabilities = {
    "modalities": {
        "input": ["text"],
        "output": ["text"],
    },
    "reasoning": True,
    "supports_temperature": True,
    "supports_top_p": True,
}
registry_default_params = {
    "temperature": 0.2,
    "top_p": 0.95,
}
registry_health_check = {
    "url": "https://models.example.com/health",
    "method": "GET",
}

with SDK(bearer_auth="<YOUR_BEARER_TOKEN_HERE>") as sdk:
    res = sdk.llm.models.add_registry_model(
        id="meetkai:my-custom-chat-v1",
        provider="openai-compatible",
        model_id="my-custom-chat-v1",
        api_format="responses",
        created=1704067200,
        auth=registry_auth,
        capabilities=registry_capabilities,
        context_window=131072,
        display_name="My Custom Chat v1",
        base_url="https://models.example.com/v1",
        rpm=120,
        default_params=registry_default_params,
        hidden=False,
        health_check=registry_health_check,
    )
# Handle response
print(res)
{
"id": "meetkai:my-custom-chat-v1",
"provider": "openai-compatible",
"model_id": "my-custom-chat-v1",
"source": "database",
"immutable": false,
"definition": {
"id": "meetkai:my-custom-chat-v1",
"provider": "openai-compatible",
"modelId": "my-custom-chat-v1",
"displayName": "My Custom Chat v1",
"baseUrl": "https://models.example.com/v1",
"apiFormat": "responses",
"created": 1704067200,
"auth": {
"type": "api-key",
"envVar": "CUSTOM_MODEL_API_KEY"
},
"capabilities": {
"modalities": {
"input": [
"text"
],
"output": [
"text"
]
},
"reasoning": true,
"supportsTemperature": true,
"supportsTopP": true
},
"contextWindow": 131072,
"rpm": 120,
"defaultParams": {
"temperature": 0.2,
"topP": 0.95
},
"hidden": false,
"healthCheck": {
"url": "https://models.example.com/health",
"method": "GET"
},
"source": "database",
"immutable": false
},
"health": {
"is_available": true,
"last_health_check": 1704067200000,
"error": null
},
"created_at": 1704067200000,
"updated_at": 1704153600000
}

Agrega un nuevo modelo de origen de base de datos al registro. Los modelos YAML no pueden ser sobrescritos.
from openapi import SDK

# Example: add a database-sourced model to the registry.
# All call arguments are collected in one mapping and splatted
# into the SDK method as keyword arguments.
registration = {
    "id": "meetkai:my-custom-chat-v1",
    "provider": "openai-compatible",
    "model_id": "my-custom-chat-v1",
    "api_format": "responses",
    "created": 1704067200,
    "auth": {
        "type": "api-key",
        "env_var": "CUSTOM_MODEL_API_KEY",
    },
    "capabilities": {
        "modalities": {
            "input": ["text"],
            "output": ["text"],
        },
        "reasoning": True,
        "supports_temperature": True,
        "supports_top_p": True,
    },
    "context_window": 131072,
    "display_name": "My Custom Chat v1",
    "base_url": "https://models.example.com/v1",
    "rpm": 120,
    "default_params": {
        "temperature": 0.2,
        "top_p": 0.95,
    },
    "hidden": False,
    "health_check": {
        "url": "https://models.example.com/health",
        "method": "GET",
    },
}

with SDK(bearer_auth="<YOUR_BEARER_TOKEN_HERE>") as sdk:
    res = sdk.llm.models.add_registry_model(**registration)
# Handle response
print(res)
{
"id": "meetkai:my-custom-chat-v1",
"provider": "openai-compatible",
"model_id": "my-custom-chat-v1",
"source": "database",
"immutable": false,
"definition": {
"id": "meetkai:my-custom-chat-v1",
"provider": "openai-compatible",
"modelId": "my-custom-chat-v1",
"displayName": "My Custom Chat v1",
"baseUrl": "https://models.example.com/v1",
"apiFormat": "responses",
"created": 1704067200,
"auth": {
"type": "api-key",
"envVar": "CUSTOM_MODEL_API_KEY"
},
"capabilities": {
"modalities": {
"input": [
"text"
],
"output": [
"text"
]
},
"reasoning": true,
"supportsTemperature": true,
"supportsTopP": true
},
"contextWindow": 131072,
"rpm": 120,
"defaultParams": {
"temperature": 0.2,
"topP": 0.95
},
"hidden": false,
"healthCheck": {
"url": "https://models.example.com/health",
"method": "GET"
},
"source": "database",
"immutable": false
},
"health": {
"is_available": true,
"last_health_check": 1704067200000,
"error": null
},
"created_at": 1704067200000,
"updated_at": 1704153600000
}

Gateway auth: send Authorization: Bearer <mka1-api-key>. For multi-user server-side integrations, you can also send X-On-Behalf-Of: <external-user-id>.
responses, completions, embeddings, images
0 <= x <= 9007199254740991
Show child attributes
Show child attributes
-9007199254740991 < x <= 9007199254740991
-9007199254740991 < x <= 9007199254740991
Show child attributes
Show child attributes
Show child attributes
Show child attributes
Está bien
¿Esta página le ayudó?