from gcore import Gcore

# Create an authenticated Gcore client.
gcore_client = Gcore(api_key="My API Key")

# Retrieve a single inference model by its ID and show its identifier.
model = gcore_client.cloud.inference.models.get("model_id")
print(model.id)
{
"category": "Text Classification",
"default_flavor_name": "inference-16vcpu-232gib-1xh100-80gb",
"description": "My first model",
"developer": "Stability AI",
"documentation_page": "/docs",
"eula_url": "https://example.com/eula",
"example_curl_request": "curl -X POST http://localhost:8080/predict -d '{\"data\": \"sample\"}'",
"has_eula": true,
"id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"image_registry_id": "123e4567-e89b-12d3-a456-426614174999",
"image_url": "registry.hub.docker.com/my_model:latest",
"inference_backend": "torch",
"inference_frontend": "gradio",
"model_id": "mistralai/Pixtral-12B-2409",
"name": "model1",
"openai_compatibility": "full",
"port": 8080,
"version": "v0.1"
}
from gcore import Gcore

# Build the API client using an API key for authentication.
api = Gcore(api_key="My API Key")

# Look up the inference model and print out its ID field.
fetched = api.cloud.inference.models.get("model_id")
print(fetched.id)
{
"category": "Text Classification",
"default_flavor_name": "inference-16vcpu-232gib-1xh100-80gb",
"description": "My first model",
"developer": "Stability AI",
"documentation_page": "/docs",
"eula_url": "https://example.com/eula",
"example_curl_request": "curl -X POST http://localhost:8080/predict -d '{\"data\": \"sample\"}'",
"has_eula": true,
"id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"image_registry_id": "123e4567-e89b-12d3-a456-426614174999",
"image_url": "registry.hub.docker.com/my_model:latest",
"inference_backend": "torch",
"inference_frontend": "gradio",
"model_id": "mistralai/Pixtral-12B-2409",
"name": "model1",
"openai_compatibility": "full",
"port": 8080,
"version": "v0.1"
}
API key for authentication. Make sure to include the word `apikey`, followed by a single space and then your token. Example: `apikey 1234$abcdef`
Model ID
OK
The response is of type `object`.
Was this page helpful?