Skip to main content

AI Models & Provider Keys

Manage AI models, configure provider API keys, and track usage analytics.

Overview

Three resources work together:

  • AI Models (client.ai.models) -- Register, deploy, and manage AI models across providers.
  • Provider Keys (client.ai.provider_keys) -- Store and test API keys for third-party AI providers.
  • AI Analytics (client.ai.analytics) -- Track usage, costs, and performance across your AI infrastructure.

AI Models

Listing Models

from strongly import Strongly

client = Strongly()

# List all models
for model in client.ai.models.list():
    print(f"{model.name} ({model.provider}) — {model.status}")

# Filter by provider
for model in client.ai.models.list(provider="openai"):
    print(f"{model.name}: {model.vendor_model_id}")

# Filter by status
for model in client.ai.models.list(status="active"):
    print(f"{model.name} — ready")

# Filter by type
for model in client.ai.models.list(type="third-party"):
    print(model.name)

# Search by name
for model in client.ai.models.list(search="gpt"):
    print(model.name)

# Get all as a list (materializes the paginator)
all_models = client.ai.models.list().to_list()

Model Overview

Get a summary of model counts by status:

from strongly import Strongly

client = Strongly()

overview = client.ai.models.overview()

# Print each count on its own "Label: value" line.
counts = [
    ("Total models", overview.total),
    ("Active", overview.active),
    ("Deploying", overview.deploying),
    ("Stopped", overview.stopped),
    ("Failed", overview.failed),
    ("Third-party", overview.third_party),
    ("Self-hosted", overview.self_hosted),
]
for label, count in counts:
    print(f"{label}: {count}")

Registering a Model

from strongly import Strongly

client = Strongly()

# Describe the third-party model, then register it.
model_spec = {
    "name": "GPT-4o Mini",
    "type": "third-party",
    "provider": "openai",
    "vendor_model_id": "gpt-4o-mini",
    "model_type": "chat",
    "description": "Fast and affordable chat model",
    "capabilities": ["chat", "function_calling"],
    "max_tokens": 16384,
    "context_window": 128000,
    "config": {
        "default_temperature": 0.7,
    },
}
model = client.ai.models.create(model_spec)

print(f"Registered: {model.name} (ID: {model.id})")

Retrieving and Updating

from strongly import Strongly

client = Strongly()

# Retrieve a model
model = client.ai.models.retrieve("model-abc123")
# NOTE: separator restored — the original f-string printed name and status
# fused together ("{model.name}{model.status}"); other examples use " — ".
print(f"{model.name} — {model.status}")
print(f"Provider: {model.provider}")
print(f"Context window: {model.context_window}")

# Update model details
updated = client.ai.models.update("model-abc123", {
    "description": "Updated description",
    "max_tokens": 32768,
})
print(f"Updated: {updated.name}")

Deploying and Managing Lifecycle

from strongly import Strongly
import time

client = Strongly()

# Deploy a self-hosted model
client.ai.models.deploy("model-abc123")

# Poll deployment status until at least one replica is ready.
while True:
    status = client.ai.models.status("model-abc123")
    print(f"Status: {status.status}, Replicas: {status.ready_replicas}/{status.replicas}")

    if status.ready_replicas and status.ready_replicas > 0:
        print("Model is ready")
        break
    time.sleep(5)

# Stop a running model
client.ai.models.stop("model-abc123")

# Start a stopped model
client.ai.models.start("model-abc123")

# Delete a model
client.ai.models.delete("model-abc123")

Monitoring

from strongly import Strongly

client = Strongly()

model_id = "model-abc123"

# Get model metrics
print(client.ai.models.metrics(model_id))

# View the last 100 log lines from the past hour
print(client.ai.models.logs(model_id, lines=100, since="1h"))

# Logs from a specific container
print(client.ai.models.logs(model_id, container="inference"))

Permissions

Control who can access a model:

from strongly import Strongly

client = Strongly()

model_id = "model-abc123"

# Inspect the model's current permissions.
perms = client.ai.models.get_permissions(model_id)
for label, value in [
    ("Owner", perms.owner),
    ("Shared", perms.is_shared),
    ("Shared with", perms.shared_with),
    ("Organization", perms.organization_id),
]:
    print(f"{label}: {value}")

# Share with the entire organization
client.ai.models.update_permissions(model_id, is_shared=True)

# Share with specific users
client.ai.models.update_permissions(model_id, shared_with=["user-1", "user-2"])

AI Models Method Reference

| Method | Description | Returns |
| --- | --- | --- |
| `list(*, search=None, type=None, status=None, provider=None, model_type=None, limit=50)` | List models with optional filters | `SyncPaginator[AIModel]` |
| `overview()` | Get model count summary | `AIModelOverview` |
| `create(body)` | Register a new model | `AIModel` |
| `retrieve(model_id)` | Get a model by ID | `AIModel` |
| `update(model_id, body)` | Update model fields | `AIModel` |
| `delete(model_id)` | Delete a model | `dict` |
| `deploy(model_id, **kwargs)` | Deploy a self-hosted model | `dict` |
| `start(model_id)` | Start a stopped model | `dict` |
| `stop(model_id)` | Stop a running model | `dict` |
| `status(model_id)` | Get deployment status | `AIModelStatus` |
| `metrics(model_id)` | Get model metrics | `dict` |
| `logs(model_id, *, lines=None, since=None, container=None)` | Get model logs | `dict` |
| `get_permissions(model_id)` | Get model permissions | `AIModelPermissions` |
| `update_permissions(model_id, *, is_shared=None, shared_with=None)` | Update model permissions | `dict` |

AIModel Fields

FieldTypeDescription
idstrUnique model identifier
namestrDisplay name
typestrModel type (third-party, self-hosted)
providerstrProvider name (openai, anthropic, huggingface, etc.)
vendor_model_idstrProvider's model identifier
model_typestrCapability type (chat, completion, embedding)
statusstrCurrent status (active, deploying, stopped, failed)
descriptionstrHuman-readable description
capabilitieslistList of supported capabilities
max_tokensintMaximum output tokens
context_windowintMaximum context length in tokens
ownerstrOwner user ID
organization_idstrOwning organization
is_sharedboolWhether the model is shared
shared_withlistUser IDs with access
configdictModel-specific configuration
created_atstrCreation timestamp
updated_atstrLast update timestamp

AIModelOverview Fields

FieldTypeDescription
totalintTotal model count
activeintActive models
deployingintModels currently deploying
stoppedintStopped models
failedintFailed models
third_partyintThird-party provider models
self_hostedintSelf-hosted models

AIModelStatus Fields

FieldTypeDescription
statusstrCurrent deployment status
replicasintDesired replica count
ready_replicasintReplicas ready to serve
conditionslistDeployment conditions

AIModelPermissions Fields

FieldTypeDescription
ownerstrOwner user ID
shared_withlistUser IDs with access
is_sharedboolWhether model is shared
organization_idstrOwning organization

Provider Keys

Provider keys store API credentials for third-party AI services. Keys are encrypted at rest and only the masked version is returned after creation.

Listing Keys

from strongly import Strongly

client = Strongly()

# List all provider keys
for key in client.ai.provider_keys.list():
    print(f"{key.name} ({key.provider}) — {key.masked_key}")

# Filter by provider
for key in client.ai.provider_keys.list(provider="openai"):
    print(f"{key.name}: last used {key.last_used_at}")

# Filter by status
for key in client.ai.provider_keys.list(status="active"):
    print(key.name)

Creating a Key

from strongly import Strongly

client = Strongly()

# Describe the credential, then store it (encrypted at rest).
key_spec = {
    "name": "Production OpenAI Key",
    "provider": "openai",
    "api_key": "sk-...",
    "description": "Main production key for chat models",
    "provider_organization": "org-abc123",
}
key = client.ai.provider_keys.create(key_spec)

print(f"Created: {key.name} (ID: {key.id})")
print(f"Masked: {key.masked_key}")

Testing a Key

Verify that a stored key is valid:

from strongly import Strongly

client = Strongly()

result = client.ai.provider_keys.test("key-abc123")

if result.success:
    print(f"Key is valid (tested at {result.tested_at})")
else:
    print(f"Key test failed: {result.message}")

Updating and Deleting

from strongly import Strongly

client = Strongly()

# Update key details
updated = client.ai.provider_keys.update("key-abc123", {
    "name": "Updated Key Name",
    "description": "Rotated key for Q2 2025",
})
print(f"Updated: {updated.name}")

# Retrieve key details
key = client.ai.provider_keys.retrieve("key-abc123")
# NOTE: separator restored — the original f-string printed name and provider
# fused together ("{key.name}{key.provider}"); other examples use " — ".
print(f"{key.name} — {key.provider}")

# Delete a key
client.ai.provider_keys.delete("key-abc123")

Provider Keys Method Reference

| Method | Description | Returns |
| --- | --- | --- |
| `list(*, provider=None, status=None, search=None, limit=50)` | List provider keys | `SyncPaginator[ProviderKey]` |
| `create(body)` | Create a new provider key | `ProviderKey` |
| `retrieve(key_id)` | Get a key by ID | `ProviderKey` |
| `update(key_id, body)` | Update key fields | `ProviderKey` |
| `delete(key_id)` | Delete a key | `dict` |
| `test(key_id)` | Test if a key is valid | `ProviderKeyTestResult` |

ProviderKey Fields

FieldTypeDescription
idstrUnique key identifier
namestrDisplay name
providerstrAI provider (openai, anthropic, etc.)
descriptionstrHuman-readable description
provider_organizationstrProvider-side organization ID
ownerstrOwner user ID
organization_idstrOwning organization
is_activeboolWhether the key is active
last_used_atstrLast usage timestamp
last_tested_atstrLast test timestamp
test_resultdictLast test result details
masked_keystrMasked version of the API key (e.g., sk-...abc)
created_atstrCreation timestamp
updated_atstrLast update timestamp

ProviderKeyTestResult Fields

FieldTypeDescription
successboolWhether the key is valid
messagestrResult message or error details
tested_atstrTimestamp of the test

AI Analytics

Track usage, costs, and performance metrics across your AI models and providers.

Usage Statistics

from strongly import Strongly

client = Strongly()

# Overall usage
usage = client.ai.analytics.usage()
print(f"Total requests: {usage.total_requests}")
print(f"Total tokens: {usage.total_tokens}")
print(f"Prompt tokens: {usage.prompt_tokens}")
print(f"Completion tokens: {usage.completion_tokens}")

# Usage for a specific model
usage = client.ai.analytics.usage(model_id="model-abc123")
print(f"Model requests: {usage.total_requests}")

# Usage for a date range
usage = client.ai.analytics.usage(
    start_date="2025-01-01",
    end_date="2025-01-31",
    granularity="daily",
)
print(f"Period: {usage.period}")
for entry in usage.breakdown:
    print(f" {entry}")

Cost Breakdown

from strongly import Strongly

client = Strongly()

# Total costs
costs = client.ai.analytics.costs()
print(f"Total cost: {costs.currency} {costs.total_cost:.2f}")

# Costs grouped by model
costs = client.ai.analytics.costs(group_by="model")
for entry in costs.by_model:
    print(f" {entry}")

# Costs grouped by provider
costs = client.ai.analytics.costs(group_by="provider")
for entry in costs.by_provider:
    print(f" {entry}")

# Costs for a specific provider and date range
costs = client.ai.analytics.costs(
    provider="openai",
    start_date="2025-01-01",
    end_date="2025-01-31",
)
print(f"OpenAI cost: {costs.currency} {costs.total_cost:.2f}")

Performance Metrics

from strongly import Strongly

client = Strongly()

# Overall performance across all models.
stats = client.ai.analytics.performance()
print(f"Average latency: {stats.avg_latency_ms:.1f}ms")
print(f"P50 latency: {stats.p50_latency_ms:.1f}ms")
print(f"P95 latency: {stats.p95_latency_ms:.1f}ms")
print(f"P99 latency: {stats.p99_latency_ms:.1f}ms")
print(f"Error rate: {stats.error_rate:.2%}")
print(f"Throughput: {stats.throughput:.1f} req/s")

# Performance scoped to a single model.
stats = client.ai.analytics.performance(model_id="model-abc123")
print(f"Model avg latency: {stats.avg_latency_ms:.1f}ms")

Time Series Data

from strongly import Strongly

client = Strongly()

# Daily request counts for January.
daily_requests = client.ai.analytics.time_series(
    metric="requests",
    granularity="daily",
    start_date="2025-01-01",
    end_date="2025-01-31",
)
print(daily_requests)

# Hourly latency for one model over a single day.
hourly_latency = client.ai.analytics.time_series(
    metric="latency",
    granularity="hourly",
    model_id="model-abc123",
    start_date="2025-01-15",
    end_date="2025-01-16",
)
print(hourly_latency)

Provider Statistics

from strongly import Strongly

client = Strongly()

# Stats grouped by provider, all time.
print(client.ai.analytics.providers())

# Stats restricted to a specific date range.
stats_for_january = client.ai.analytics.providers(
    start_date="2025-01-01",
    end_date="2025-01-31",
)
print(stats_for_january)

AI Analytics Method Reference

| Method | Description | Returns |
| --- | --- | --- |
| `usage(*, start_date=None, end_date=None, model_id=None, provider=None, granularity=None)` | Get usage statistics | `UsageStats` |
| `costs(*, start_date=None, end_date=None, model_id=None, provider=None, group_by=None)` | Get cost breakdown | `CostBreakdown` |
| `performance(*, start_date=None, end_date=None, model_id=None, provider=None)` | Get performance metrics | `PerformanceStats` |
| `time_series(*, start_date=None, end_date=None, model_id=None, metric=None, granularity=None, provider=None)` | Get time series data | `TimeSeriesData` |
| `providers(*, start_date=None, end_date=None)` | Get per-provider statistics | `ProviderStats` |

UsageStats Fields

FieldTypeDescription
total_requestsintTotal number of requests
total_tokensintTotal tokens consumed
prompt_tokensintTokens in prompts
completion_tokensintTokens in completions
periodstrReporting period label
start_datestrPeriod start date
end_datestrPeriod end date
model_idstrFilter model (if specified)
providerstrFilter provider (if specified)
granularitystrData granularity (hourly, daily, weekly, monthly)
breakdownlistPer-period breakdown entries

CostBreakdown Fields

FieldTypeDescription
total_costfloatTotal cost for the period
currencystrCurrency code (default: "USD")
periodstrReporting period label
start_datestrPeriod start date
end_datestrPeriod end date
model_idstrFilter model (if specified)
providerstrFilter provider (if specified)
group_bystrGrouping field (model, provider)
by_modellistCost entries grouped by model
by_providerlistCost entries grouped by provider

PerformanceStats Fields

FieldTypeDescription
start_datestrPeriod start date
end_datestrPeriod end date
model_idstrFilter model (if specified)
providerstrFilter provider (if specified)
avg_latency_msfloatAverage latency in milliseconds
p50_latency_msfloat50th percentile latency
p95_latency_msfloat95th percentile latency
p99_latency_msfloat99th percentile latency
error_ratefloatFraction of requests that failed
throughputfloatRequests per second

Complete Example

from strongly import Strongly


def main():
    """End-to-end tour: register a model, store/test a provider key,
    inspect permissions and analytics, then clean up."""
    client = Strongly()

    # --- Model overview ---
    print("=== Model Overview ===")
    overview = client.ai.models.overview()
    print(f"Total: {overview.total}, Active: {overview.active}, Stopped: {overview.stopped}")

    # --- Register a third-party model ---
    print("\n=== Register Model ===")
    model = client.ai.models.create({
        "name": "Claude 3.5 Sonnet",
        "type": "third-party",
        "provider": "anthropic",
        "vendor_model_id": "claude-3-5-sonnet-20241022",
        "model_type": "chat",
        "description": "Anthropic's balanced model for complex tasks",
        "capabilities": ["chat", "function_calling", "vision"],
        "max_tokens": 8192,
        "context_window": 200000,
    })
    print(f"Created: {model.name} (ID: {model.id})")

    # --- Add a provider key ---
    print("\n=== Provider Key ===")
    key = client.ai.provider_keys.create({
        "name": "Anthropic Production Key",
        "provider": "anthropic",
        "api_key": "sk-ant-...",
        "description": "Production key for Claude models",
    })
    print(f"Key created: {key.name} ({key.masked_key})")

    # Test the key
    result = client.ai.provider_keys.test(key.id)
    print(f"Key valid: {result.success}")

    # --- List active models ---
    print("\n=== Active Models ===")
    for m in client.ai.models.list(status="active"):
        print(f" {m.name} ({m.provider}/{m.vendor_model_id})")

    # --- Check permissions ---
    print("\n=== Model Permissions ===")
    perms = client.ai.models.get_permissions(model.id)
    print(f"Owner: {perms.owner}, Shared: {perms.is_shared}")

    # Share with the organization
    client.ai.models.update_permissions(model.id, is_shared=True)
    print("Model shared with organization")

    # --- Usage analytics ---
    print("\n=== Usage Analytics ===")
    usage = client.ai.analytics.usage(
        start_date="2025-01-01",
        end_date="2025-01-31",
    )
    print(f"Requests: {usage.total_requests}, Tokens: {usage.total_tokens}")

    # --- Cost breakdown ---
    print("\n=== Cost Breakdown ===")
    costs = client.ai.analytics.costs(group_by="provider")
    print(f"Total cost: {costs.currency} {costs.total_cost:.2f}")

    # --- Performance ---
    print("\n=== Performance ===")
    perf = client.ai.analytics.performance()
    print(f"Avg latency: {perf.avg_latency_ms:.1f}ms, Error rate: {perf.error_rate:.2%}")

    # --- Cleanup ---
    print("\n=== Cleanup ===")
    client.ai.provider_keys.delete(key.id)
    client.ai.models.delete(model.id)
    print("Done")


if __name__ == "__main__":
    main()