laravel-llm-gateway/laravel-app/app/Http/Controllers/Api/ProviderController.php
Commit cb495e18e3 by wtrinkl: Fix API controllers to use correct database column names
- Fix model_pricing table references (model_id -> model, display_name -> model)
- Fix price columns (output_price_per_1k -> output_price_per_million)
- Add price conversion (per_million / 1000 = per_1k) in all API responses (see the sketch after this commit message)
- Add whereNotNull('model') filters to exclude invalid entries
- Add getModelDisplayName() helper method to all controllers
- Fix AccountController to use gateway_users budget fields directly
- Remove Budget model dependencies from AccountController
- Add custom Scramble server URL configuration for API docs
- Create ScrambleServiceProvider to set correct /api prefix
- Add migration to rename user_id to gateway_user_id in llm_requests
- Add custom ApiGuard for gateway_users authentication
- Update all API controllers: AccountController, ModelController, PricingController, ProviderController

All API endpoints now working correctly:
- GET /api/account
- GET /api/models
- GET /api/pricing
- GET /api/providers/{provider}
2025-11-19 19:36:58 +01:00
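
The per-million to per-1K price conversion described above is the same arithmetic applied in the `pricing` block of `show()` below. A minimal standalone sketch, using illustrative prices rather than values from the database:

```php
// Minimal sketch of the price conversion described in the commit message.
// model_pricing stores USD per one million tokens; the API exposes USD per 1K tokens.
// The prices below are illustrative only.
$inputPricePerMillion  = 10.0;  // e.g. $10.00 per 1M input tokens
$outputPricePerMillion = 30.0;  // e.g. $30.00 per 1M output tokens

$pricing = [
    'input_per_1k'  => round($inputPricePerMillion / 1000, 6),   // 0.01
    'output_per_1k' => round($outputPricePerMillion / 1000, 6),  // 0.03
    'currency'      => 'USD',
];
```

These example figures line up with the `gpt-4-turbo` pricing shown in the `show()` docblock (0.01 / 0.03 USD per 1K tokens).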


<?php

namespace App\Http\Controllers\Api;

use App\Http\Controllers\Controller;
use App\Services\LLM\ProviderFactory;
use App\Models\{GatewayUserCredential, LlmRequest, ModelPricing};
use Illuminate\Http\JsonResponse;
use Illuminate\Http\Request;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;

class ProviderController extends Controller
{
/**
* Get list of all supported LLM providers
*
* Returns a list of all LLM providers supported by the gateway, including their
* availability status, the authenticated user's credential status, and the number of available models.
*
* ## Response Fields
*
* - `id` - Provider identifier (openai, anthropic, gemini, deepseek, mistral)
* - `name` - Human-readable provider name
* - `status` - Always "available" for supported providers
* - `has_credentials` - Whether the user has configured credentials for this provider
* - `credentials_status` - Status of the credentials (active, inactive, null if not configured)
* - `last_tested` - When credentials were last tested (ISO 8601)
* - `supported_features` - Array of supported features (chat, streaming, etc.)
* - `models_count` - Number of models available from this provider
*
* ## Example Response
*
* ```json
* {
* "data": [
* {
* "id": "openai",
* "name": "OpenAI",
* "status": "available",
* "has_credentials": true,
* "credentials_status": "active",
* "last_tested": "2025-11-19T10:30:00Z",
* "supported_features": ["chat", "streaming"],
* "models_count": 12
* }
* ]
* }
* ```
*
* @tags Providers
*
* @param Request $request
* @return JsonResponse
*/
public function index(Request $request): JsonResponse
{
$user = $request->user();
$providers = ProviderFactory::getSupportedProviders();
$providerData = [];
foreach ($providers as $providerId) {
// Get credential info for this provider
$credential = GatewayUserCredential::where('gateway_user_id', $user->user_id)
->where('provider', $providerId)
->first();
// Get model count for this provider
$modelsCount = ModelPricing::where('provider', $providerId)
->where('is_active', true)
->whereNotNull('model')
->count();
$providerData[] = [
'id' => $providerId,
'name' => $this->getProviderName($providerId),
'status' => 'available',
'has_credentials' => $credential !== null,
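// 'active' when credentials exist and are enabled, 'inactive' when they exist but are disabled, null when none are configured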
'credentials_status' => $credential?->is_active ? 'active' : ($credential ? 'inactive' : null),
'last_tested' => $credential?->last_tested_at?->toIso8601String(),
'supported_features' => $this->getProviderFeatures($providerId),
'models_count' => $modelsCount,
];
}
return response()->json([
'data' => $providerData,
]);
}
/**
* Get detailed information about a specific provider
*
* Returns comprehensive information about a specific LLM provider, including:
* - Provider details and capabilities
* - User's credential status
* - Available models with pricing
* - Usage statistics
*
* ## Path Parameters
*
* - `provider` - Provider ID (openai, anthropic, gemini, deepseek, mistral)
*
* ## Example Response
*
* ```json
* {
* "data": {
* "id": "openai",
* "name": "OpenAI",
* "description": "OpenAI GPT Models",
* "status": "available",
* "has_credentials": true,
* "credentials_status": "active",
* "credentials": {
* "api_key_format": "sk-...",
* "organization_id_required": false,
* "last_tested": "2025-11-19T10:30:00Z",
* "test_status": "success"
* },
* "supported_features": ["chat", "streaming", "function_calling"],
* "models": [
* {
* "id": "gpt-4-turbo",
* "name": "GPT-4 Turbo",
* "context_window": 128000,
* "max_output_tokens": 4096,
* "supports_streaming": true,
* "pricing": {
* "input_per_1k": 0.01,
* "output_per_1k": 0.03,
* "currency": "USD"
* }
* }
* ],
* "statistics": {
* "total_requests": 1250,
* "total_cost": 45.67,
* "total_tokens": 2500000,
* "last_used": "2025-11-19T11:45:00Z"
* }
* }
* }
* ```
*
* @tags Providers
*
* @param Request $request
* @param string $provider
* @return JsonResponse
*/
public function show(Request $request, string $provider): JsonResponse
{
// Validate provider exists
$supportedProviders = ProviderFactory::getSupportedProviders();
if (!in_array($provider, $supportedProviders)) {
return response()->json([
'error' => [
'code' => 'not_found',
'message' => "Provider '{$provider}' not found",
'status' => 404,
],
], 404);
}
$user = $request->user();
// Get credential info
$credential = GatewayUserCredential::where('gateway_user_id', $user->user_id)
->where('provider', $provider)
->first();
// Get models for this provider
$models = ModelPricing::where('provider', $provider)
->where('is_active', true)
->whereNotNull('model')
->orderBy('model')
->get()
->map(function ($model) {
return [
'id' => $model->model,
'name' => $this->getModelDisplayName($model->model),
'context_window' => $model->context_window,
'max_output_tokens' => $model->max_output_tokens,
'supports_streaming' => true, // Default to true for now
'supports_function_calling' => in_array($model->provider, ['openai', 'anthropic']),
'pricing' => [
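// model_pricing stores prices per one million tokens; the API exposes them per 1K tokens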
'input_per_1k' => round($model->input_price_per_million / 1000, 6),
'output_per_1k' => round($model->output_price_per_million / 1000, 6),
'currency' => 'USD',
],
];
});
// Get usage statistics for this provider
$statistics = LlmRequest::where('gateway_user_id', $user->user_id)
->where('provider', $provider)
->where('status', 'success')
->selectRaw('
COUNT(*) as total_requests,
SUM(total_cost) as total_cost,
SUM(total_tokens) as total_tokens,
MAX(created_at) as last_used
')
->first();
$response = [
'data' => [
'id' => $provider,
'name' => $this->getProviderName($provider),
'description' => $this->getProviderDescription($provider),
'status' => 'available',
'has_credentials' => $credential !== null,
'credentials_status' => $credential?->is_active ? 'active' : ($credential ? 'inactive' : null),
'credentials' => $credential ? [
'api_key_format' => $this->getApiKeyFormat($provider),
'organization_id_required' => false,
'last_tested' => $credential->last_tested_at?->toIso8601String(),
'test_status' => $credential->test_status,
] : null,
'supported_features' => $this->getProviderFeatures($provider),
'api_documentation' => $this->getProviderDocUrl($provider),
'models' => $models,
'statistics' => [
'total_requests' => $statistics->total_requests ?? 0,
'total_cost' => round($statistics->total_cost ?? 0, 4),
'total_tokens' => $statistics->total_tokens ?? 0,
// MAX(created_at) from selectRaw() comes back as a plain string, so parse it before formatting
'last_used' => $statistics->last_used ? Carbon::parse($statistics->last_used)->toIso8601String() : null,
],
],
];
return response()->json($response);
}
/**
* Get human-readable provider name
*/
private function getProviderName(string $provider): string
{
return match ($provider) {
'openai' => 'OpenAI',
'anthropic' => 'Anthropic',
'gemini' => 'Google Gemini',
'deepseek' => 'DeepSeek',
'mistral' => 'Mistral AI',
default => ucfirst($provider),
};
}
/**
* Get provider description
*/
private function getProviderDescription(string $provider): string
{
return match ($provider) {
'openai' => 'OpenAI GPT Models',
'anthropic' => 'Anthropic Claude Models',
'gemini' => 'Google Gemini Models',
'deepseek' => 'DeepSeek AI Models',
'mistral' => 'Mistral AI Models',
default => "{$provider} Models",
};
}
/**
* Get provider features
*/
private function getProviderFeatures(string $provider): array
{
$baseFeatures = ['chat'];
// All providers support streaming
$baseFeatures[] = 'streaming';
// OpenAI and Anthropic support function calling
if (in_array($provider, ['openai', 'anthropic'])) {
$baseFeatures[] = 'function_calling';
}
return $baseFeatures;
}
/**
* Get API key format hint
*/
private function getApiKeyFormat(string $provider): string
{
return match ($provider) {
'openai' => 'sk-...',
'anthropic' => 'sk-ant-...',
'gemini' => 'AI...',
'deepseek' => 'sk-...',
'mistral' => 'sk-...',
default => 'API key',
};
}
/**
* Get provider documentation URL
*/
private function getProviderDocUrl(string $provider): string
{
return match ($provider) {
'openai' => 'https://platform.openai.com/docs/api-reference',
'anthropic' => 'https://docs.anthropic.com/claude/reference',
'gemini' => 'https://ai.google.dev/docs',
'deepseek' => 'https://platform.deepseek.com/api-docs',
'mistral' => 'https://docs.mistral.ai/api',
default => '#',
};
}
/**
* Get model display name from model ID
*/
private function getModelDisplayName(string $modelId): string
{
// Convert model ID to a readable display name
// e.g., "gpt-4-turbo" -> "GPT-4 Turbo"
return ucwords(str_replace(['-', '_'], ' ', $modelId));
}
}
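
For context, here is a hypothetical sketch of how the endpoints listed in the commit message might be registered in `routes/api.php`. The routes file is not part of this view, and the `auth:api` guard name is an assumption based on the custom ApiGuard mentioned above.

```php
// Hypothetical route registration; routes/api.php is not shown in this commit view.
// The 'auth:api' guard name is assumed from the custom ApiGuard noted in the commit message.
use App\Http\Controllers\Api\ProviderController;
use Illuminate\Support\Facades\Route;

Route::middleware('auth:api')->group(function () {
    Route::get('/providers', [ProviderController::class, 'index']);           // GET /api/providers
    Route::get('/providers/{provider}', [ProviderController::class, 'show']); // GET /api/providers/{provider}
});
```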