        $user = $request->user();
        $providers = ProviderFactory::getSupportedProviders();

        $providerData = [];

        foreach ($providers as $providerId) {
            // Get credential info for this provider
            $credential = GatewayUserCredential::where('gateway_user_id', $user->user_id)
                ->where('provider', $providerId)
                ->first();

            // Get model count for this provider
            $modelsCount = ModelPricing::where('provider', $providerId)
                ->where('is_active', true)
                ->whereNotNull('model')
                ->count();

            $providerData[] = [
                'id' => $providerId,
                'name' => $this->getProviderName($providerId),
                'status' => 'available',
                'has_credentials' => $credential !== null,
                'credentials_status' => $credential?->is_active ? 'active' : ($credential ? 'inactive' : null),
                'last_tested' => $credential?->last_tested_at?->toIso8601String(),
                'supported_features' => $this->getProviderFeatures($providerId),
                'models_count' => $modelsCount,
            ];
        }

        return response()->json([
            'data' => $providerData,
        ]);
    }

    /**
     * Get detailed information about a specific provider
     *
     * Returns comprehensive information about a specific LLM provider, including:
     * - Provider details and capabilities
     * - User's credential status
     * - Available models with pricing
     * - Usage statistics
     *
     * ## Path Parameters
     *
     * - `provider` - Provider ID (openai, anthropic, gemini, deepseek, mistral)
     *
     * ## Example Response
     *
     * ```json
     * {
     *   "data": {
     *     "id": "openai",
     *     "name": "OpenAI",
     *     "description": "OpenAI GPT Models",
     *     "status": "available",
     *     "has_credentials": true,
     *     "credentials_status": "active",
     *     "credentials": {
     *       "api_key_format": "sk-...",
     *       "organization_id_required": false,
     *       "last_tested": "2025-11-19T10:30:00Z",
     *       "test_status": "success"
     *     },
     *     "supported_features": ["chat", "streaming", "function_calling"],
     *     "models": [
     *       {
     *         "id": "gpt-4-turbo",
     *         "name": "GPT-4 Turbo",
     *         "context_window": 128000,
     *         "max_output_tokens": 4096,
     *         "supports_streaming": true,
     *         "pricing": {
     *           "input_per_1k": 0.01,
     *           "output_per_1k": 0.03,
     *           "currency": "USD"
     *         }
     *       }
     *     ],
     *     "statistics": {
     *       "total_requests": 1250,
     *       "total_cost": 45.67,
     *       "total_tokens": 2500000,
     *       "last_used": "2025-11-19T11:45:00Z"
     *     }
     *   }
     * }
     * ```
     *
     * @tags Providers
     *
     * @param Request $request
     * @param string $provider
     * @return JsonResponse
     */
    public function show(Request $request, string $provider): JsonResponse
    {
        // Validate provider exists
        $supportedProviders = ProviderFactory::getSupportedProviders();

        if (!in_array($provider, $supportedProviders)) {
            return response()->json([
                'error' => [
                    'code' => 'not_found',
                    'message' => "Provider '{$provider}' not found",
                    'status' => 404,
                ],
            ], 404);
        }

        $user = $request->user();

        // Get credential info
        $credential = GatewayUserCredential::where('gateway_user_id', $user->user_id)
            ->where('provider', $provider)
            ->first();

        // Get models for this provider
        $models = ModelPricing::where('provider', $provider)
            ->where('is_active', true)
            ->whereNotNull('model')
            ->orderBy('model')
            ->get()
            ->map(function ($model) {
                return [
                    'id' => $model->model,
                    'name' => $this->getModelDisplayName($model->model),
                    'context_window' => $model->context_window,
                    'max_output_tokens' => $model->max_output_tokens,
                    'supports_streaming' => true, // Default to true for now
                    'supports_function_calling' => in_array($model->provider, ['openai', 'anthropic']),
                    'pricing' => [
                        'input_per_1k' => round($model->input_price_per_million / 1000, 6),
                        'output_per_1k' => round($model->output_price_per_million / 1000, 6),
                        'currency' => 'USD',
                    ],
                ];
            });

        // Get usage statistics for this provider
        $statistics = LlmRequest::where('gateway_user_id', $user->user_id)
            ->where('provider', $provider)
            ->where('status', 'success')
            ->selectRaw('
                COUNT(*) as total_requests,
                SUM(total_cost) as total_cost,
                SUM(total_tokens) as total_tokens,
                MAX(created_at) as last_used
            ')
            ->first();

        $response = [
            'data' => [
                'id' => $provider,
                'name' => $this->getProviderName($provider),
                'description' => $this->getProviderDescription($provider),
                'status' => 'available',
                'has_credentials' => $credential !== null,
                'credentials_status' => $credential?->is_active ? 'active' : ($credential ? 'inactive' : null),
                'credentials' => $credential ? [
                    'api_key_format' => $this->getApiKeyFormat($provider),
                    'organization_id_required' => false,
                    'last_tested' => $credential->last_tested_at?->toIso8601String(),
                    'test_status' => $credential->test_status,
                ] : null,
                'supported_features' => $this->getProviderFeatures($provider),
                'api_documentation' => $this->getProviderDocUrl($provider),
                'models' => $models,
                'statistics' => [
                    'total_requests' => $statistics->total_requests ?? 0,
                    'total_cost' => round($statistics->total_cost ?? 0, 4),
                    'total_tokens' => $statistics->total_tokens ?? 0,
                    // MAX(created_at) selected via selectRaw() comes back as a raw
                    // string, not a Carbon instance, so parse it before formatting
                    'last_used' => $statistics->last_used
                        ? \Illuminate\Support\Carbon::parse($statistics->last_used)->toIso8601String()
                        : null,
                ],
            ],
        ];

        return response()->json($response);
    }

    /**
     * Get human-readable provider name
     */
    private function getProviderName(string $provider): string
    {
        return match ($provider) {
            'openai' => 'OpenAI',
            'anthropic' => 'Anthropic',
            'gemini' => 'Google Gemini',
            'deepseek' => 'DeepSeek',
            'mistral' => 'Mistral AI',
            default => ucfirst($provider),
        };
    }

    /**
     * Get provider description
     */
    private function getProviderDescription(string $provider): string
    {
        return match ($provider) {
            'openai' => 'OpenAI GPT Models',
            'anthropic' => 'Anthropic Claude Models',
            'gemini' => 'Google Gemini Models',
            'deepseek' => 'DeepSeek AI Models',
            'mistral' => 'Mistral AI Models',
            default => "{$provider} Models",
        };
    }

    /**
     * Get provider features
     */
    private function getProviderFeatures(string $provider): array
    {
        $baseFeatures = ['chat'];

        // All providers support streaming
        $baseFeatures[] = 'streaming';

        // OpenAI and Anthropic support function calling
        if (in_array($provider, ['openai', 'anthropic'])) {
            $baseFeatures[] = 'function_calling';
        }

        return $baseFeatures;
    }

    /**
     * Get API key format hint
     */
    private function getApiKeyFormat(string $provider): string
    {
        return match ($provider) {
            'openai' => 'sk-...',
            'anthropic' => 'sk-ant-...',
            'gemini' => 'AI...',
            'deepseek' => 'sk-...',
            'mistral' => 'sk-...',
            default => 'API key',
        };
    }

    /**
     * Get provider documentation URL
     */
    private function getProviderDocUrl(string $provider): string
    {
        return match ($provider) {
            'openai' => 'https://platform.openai.com/docs/api-reference',
            'anthropic' => 'https://docs.anthropic.com/claude/reference',
            'gemini' => 'https://ai.google.dev/docs',
            'deepseek' => 'https://platform.deepseek.com/api-docs',
            'mistral' => 'https://docs.mistral.ai/api',
            default => '#',
        };
    }

    /**
     * Get model display name from model ID
     */
    private function getModelDisplayName(string $modelId): string
    {
        // Convert model ID to a readable display name,
        // e.g., "gpt-4-turbo" -> "Gpt 4 Turbo"
        return ucwords(str_replace(['-', '_'], ' ', $modelId));
    }
}
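
/*
 * Illustrative only: a minimal sketch of how the index() and show() endpoints
 * above might be registered in routes/api.php. The controller class name,
 * route prefix, and middleware below are assumptions; the actual class
 * declaration and route file sit outside this excerpt.
 *
 * Route::middleware('auth:sanctum')->prefix('v1')->group(function () {
 *     Route::get('providers', [ProviderController::class, 'index']);
 *     Route::get('providers/{provider}', [ProviderController::class, 'show']);
 * });
 */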