Core Features:
- Multi-provider support (OpenAI, Anthropic, DeepSeek, Gemini, Mistral)
- Provider service architecture with an abstract base class (sketched after this list)
- Dynamic model discovery from provider APIs
- Encrypted per-user provider credential storage

Admin Interface:
- Complete admin panel built with Livewire components
- User management with CRUD operations
- API key management with testing capabilities
- Budget system with limits and reset schedules
- Usage logs with filtering and CSV export
- Model pricing management with cost calculator
- Dashboard with Chart.js visualizations

Database Schema:
- MariaDB migrations for all tables
- User provider credentials (encrypted)
- LLM request logging
- Budget tracking and rate limiting
- Model pricing configuration

API Implementation:
- OpenAI-compatible endpoints
- Budget checking middleware
- Rate limit enforcement
- Request logging jobs
- Cost calculation service

Testing:
- Unit tests for all provider services
- Provider factory tests
- Cost calculator tests

Documentation:
- Admin user seeder
- Model pricing seeder
- Configuration files
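To make the provider layer concrete, here is a minimal sketch of what the abstract base class behind the multi-provider support could look like. Everything in it is an assumption for illustration: the namespace, the class name (AbstractProviderService), and the method names (chat(), listModels(), name()) are not taken from the actual codebase.

<?php

namespace App\Services\Providers;

// Hypothetical abstract base class that the concrete provider services
// (OpenAI, Anthropic, DeepSeek, Gemini, Mistral) would extend.
// Names and signatures are illustrative assumptions only.
abstract class AbstractProviderService
{
    public function __construct(
        protected string $apiKey,         // decrypted per-user credential
        protected ?string $baseUrl = null // optional override of the provider endpoint
    ) {}

    // Send an OpenAI-style chat completion payload and return the decoded response.
    abstract public function chat(array $payload): array;

    // Discover the models the provider currently exposes (dynamic model discovery).
    abstract public function listModels(): array;

    // Short provider identifier used for logging and pricing lookups.
    abstract public function name(): string;
}

A factory (the one exercised by the provider factory tests listed above) would then map a provider name to one of these concrete services.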
app/Jobs/LogLlmRequest.php · PHP · 89 lines · 3.0 KiB
<?php

namespace App\Jobs;

use App\Models\LlmRequest;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\SerializesModels;
use Illuminate\Support\Facades\Log;

/**
 * Queued job that persists a single LLM API request (payloads, token counts,
 * costs, status, and client metadata) as an LlmRequest record, keeping the
 * database write off the hot path of the API response.
 */
class LogLlmRequest implements ShouldQueue
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    /** Seconds the job may run before it is killed. */
    public int $timeout = 30;

    /** Number of attempts before the job is marked as failed. */
    public int $tries = 3;

    /** Maximum unhandled exceptions before the job is marked as failed. */
    public int $maxExceptions = 3;

    public function __construct(
        private int $userId,
        private string $provider,
        private string $model,
        private array $requestPayload,
        private ?array $responsePayload,
        private int $promptTokens,
        private int $completionTokens,
        private int $totalTokens,
        private ?int $responseTimeMs,
        private float $promptCost,
        private float $completionCost,
        private float $totalCost,
        private string $status,
        private ?string $errorMessage = null,
        private ?int $httpStatus = null,
        private ?string $ipAddress = null,
        private ?string $userAgent = null,
        private ?string $requestId = null
    ) {}

    /**
     * Write the request record; rethrow on failure so the queue retries.
     */
    public function handle(): void
    {
        try {
            LlmRequest::create([
                'user_id' => $this->userId,
                'provider' => $this->provider,
                'model' => $this->model,
                'request_payload' => $this->requestPayload,
                'response_payload' => $this->responsePayload,
                'prompt_tokens' => $this->promptTokens,
                'completion_tokens' => $this->completionTokens,
                'total_tokens' => $this->totalTokens,
                'response_time_ms' => $this->responseTimeMs,
                'prompt_cost' => $this->promptCost,
                'completion_cost' => $this->completionCost,
                'total_cost' => $this->totalCost,
                'status' => $this->status,
                'error_message' => $this->errorMessage,
                'http_status' => $this->httpStatus,
                'ip_address' => $this->ipAddress,
                'user_agent' => $this->userAgent,
                'request_id' => $this->requestId,
            ]);
        } catch (\Exception $e) {
            Log::error('Failed to log LLM request', [
                'error' => $e->getMessage(),
                'user_id' => $this->userId,
                'provider' => $this->provider,
                'model' => $this->model,
                'request_id' => $this->requestId,
            ]);

            throw $e;
        }
    }

    /**
     * Called once every retry has been exhausted.
     */
    public function failed(\Throwable $exception): void
    {
        Log::critical('LogLlmRequest job failed after all retries', [
            'user_id' => $this->userId,
            'provider' => $this->provider,
            'model' => $this->model,
            'request_id' => $this->requestId,
            'error' => $exception->getMessage(),
        ]);
    }
}
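For context on how this job is used, below is a minimal dispatch sketch. The call site, the variable names ($payload, $response, $usage, $costs, $elapsedMs), and the 'openai' / 'gpt-4o' values are illustrative assumptions; only the constructor signature of LogLlmRequest comes from the file above.

use App\Jobs\LogLlmRequest;
use Illuminate\Support\Str;

// Hypothetical call site, e.g. inside an OpenAI-compatible chat completions
// controller, once the provider response and cost calculation are available.
LogLlmRequest::dispatch(
    $request->user()->id,
    'openai',                       // provider name (illustrative)
    'gpt-4o',                       // model name (illustrative)
    $payload,                       // original request body as an array
    $response,                      // decoded provider response, or null on failure
    $usage['prompt_tokens'],
    $usage['completion_tokens'],
    $usage['total_tokens'],
    $elapsedMs,                     // measured response time in milliseconds
    $costs['prompt'],
    $costs['completion'],
    $costs['total'],
    'success',                      // status
    null,                           // error message
    200,                            // upstream HTTP status
    $request->ip(),
    $request->userAgent(),
    (string) Str::uuid()            // request correlation id
);

Because the job implements ShouldQueue, the insert runs on a queue worker rather than inside the request cycle, and the $timeout, $tries, and $maxExceptions settings above govern what happens if the database write fails.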