130 lines
4.8 KiB
C#
130 lines
4.8 KiB
C#
using ChatServerSpace.Models;
|
|
using System.Net.Http.Headers;
|
|
using System.Text;
|
|
using System.Text.Json;
|
|
|
|
namespace ChatServerSpace.Services
{
    /// <summary>
    /// <see cref="IChatService"/> implementation that forwards chat messages to the
    /// DeepInfra OpenAI-compatible chat-completions endpoint and persists both sides
    /// of the conversation through <see cref="IChatHistoryService"/>.
    /// </summary>
    public class DeepInfraChatService : IChatService
    {
        private readonly IChatHistoryService _historyService;
        private readonly HttpClient _httpClient;
        private readonly ILogger<DeepInfraChatService> _logger;
        private readonly string _apiKey;
        private readonly string _modelId;
        private readonly string _endpoint;

        // Cached once: avoids allocating a new JsonSerializerOptions per request (CA1869).
        private static readonly JsonSerializerOptions SerializerOptions =
            new() { PropertyNameCaseInsensitive = true };

        /// <summary>
        /// Reads the DeepInfra settings from the <c>ChatServer:*</c> configuration keys
        /// and configures the injected <see cref="HttpClient"/> once, up front.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the required <c>ChatServer:ApiKey</c> setting is missing.
        /// </exception>
        public DeepInfraChatService(
            IChatHistoryService historyService,
            IConfiguration config,
            ILogger<DeepInfraChatService> logger,
            HttpClient httpClient)
        {
            _historyService = historyService;
            _logger = logger;
            _httpClient = httpClient;

            // BUG FIX: the original threw ArgumentNullException, whose string argument is a
            // *parameter name* rather than a message, and the text referenced the wrong
            // key ("ServeSpace" instead of "ChatServer").
            _apiKey = config["ChatServer:ApiKey"]
                ?? throw new InvalidOperationException("ChatServer:ApiKey configuration is missing");
            _modelId = config["ChatServer:ModelId"] ?? "meta-llama/Meta-Llama-3.1-70B-Instruct";
            _endpoint = config["ChatServer:Url"] ?? "https://api.deepinfra.com/v1/openai/chat/completions";

            // Configure the HTTP client. BUG FIX: HttpClient.Timeout may only be set before
            // the first request is sent; the original assigned it inside ProcessMessageAsync,
            // which throws InvalidOperationException on every call after the first.
            _httpClient.Timeout = TimeSpan.FromSeconds(600);
            _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
            _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
        }

        /// <summary>
        /// Persists the user's message, sends the full session history to the DeepInfra
        /// API, persists the assistant's reply, and returns it to the caller.
        /// </summary>
        /// <param name="request">Incoming message plus the session it belongs to.</param>
        /// <returns>The assistant's reply wrapped in a <see cref="ChatResponse"/>.</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when communication with the DeepInfra API fails; the original cause is
        /// attached as the inner exception.
        /// </exception>
        public async Task<ChatResponse> ProcessMessageAsync(ChatRequest request)
        {
            try
            {
                // Save the user's message so it is included in the history sent below.
                var userMessage = new ChatMessage
                {
                    SessionId = request.SessionId,
                    Role = "user",
                    Content = request.Message
                };
                await _historyService.SaveMessageAsync(userMessage);

                // Build the OpenAI-style message list from the full session history.
                var history = await _historyService.GetSessionHistoryAsync(request.SessionId);
                var messages = history.Select(m => new
                {
                    role = m.Role.ToLowerInvariant(),
                    content = m.Content
                }).ToList();

                // Request payload for the chat-completions endpoint.
                var payload = new
                {
                    model = _modelId,
                    messages,
                    max_tokens = 8192,
                    temperature = 0.8,
                    top_p = 0.95
                };

                // NOTE(review): this logs the entire payload, including user content, at
                // Information level — consider Debug level or redaction in production.
                _logger.LogInformation("Enviando solicitação para a API DeepInfra: {0}", JsonSerializer.Serialize(payload));

                // Dispose the request/response objects deterministically (the original leaked both).
                using var content = new StringContent(
                    JsonSerializer.Serialize(payload),
                    Encoding.UTF8,
                    "application/json");

                using var response = await _httpClient.PostAsync(_endpoint, content);

                // Throws HttpRequestException on non-2xx, which the catch below wraps.
                response.EnsureSuccessStatusCode();

                var responseContent = await response.Content.ReadAsStringAsync();
                _logger.LogInformation("Resposta recebida da API: {0}", responseContent);

                var apiResponse = JsonSerializer.Deserialize<DeepInfraResponse>(responseContent, SerializerOptions);

                // Fall back to a placeholder ("no response") when the envelope is empty.
                var responseText = apiResponse?.Choices?.FirstOrDefault()?.Message?.Content ?? "Sem resposta";

                // Save the assistant's reply so it becomes part of the history on later turns.
                var assistantMessage = new ChatMessage
                {
                    SessionId = request.SessionId,
                    Role = "assistant",
                    Content = responseText
                };
                await _historyService.SaveMessageAsync(assistantMessage);

                return new ChatResponse
                {
                    SessionId = request.SessionId,
                    Response = responseText
                };
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Erro ao processar mensagem com DeepInfra API");
                // More specific than the bare Exception the original threw; callers
                // catching Exception are unaffected.
                throw new InvalidOperationException("Erro ao se comunicar com a API DeepInfra", ex);
            }
        }

        // DTOs for deserializing the DeepInfra response envelope
        // (only the fields this service reads).
        private class DeepInfraResponse
        {
            public List<DeepInfraChoice>? Choices { get; set; }
        }

        private class DeepInfraChoice
        {
            public DeepInfraMessage? Message { get; set; }
        }

        private class DeepInfraMessage
        {
            public string? Content { get; set; }
        }
    }
}