// CtrEditor/IA/gtpask.cs


using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using LanguageDetection;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System.Diagnostics;
using CtrEditor.FuncionesBase;
namespace GTPCorrgir
{
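// Describes which LLM backend to use and the processing mode
// (correction, spelling, or translation) for a request.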
public class Opciones
{
public enum LLM_a_Usar
{
OpenAI,
Ollama,
Groq,
Grok
}
public enum modoDeUso
{
Corregir,
Ortografia,
Traducir_a_Ingles,
Traducir_a_Italiano,
Traducir_a_Espanol,
}
public Dictionary<LLM_a_Usar, string> nombreLLM = new Dictionary<LLM_a_Usar, string>
{
{ LLM_a_Usar.Ollama, "Ollama" },
{ LLM_a_Usar.Groq, "Groq" },
{ LLM_a_Usar.Grok, "Grok" },
{ LLM_a_Usar.OpenAI, "OpenAI" },
};
public LLM_a_Usar LLM { get; set; }
public modoDeUso modo { get; set; }
public string nombreDeLLM()
{
return nombreLLM[LLM];
}
public Opciones() { } // Changed from private to public
}
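// Strongly typed view of the "ApiKeys" section of IA/appsettings.json.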
public class ApiSettings
{
public class ApiKeySection
{
public string OpenAI { get; set; }
public string Groq { get; set; }
public string Grok { get; set; }
}
public ApiKeySection ApiKeys { get; set; }
}
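// Sends TextoACorregir together with the system prompt in TextodeSistema to the
// selected LLM backend and exposes the model's reply through TextoCorregido.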
public class gtpask : IDisposable
{
private string _openAiApiKey;
private string _groqApiKey;
private string _grokApiKey;
private readonly HttpClient _httpClient;
private bool _disposed;
public string IdiomaDetectado { get; private set; }
public string TextoACorregir { get; set; }
public string TextoCorregido { get; private set; }
public string TextodeSistema { get; set; }
public gtpask()
{
try
{
_httpClient = new HttpClient();
LoadApiKeys();
InitializeHttpClient();
}
catch (Exception ex)
{
throw new ApplicationException("Failed to initialize gtpask", ex);
}
}
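// Loads the OpenAI, Groq and Grok API keys from IA/appsettings.json
// located next to the executable.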
private void LoadApiKeys()
{
try
{
string configPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "IA/appsettings.json");
if (!File.Exists(configPath))
{
throw new FileNotFoundException("Configuration file (appsettings.json) not found.");
}
string jsonContent = File.ReadAllText(configPath);
var settings = JsonConvert.DeserializeObject<ApiSettings>(jsonContent);
_openAiApiKey = settings?.ApiKeys?.OpenAI;
_groqApiKey = settings?.ApiKeys?.Groq;
_grokApiKey = settings?.ApiKeys?.Grok;
ValidateApiKeys();
}
catch (Exception ex)
{
throw new ApplicationException("Failed to load API keys", ex);
}
}
private void ValidateApiKeys()
{
var missingKeys = new List<string>();
if (string.IsNullOrEmpty(_openAiApiKey)) missingKeys.Add("OpenAI");
if (string.IsNullOrEmpty(_groqApiKey)) missingKeys.Add("Groq");
if (string.IsNullOrEmpty(_grokApiKey)) missingKeys.Add("Grok");
if (missingKeys.Any())
{
string missingKeysStr = string.Join(", ", missingKeys);
throw new ApplicationException($"Missing API keys: {missingKeysStr}");
}
}
private void InitializeHttpClient()
{
_httpClient.Timeout = TimeSpan.FromSeconds(30);
_httpClient.DefaultRequestHeaders.Clear();
_httpClient.DefaultRequestHeaders.Add("Accept", "application/json");
}
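// Detects the language of TextoACorregir; returns false if detection fails
// or the language is unknown. (Not called by the current correction flow.)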
private bool DetectarIdioma()
{
try
{
IdiomaDetectado = Idiomas.DetectarIdioma(TextoACorregir);
return IdiomaDetectado != "Unknown";
}
catch
{
return false;
}
}
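// Routes the request to the backend selected in Modelo.LLM and stores
// the non-empty reply in TextoCorregido.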
private async Task ProcesarTextoConLLM(Opciones Modelo)
{
try
{
string respuestaLLM;
switch (Modelo.LLM)
{
case Opciones.LLM_a_Usar.OpenAI:
respuestaLLM = await CallOpenAiApi();
break;
case Opciones.LLM_a_Usar.Ollama:
respuestaLLM = await CallOllamaApi();
break;
case Opciones.LLM_a_Usar.Groq:
respuestaLLM = await CallGroqAiApi();
break;
case Opciones.LLM_a_Usar.Grok:
respuestaLLM = await CallGrokApi();
break;
default:
throw new ArgumentException("LLM no válido");
}
if (string.IsNullOrEmpty(respuestaLLM))
{
throw new ApplicationException("No se recibió respuesta del LLM");
}
TextoCorregido = respuestaLLM;
}
catch
{
throw;
}
}
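// Test helper: fakes a correction after a short delay without calling any service.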
private async Task SimularCorreccion()
{
await Task.Delay(1000);
TextoCorregido = "Texto simulado de prueba";
}
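// Calls the x.ai chat completions endpoint with the grok-beta model.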
private async Task<string> CallGrokApi()
{
try
{
_httpClient.DefaultRequestHeaders.Clear();
_httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {_grokApiKey}");
var requestData = new
{
messages = new[]
{
new { role = "system", content = TextodeSistema },
new { role = "user", content = TextoACorregir }
},
model = "grok-beta",
stream = false,
temperature = 0
};
return await EnviarSolicitudLLM("https://api.x.ai/v1/chat/completions", requestData);
}
catch
{
throw;
}
}
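// Calls a local Ollama server (port 11434) with the llama3.2:latest model.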
private async Task<string> CallOllamaApi()
{
try
{
var requestData = new
{
model = "llama3.2:latest",
messages = new[]
{
new { role = "system", content = TextodeSistema },
new { role = "user", content = TextoACorregir }
},
stream = false
};
return await EnviarSolicitudLLM("http://127.0.0.1:11434/api/chat", requestData);
}
catch
{
throw;
}
}
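// Calls the OpenAI chat completions endpoint with the gpt-4o-mini model.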
private async Task<string> CallOpenAiApi()
{
try
{
_httpClient.DefaultRequestHeaders.Clear();
_httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {_openAiApiKey}");
var requestData = new
{
model = "gpt-4o-mini",
messages = new[]
{
new { role = "system", content = TextodeSistema },
new { role = "user", content = TextoACorregir }
}
};
return await EnviarSolicitudLLM("https://api.openai.com/v1/chat/completions", requestData);
}
catch
{
throw;
}
}
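// Calls the Groq OpenAI-compatible endpoint with the llama-3.2-3b-preview model.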
private async Task<string> CallGroqAiApi()
{
try
{
_httpClient.DefaultRequestHeaders.Clear();
_httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {_groqApiKey}");
var requestData = new
{
model = "llama-3.2-3b-preview",
messages = new[]
{
new { role = "system", content = TextodeSistema },
new { role = "user", content = TextoACorregir }
},
max_tokens = 2048,
stream = false
};
return await EnviarSolicitudLLM("https://api.groq.com/openai/v1/chat/completions", requestData);
}
catch
{
throw;
}
}
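// Serializes requestData as JSON, posts it to the given endpoint and extracts the
// reply text from either the Ollama or the OpenAI-compatible response format.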
private async Task<string> EnviarSolicitudLLM(string endpoint, object requestData)
{
try
{
var content = new StringContent(
JsonConvert.SerializeObject(requestData),
Encoding.UTF8,
"application/json"
);
using var response = await _httpClient.PostAsync(endpoint, content);
var responseContent = await response.Content.ReadAsStringAsync();
if (!response.IsSuccessStatusCode)
{
throw new HttpRequestException(
$"Error en la solicitud HTTP: {response.StatusCode} - {responseContent}"
);
}
var data = JsonConvert.DeserializeObject<dynamic>(responseContent);
// Handle the different response formats returned by each LLM.
// Note: the local Ollama endpoint (http://127.0.0.1:11434/api/chat) does not
// contain the word "ollama", so match on its host/port as well.
if (endpoint.Contains("127.0.0.1:11434") || endpoint.Contains("ollama"))
{
if (data.done == true && data.message != null)
{
return data.message.content;
}
throw new ApplicationException("Formato de respuesta de Ollama inválido");
}
else // OpenAI, Groq, Grok
{
if (data.choices != null && data.choices.Count > 0)
{
return data.choices[0].message.content;
}
throw new ApplicationException("No se encontró contenido en la respuesta del LLM");
}
}
catch
{
throw;
}
}
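// Public entry point: validates the input and system texts and runs the
// correction (currently hard-wired to the Grok backend).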
public async Task CorregirTexto()
{
try
{
if (string.IsNullOrEmpty(TextoACorregir))
{
throw new ArgumentException("TextoACorregir cannot be null or empty");
}
if (string.IsNullOrEmpty(TextodeSistema))
{
throw new ArgumentException("TextodeSistema cannot be null or empty");
}
var opciones = new Opciones { LLM = Opciones.LLM_a_Usar.Grok };
await ProcesarTextoConLLM(opciones);
}
catch (Exception ex)
{
throw new LLMException("Error during text correction", ex);
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_httpClient?.Dispose();
}
_disposed = true;
}
}
~gtpask()
{
Dispose(false);
}
}
// Helper class for application-specific exception handling
public class LLMException : Exception
{
public LLMException(string message) : base(message) { }
public LLMException(string message, Exception innerException) : base(message, innerException) { }
}
// Helper class for argument validation
public static class Validations
{
public static void ValidateNotNull(object value, string paramName)
{
if (value == null)
{
throw new ArgumentNullException(paramName);
}
}
public static void ValidateNotNullOrEmpty(string value, string paramName)
{
if (string.IsNullOrEmpty(value))
{
throw new ArgumentException("Value cannot be null or empty", paramName);
}
}
}
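// Minimal usage sketch (illustrative only): shows how a caller might drive gtpask
// end to end. The class name, method name and system prompt below are assumptions
// made for this example and are not used elsewhere in the project.
public static class GtpaskUsageExample
{
public static async Task<string> CorregirAsync(string texto)
{
using var corrector = new gtpask();
// Hypothetical system prompt; real callers supply their own instructions.
corrector.TextodeSistema = "Correct the spelling and grammar of the user's text and reply only with the corrected text.";
corrector.TextoACorregir = texto;
await corrector.CorregirTexto();
return corrector.TextoCorregido;
}
}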
}