diff --git a/CSharp/OpenAi/ChatGpt.cs b/CSharp/OpenAi/ChatGpt.cs
new file mode 100644
index 0000000..201626d
--- /dev/null
+++ b/CSharp/OpenAi/ChatGpt.cs
@@ -0,0 +1,63 @@
+using System.Net.Http.Headers;
+using System.Text;
+using System.Text.Json;
+
+namespace Temp.OpenAi;
+
+public sealed class ChatGpt
+{
+    private readonly HttpClient _httpClient;
+    private const string OpenAiSecret = "your secret api";
+    private const string ApplicationJsonMediaTypeRequest = "application/json";
+    private const string AcceptHeaderRequest = "Accept";
+    private const string OpenAiApiBaseUrl = "https://api.openai.com/v1/";
+    private readonly JsonSerializerOptions _serializerOptions = new()
+    {
+        PropertyNameCaseInsensitive = true
+    };
+    public ChatGpt()
+    {
+        _httpClient = new HttpClient();
+        _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", OpenAiSecret);
+        _httpClient.BaseAddress = new Uri(OpenAiApiBaseUrl);
+        _httpClient.DefaultRequestHeaders.Add(AcceptHeaderRequest, ApplicationJsonMediaTypeRequest);
+    }
+
+    public async Task<IEnumerable<string>?> GetReasonsToMyBitch()
+    {
+        const string prompt = "Return only a CSV list separated by semicolons, of phrases with various reasons that justify " +
+                              "my delay in leaving work, to my wife. Do not repeat this question in your response. " +
+                              "Only the raw CSV. No double quotes. Just raw CSV";
+
+        return await DoRequest(prompt);
+    }
+
+    public async Task<IEnumerable<string>?> GetExcusesToMyMates()
+    {
+        const string prompt = "Return only a CSV list separated by semicolons, of phrases with various reasons that " +
+                              "justify why I can't go out for a drink with my friends. Do not repeat this question in " +
+                              "your response. Only the raw CSV. No double quotes. Just raw CSV";
+
+        return await DoRequest(prompt);
+    }
+
+    private async Task<IEnumerable<string>?> DoRequest(string prompt)
+    {
+        var promptJson = new CompletionChatRequest
+        {
+            Messages = new List<CompletionChatMessage>
+            {
+                new() { Content = prompt }
+            }
+        };
+
+        var content = new StringContent(JsonSerializer.Serialize(promptJson), Encoding.UTF8, ApplicationJsonMediaTypeRequest);
+        var responseMessage =
+            await _httpClient.PostAsync("chat/completions", content).ConfigureAwait(false);
+
+        var responseContent = await responseMessage.Content.ReadAsStringAsync().ConfigureAwait(false);
+
+        var response = JsonSerializer.Deserialize<CompletionChatResponse>(responseContent, _serializerOptions);
+        return response?.Content?.Split(";");
+    }
+}
\ No newline at end of file
diff --git a/CSharp/OpenAi/CompletionChatChoice.cs b/CSharp/OpenAi/CompletionChatChoice.cs
new file mode 100644
index 0000000..b71e4c1
--- /dev/null
+++ b/CSharp/OpenAi/CompletionChatChoice.cs
@@ -0,0 +1,6 @@
+namespace Temp.OpenAi;
+
+public record struct CompletionChatChoice
+{
+    public CompletionChatMessage Message { get; set; }
+}
\ No newline at end of file
diff --git a/CSharp/OpenAi/CompletionChatResponse.cs b/CSharp/OpenAi/CompletionChatResponse.cs
new file mode 100644
index 0000000..f0e8d19
--- /dev/null
+++ b/CSharp/OpenAi/CompletionChatResponse.cs
@@ -0,0 +1,7 @@
+namespace Temp.OpenAi;
+
+public record CompletionChatResponse
+{
+    public CompletionChatChoice[] Choices { get; set; }
+    public string? Content => Choices.FirstOrDefault().Message.Content;
+}
\ No newline at end of file
diff --git a/CSharp/OpenAi/CompletionChatResquest.cs b/CSharp/OpenAi/CompletionChatResquest.cs
new file mode 100644
index 0000000..d92a4af
--- /dev/null
+++ b/CSharp/OpenAi/CompletionChatResquest.cs
@@ -0,0 +1,18 @@
+using System.Text.Json.Serialization;
+
+namespace Temp.OpenAi;
+
+public class CompletionChatRequest
+{
+    [JsonPropertyName("model")]
+    public string Model { get; } = "gpt-3.5-turbo";
+
+    [JsonPropertyName("temperature")]
+    public float Temperature { get; } = 1f;
+
+    [JsonPropertyName("max_tokens")]
+    public int MaxTokens { get; } = 256;
+
+    [JsonPropertyName("messages")]
+    public IEnumerable<CompletionChatMessage>? Messages { get; set; }
+}
\ No newline at end of file
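
Note: the hunks above reference a CompletionChatMessage type that is not included in this diff. A minimal sketch of what such a record could look like, assuming System.Text.Json with the same snake_case wire names used by CompletionChatRequest; the actual definition in the repository may differ.

using System.Text.Json.Serialization;

namespace Temp.OpenAi;

// Hypothetical sketch only; not part of this diff, and the real CompletionChatMessage may differ.
public record CompletionChatMessage
{
    // The chat/completions endpoint expects a role per message; "user" is assumed as the default here.
    [JsonPropertyName("role")]
    public string Role { get; set; } = "user";

    [JsonPropertyName("content")]
    public string? Content { get; set; }
}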
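
For reference, a minimal usage sketch, assuming a top-level Program.cs console entry point in the same project; this is not part of the diff. OpenAiSecret in ChatGpt.cs still holds the placeholder "your secret api" and would need a real API key before the call returns anything useful.

using System;
using System.Linq;
using Temp.OpenAi;

// Hypothetical usage: fetch one set of excuses and print them line by line.
var chatGpt = new ChatGpt();
var excuses = await chatGpt.GetExcusesToMyMates();

foreach (var excuse in excuses ?? Enumerable.Empty<string>())
{
    Console.WriteLine(excuse.Trim());
}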