Create the Conversation Thread
There are a couple of headers to configure before loading up the URI and firing off the request. The first is the Authorization header, which uses a simple Bearer token scheme with your project API key as the token. The second is a new header indicating that we're connecting to v2 of the Assistants Beta API.
_client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
_client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");
response = _client.PostAsync("https://api.openai.com/v1/threads", null).Result;
So far, so good. Nothing most developers haven't done dozens of times over. The tricky part comes with the response. Unfortunately, each endpoint returns a different JSON model. I've created a series of models in my project to deserialize each response into a POCO, which, at this point, I feel was overkill. I could have done this with JObjects and saved myself a few dozen lines of code.
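For a sense of that alternative: the thread-creation endpoint returns a small JSON document along the lines of { "id": "thread_...", "object": "thread", "created_at": ..., "metadata": {} }, so a JObject version of the handling might look like this minimal sketch (assuming Newtonsoft.Json.Linq is imported; not the code the project actually ships):

// Hypothetical alternative to the POCO models: pull only the thread id
// straight out of the raw JSON with JObject (requires using Newtonsoft.Json.Linq;).
var threadIdResponse = response.Content.ReadAsStringAsync().Result;
if (!response.IsSuccessStatusCode)
{
    var error = JObject.Parse(threadIdResponse);
    throw new AiClientException(error["error"]?["message"]?.ToString());
}
_threadId = JObject.Parse(threadIdResponse)["id"]?.ToString() ?? string.Empty;
return _threadId;

What follows is the POCO-based handling the project actually uses: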
var threadIdResponse = response.Content.ReadAsStringAsync().Result;
if (!response.IsSuccessStatusCode)
{
    var errorResponse = JsonConvert.DeserializeObject<ErrorResponse>(threadIdResponse);
    throw new AiClientException(errorResponse?.Error.Message);
}

var threadIdObj = JsonConvert.DeserializeObject<ThreadResponse>(threadIdResponse);
_threadId = threadIdObj?.Id ?? string.Empty;
return _threadId;
Here we’ve got the response, and it’s time to check and parse what we got back. In my error trap, I’ve got an exception called AiClientException. This is a new exception I created in the project that simply wraps Exception for better delineation on the client. If we’ve got a successful response, we deserialize it into a ThreadResponse object:
public class ThreadResponse
{
    public string Id { get; set; }
    public string Object { get; set; }
    public long CreatedAt { get; set; }
    public object AssistantId { get; set; }
    public string ThreadId { get; set; }
    public object RunId { get; set; }
    public string Role { get; set; }
    public List<AiContent> Content { get; set; }
    public List<object> FileIds { get; set; }
    public Metadata Metadata { get; set; }
}
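Two supporting types referenced above, AiClientException and ErrorResponse, aren't shown in the walkthrough. A minimal sketch of what they might look like, assuming the standard OpenAI error envelope of { "error": { "message": ..., "type": ... } }:

// Hypothetical reconstruction of the supporting types used in the error trap.
public class AiClientException : Exception
{
    public AiClientException(string? message) : base(message) { }
}

public class ErrorResponse
{
    [JsonProperty("error")]
    public ErrorDetail Error { get; set; }
}

public class ErrorDetail
{
    [JsonProperty("message")]
    public string Message { get; set; }

    [JsonProperty("type")]
    public string Type { get; set; }
}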
Adding Messages
if (string.IsNullOrEmpty(_apiKey)) throw new AiClientException("OpenAI ApiKey is not set");
if (string.IsNullOrEmpty(_threadId)) CreateThread();
if (string.IsNullOrEmpty(message)) throw new AiClientException("Message is empty");
_client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
_client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");

var messageRequest = new AiRequestMessage { Role = "user", Content = message };
public class AiRequestMessage
{
    [JsonProperty("role")]
    public string Role { get; set; }

    [JsonProperty("content")]
    public string Content { get; set; }
}
Once the message object is created, we just need to stringify it, load it into our request, and send it off. There is not much useful information returned from the request; an HTTP 200 is an indication that the message was successfully added:
var json = JsonConvert.SerializeObject(messageRequest);
var content = new StringContent(json, Encoding.UTF8, "application/json");
response = await _client.PostAsync($"https://api.openai.com/v1/threads/{_threadId}/messages", content);

var threadIdResponse = response.Content.ReadAsStringAsync().Result;
if (!response.IsSuccessStatusCode)
{
    var errorResponse = JsonConvert.DeserializeObject<ErrorResponse>(threadIdResponse);
    throw new AiClientException(errorResponse?.Error.Message);
}
var messageRequest = new AiRequestMessage { Role = "user", Content = message };
var messageRequest = new AiRequestMessage { Role = "assistant", Content = message };
Run
_client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
_client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");
var custAsst = new Assistant { assistant_id = _assistantId };
var json = JsonConvert.SerializeObject(custAsst);
var content = new StringContent(json, Encoding.UTF8, "application/json");
response = await _client.PostAsync($"https://api.openai.com/v1/threads/{_threadId}/runs", content);
var responseContent = await response.Content.ReadAsStringAsync();
var responseObj = JsonConvert.DeserializeObject<RunResponse>(responseContent);
var runId = responseObj?.Id;
var runStatus = responseObj?.Status;

// If the run hasn't reached a final status yet, poll until it does.
if (runId != null)
{
    while (runStatus != null && !FinalStatuses.Contains(runStatus))
    {
        await Task.Delay(1000);
        response = await _client.GetAsync($"https://api.openai.com/v1/threads/{_threadId}/runs/{runId}");
        responseContent = response.Content.ReadAsStringAsync().Result;
        responseObj = JsonConvert.DeserializeObject<RunResponse>(responseContent);
        runStatus = responseObj?.Status;
    }
}

await GetResponse();
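The Assistant request object and the RunResponse model used for polling aren't shown in the snippets above. A minimal sketch of what they might look like, limited to the fields this code actually touches (the real run object carries many more):

// Hypothetical reconstruction of the request/response models used above.
public class Assistant
{
    // Left lowercase so default serialization produces the API's "assistant_id" field.
    public string assistant_id { get; set; }
}

public class RunResponse
{
    [JsonProperty("id")]
    public string Id { get; set; }

    // e.g. "queued", "in_progress", "completed", "failed", "cancelled", "expired"
    [JsonProperty("status")]
    public string Status { get; set; }
}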
Get AI Response
HttpResponseMessage response;
using (_client = new HttpClient())
{
    _client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
    _client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");
    response = await _client.GetAsync($"https://api.openai.com/v1/threads/{_threadId}/messages");
}

The response that is returned from this request is more complex than anything we've seen up to this point and requires a bit more handling in order to extract the messages:

var responseContent = response.Content.ReadAsStringAsync().Result;
try
{
    var data = JsonConvert.DeserializeObject<ChatResponse>(responseContent);
    _messages.Clear();
    _messages = data?.Data.Select(x => new AiContent() { Type = x.Role, Text = x.Content[0].Text }).ToList() ?? new List<AiContent>();
}
catch (Exception ex)
{
    throw new AiClientException("Error retrieving messages");
}
public class ChatResponse
{
    public List<Data> Data { get; set; }
    public string FirstId { get; set; }
    public string LastId { get; set; }
    public bool HasMore { get; set; }
}

public class Data
{
    public string Id { get; set; }
    public string Object { get; set; }
    public long CreatedAt { get; set; }
    public string AssistantId { get; set; }
    public string ThreadId { get; set; }
    public string RunId { get; set; }
    public string Role { get; set; }
    public List<AiContent> Content { get; set; }
    public List<object> FileIds { get; set; }
    public Metadata Metadata { get; set; }
}
public class AiContent
{
    public string Type { get; set; }
    public Text Text { get; set; }
}

public class Text
{
    public string Value { get; set; }
    public List<object> Annotations { get; set; }
}
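With the messages mapped into _messages, the calling code still needs a way to surface the assistant's latest answer. One hypothetical helper (not part of the article's project), assuming the API's default newest-first ordering of the messages list:

// Hypothetical convenience method; "assistant" matches the Role value copied
// into AiContent.Type above, and the list is assumed to be newest-first.
public string GetLatestAssistantReply()
{
    var latest = _messages.FirstOrDefault(m => m.Type == "assistant");
    return latest?.Text?.Value ?? string.Empty;
}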
Furthering the Project
• Add photo generation – Who doesn’t like playing with the photo generator provided by most AIs? A rough sketch of a starting point follows below.
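A starting point for that item might look something like the sketch below. The model name, image size, and the way the result URL is read back are assumptions, not part of the article's project:

// Hypothetical sketch: generate an image via the OpenAI Images API
// (requires using Newtonsoft.Json.Linq; alongside the existing usings).
public async Task<string> GenerateImage(string prompt)
{
    using var client = new HttpClient();
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);

    var request = new { model = "dall-e-3", prompt, n = 1, size = "1024x1024" };
    var content = new StringContent(JsonConvert.SerializeObject(request), Encoding.UTF8, "application/json");

    var response = await client.PostAsync("https://api.openai.com/v1/images/generations", content);
    var body = await response.Content.ReadAsStringAsync();
    if (!response.IsSuccessStatusCode)
    {
        var errorResponse = JsonConvert.DeserializeObject<ErrorResponse>(body);
        throw new AiClientException(errorResponse?.Error.Message);
    }

    // The response carries a "data" array whose items include a "url" field.
    var data = JObject.Parse(body);
    return data["data"]?[0]?["url"]?.ToString() ?? string.Empty;
}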
Full File
using AiClients.Exceptions;
using AiClients.Interfaces;
using AiClients.Models;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json;
using System.Net.Http.Headers;
using System.Text;

namespace CustomGptClient.Services
{
    public class AssitantService : IAiService
    {
        private string _threadId;
        private IConfiguration _config;
        private string _apiKey;
        private string _assistantId;
        private List<AiContent> _messages;
        private string _assistantName;
        private HttpClient _client;
        private List<string> FinalStatuses = new List<string> { "completed", "failed", "cancelled", "expired" };

        public AssitantService(IConfiguration configuration)
        {
            _config = configuration;
            _apiKey = _config.GetSection("OpenAI:ApiKey")?.Value ?? string.Empty;
            _assistantId = _config.GetSection("OpenAI:AssistantId")?.Value ?? string.Empty;
            _messages = new List<AiContent>();
        }

        private string CreateThread()
        {
            if (string.IsNullOrEmpty(_apiKey)) throw new AiClientException("OpenAI ApiKey is not set");

            HttpResponseMessage response;
            using (var _client = new HttpClient())
            {
                _client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
                _client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");
                response = _client.PostAsync("https://api.openai.com/v1/threads", null).Result;
            }

            var threadIdResponse = response.Content.ReadAsStringAsync().Result;
            if (!response.IsSuccessStatusCode)
            {
                var errorResponse = JsonConvert.DeserializeObject<ErrorResponse>(threadIdResponse);
                throw new AiClientException(errorResponse?.Error.Message);
            }

            var threadIdObj = JsonConvert.DeserializeObject<ThreadResponse>(threadIdResponse);
            _threadId = threadIdObj?.Id ?? string.Empty;
            return _threadId;
        }

        public async Task AddMessage(string message)
        {
            if (string.IsNullOrEmpty(_apiKey)) throw new AiClientException("OpenAI ApiKey is not set");
            if (string.IsNullOrEmpty(_threadId)) CreateThread();
            if (string.IsNullOrEmpty(message)) throw new AiClientException("Message is empty");

            HttpResponseMessage response;
            using (_client = new HttpClient())
            {
                _client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
                _client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");

                var messageRequest = new AiRequestMessage { Role = "user", Content = message };
                var json = JsonConvert.SerializeObject(messageRequest);
                var content = new StringContent(json, Encoding.UTF8, "application/json");
                response = await _client.PostAsync($"https://api.openai.com/v1/threads/{_threadId}/messages", content);
            }

            var threadIdResponse = response.Content.ReadAsStringAsync().Result;
            if (!response.IsSuccessStatusCode)
            {
                var errorResponse = JsonConvert.DeserializeObject<ErrorResponse>(threadIdResponse);
                throw new AiClientException(errorResponse?.Error.Message);
            }

            var threadIdObj = JsonConvert.DeserializeObject<ThreadResponse>(threadIdResponse);
            await CreateRun();
        }

        public async Task CreateRun()
        {
            HttpResponseMessage response;
            using (_client = new HttpClient())
            {
                _client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
                _client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");

                var custAsst = new Assistant { assistant_id = _assistantId };
                var json = JsonConvert.SerializeObject(custAsst);
                var content = new StringContent(json, Encoding.UTF8, "application/json");
                response = await _client.PostAsync($"https://api.openai.com/v1/threads/{_threadId}/runs", content);

                var responseContent = await response.Content.ReadAsStringAsync();
                var responseObj = JsonConvert.DeserializeObject<RunResponse>(responseContent);
                var runId = responseObj?.Id;
                var runStatus = responseObj?.Status;

                // If the run hasn't reached a final status yet, poll until it does.
                if (runId != null)
                {
                    while (runStatus != null && !FinalStatuses.Contains(runStatus))
                    {
                        await Task.Delay(1000);
                        response = await _client.GetAsync($"https://api.openai.com/v1/threads/{_threadId}/runs/{runId}");
                        responseContent = response.Content.ReadAsStringAsync().Result;
                        responseObj = JsonConvert.DeserializeObject<RunResponse>(responseContent);
                        runStatus = responseObj?.Status;
                    }
                }
            }

            await GetResponse();
        }

        public async Task GetResponse()
        {
            HttpResponseMessage response;
            using (_client = new HttpClient())
            {
                _client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _apiKey);
                _client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2");
                response = await _client.GetAsync($"https://api.openai.com/v1/threads/{_threadId}/messages");
            }

            var responseContent = response.Content.ReadAsStringAsync().Result;
            try
            {
                var data = JsonConvert.DeserializeObject<ChatResponse>(responseContent);
                _messages.Clear();
                _messages = data?.Data.Select(x => new AiContent() { Type = x.Role, Text = x.Content[0].Text }).ToList() ?? new List<AiContent>();
            }
            catch (Exception ex)
            {
                throw new AiClientException("Error retrieving messages");
            }
        }
    }
}
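Finally, a hypothetical sketch of how the service might be wired up and driven from a console entry point. The appsettings.json file name and the message text are illustrative, and only members shown above are called:

// Hypothetical usage sketch (requires using Microsoft.Extensions.Configuration;
// and the CustomGptClient.Services namespace).
var config = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")
    .Build();

var assistant = new AssitantService(config);

// Adds the user message, creates a run, polls until it finishes, then refreshes the message list.
await assistant.AddMessage("Summarize what this assistant can do.");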