From 9dde5c13dc1e033175dfbe0a25ad93c0634a59e0 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 16:57:35 -0700 Subject: [PATCH 01/24] Adding the ability to download a model by name instead of by Model object --- src/Client/IPiecesClient.cs | 15 ++++++++++ src/Client/PiecesClient.cs | 55 +++++++++++++++++++++++++++++++++---- 2 files changed, 64 insertions(+), 6 deletions(-) diff --git a/src/Client/IPiecesClient.cs b/src/Client/IPiecesClient.cs index db99373..a915f05 100644 --- a/src/Client/IPiecesClient.cs +++ b/src/Client/IPiecesClient.cs @@ -23,6 +23,21 @@ public interface IPiecesClient /// Task DownloadModelAsync(Model model, CancellationToken cancellationToken = default); + /// + /// Downloads an offline model based off the name. Models are found using a case insensitive comparison + /// finding the first model with a name that contains the given model name. + /// + /// For example, if you use GPT-4o as the model name, it will match GPT-4o Mini Chat Model. + /// + /// If the model is not found, a is thrown. + /// + /// If the model is not offline, this just returns the model. + /// If the model is an offline model, and is already downloaded, this just returns the model + /// + /// The name model to download + /// + Task DownloadModelAsync(string modelName, CancellationToken cancellationToken = default); + /// /// Get the Pieces OS version /// diff --git a/src/Client/PiecesClient.cs b/src/Client/PiecesClient.cs index 74d9fd9..476805c 100644 --- a/src/Client/PiecesClient.cs +++ b/src/Client/PiecesClient.cs @@ -78,7 +78,7 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli conversationsApi = new ConversationsApi(apiClient, apiClient, configuration); rangesApi = new RangesApi(apiClient, apiClient, configuration); connectorApi = new ConnectorApi(apiClient, apiClient, configuration); - + qgptWebSocket = new WebSocketBackgroundClient(); var qgptUrlBuilder = new UriBuilder(webSocketBaseUrl) { @@ -111,9 +111,8 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli logger?.LogInformation("Web sockets started"); - // Get all the models to pick a default - choose GPT-4o mini if it is available - var models = modelsApi.ModelsSnapshot().Iterable; - var defaultModel = models.FirstOrDefault(x => x.Name.Contains("GPT-4o")) ?? models.First(); + // Get all the models to pick a default - choose GPT-4o if it is available + var defaultModel = GetModelFromName("GPT-4o"); copilot = new PiecesCopilot(logger, defaultModel, application!, qgptWebSocket, conversationsApi, rangesApi); assets = new PiecesAssets(logger, application!, assetApi, assetsApi); @@ -121,6 +120,33 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli this.logger = logger; } + /// + /// Gets the first model that contains the given name. + /// If no model matches, the first is returned. + /// + /// The search string for the model name + /// If false and the model is not found, return the first model. 
Otherwise throw + /// + private Model GetModelFromName(string modelName, bool throwIfNotFound = false) + { + var models = modelsApi.ModelsSnapshot().Iterable; + var matchModel = models.FirstOrDefault(x => x.Name.Contains(modelName, StringComparison.OrdinalIgnoreCase)); + + if (matchModel == null) + { + if (throwIfNotFound) + { + throw new PiecesClientException($"Model {modelName} not found"); + } + else + { + matchModel = models.First(); + } + } + + return matchModel; + } + private async Task EnsureConnected() => await webSocketTask.ConfigureAwait(false); /// @@ -204,9 +230,26 @@ public async Task DownloadModelAsync(Model model, CancellationToken cance } // Now load the model - var loadedModel = await modelApi.ModelSpecificModelLoadAsync(model.Id, cancellationToken: cancellationToken).ConfigureAwait(false); + return await modelApi.ModelSpecificModelLoadAsync(model.Id, cancellationToken: cancellationToken).ConfigureAwait(false); + } - return loadedModel; + /// + /// Downloads an offline model based off the name. Models are found using a case insensitive comparison + /// finding the first model with a name that contains the given model name. + /// + /// For example, if you use GPT-4o as the model name, it will match GPT-4o Mini Chat Model. + /// + /// If the model is not found, a is thrown. + /// + /// If the model is not offline, this just returns the model. + /// If the model is an offline model, and is already downloaded, this just returns the model + /// + /// The name model to download + /// + public async Task DownloadModelAsync(string modelName, CancellationToken cancellationToken = default) + { + var model = GetModelFromName(modelName, true); + return await DownloadModelAsync(model, cancellationToken).ConfigureAwait(false); } /// From 7bf80de0ce299a230e18c4d14f1d15000920c85c Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 16:58:12 -0700 Subject: [PATCH 02/24] Adding seeds to chats --- README.md | 22 +++- src/Client/Copilot/IPiecesCopilot.cs | 30 ++++- src/Client/Copilot/PiecesCopilot.cs | 42 ++++++- src/Example/Program.cs | 169 ++++++++++++++++++++++++++- 4 files changed, 255 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index f2483e2..87a0db6 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ The Pieces OS Client SDK is a powerful code engine package designed for writing applications on top of Pieces OS. It facilitates communication with a locally hosted server to enable features such as copilot chats, asset saving, and more. ## Features + The Pieces SDK offers the following key features: 1. Copilot Chats: Communicate seamlessly with copilot chats functionality. @@ -21,7 +22,6 @@ The Pieces SDK offers the following key features: 1. Local Server Interaction: Interact with a locally hosted server for various functionality. 1. Multi LLMs support: Use any Pieces supported LLM to power your app. - ## Installation To get started with the Pieces OS Client SDK, follow these steps: @@ -80,14 +80,13 @@ Once you have the `IPiecesCopilot`, you can ask questions, and get the response This will create a new copilot chat that you will be able to see in other Pieces applications, such as Pieces Desktop, or Pieces for Visual Studio Code. This chat will be named `"C# question on async tasks"`, and you will see this name in other Pieces applications. The chat will ask a question, then return the full answer to the console once it has the complete answer. 
- ```csharp var chat = await copilot.CreateChatAsync("C# question on async tasks").ConfigureAwait(false); Console.WriteLine(await chat.AskQuestionAsync("What does the async keyword do in C#?")); ``` -#### create a chat and ask a question, then a follow up question: +#### Create a chat and ask a question, then a follow up question: This example creates a copilot chat, and asks 2 connected questions, showing the response from each once it has been completely generated. @@ -98,7 +97,22 @@ Console.WriteLine(await chat2.AskQuestionAsync("What does the async keyword do i Console.WriteLine(await chat2.AskQuestionAsync("Give me an example using it with an HTTP call?").ConfigureAwait(false)); ``` -#region Example 3 - stream the response +#### Create a chat with seeded messages: + +This example shows how to seed a conversation with a set of messages that are used in the conversation + +```csharp +var seeds = new List{ + new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), + new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), + new(QGPTConversationMessageRoleEnum.ASSISTANT, "To make a web request in a programming language, you typically use an HTTP client library."), +}; + +var chat = await copilot.CreateSeededChatAsync("C# web requests", + seeds: seeds).ConfigureAwait(false); + +Console.WriteLine(await chat.AskQuestionAsync("What about in C#?").ConfigureAwait(false)); +``` #### Stream the response diff --git a/src/Client/Copilot/IPiecesCopilot.cs b/src/Client/Copilot/IPiecesCopilot.cs index 1ad5b75..d250226 100644 --- a/src/Client/Copilot/IPiecesCopilot.cs +++ b/src/Client/Copilot/IPiecesCopilot.cs @@ -2,6 +2,13 @@ namespace Pieces.OS.Client.Copilot; using Pieces.Os.Core.SdkModel; +/// +/// A record for seed messages for conversations +/// +/// +/// +public record SeedMessage(QGPTConversationMessageRoleEnum Role, string Message); + public interface IPiecesCopilot { /// @@ -18,7 +25,28 @@ public interface IPiecesCopilot /// Should this chat use live context or not /// A cancellation token /// The new chat - Task CreateChatAsync(string chatName = "", IEnumerable? assetIds = null, Model? model = default, bool useLiveContext = false, CancellationToken cancellationToken = default); + Task CreateChatAsync(string chatName = "", + IEnumerable? assetIds = null, + Model? model = default, + bool useLiveContext = false, + CancellationToken cancellationToken = default); + + /// + /// Create a new chat with the copilot seeded with messages + /// + /// An optional name for the chat. If nothing is provided, the name will be New conversation + /// A set of seeded messages for the conversation + /// An optional list of asset Ids to add to the chat + /// The LLM model to use + /// Should this chat use live context or not + /// A cancellation token + /// The new chat + Task CreateSeededChatAsync(string chatName = "", + IEnumerable? seeds = null, + IEnumerable? assetIds = null, + Model? 
model = default, + bool useLiveContext = false, + CancellationToken cancellationToken = default); /// /// Get all the chats with the current copilot diff --git a/src/Client/Copilot/PiecesCopilot.cs b/src/Client/Copilot/PiecesCopilot.cs index 3b4c65f..b4d5516 100644 --- a/src/Client/Copilot/PiecesCopilot.cs +++ b/src/Client/Copilot/PiecesCopilot.cs @@ -47,11 +47,37 @@ public IEnumerable Chats /// Should this chat use live context or not /// A cancellation token /// The new chat - public async Task CreateChatAsync(string chatName = "", + public Task CreateChatAsync(string chatName = "", IEnumerable? assetIds = null, Model? model = default, bool useLiveContext = false, CancellationToken cancellationToken = default) + { + return CreateSeededChatAsync(chatName: chatName, + seeds: null, + assetIds: assetIds, + model: model, + useLiveContext: useLiveContext, + cancellationToken: cancellationToken); + } + + + /// + /// Create a new chat with the copilot seeded with messages + /// + /// An optional name for the chat. If nothing is provided, the name will be New conversation + /// A set of seeded messages for the conversation + /// An optional list of asset Ids to add to the chat + /// The LLM model to use + /// Should this chat use live context or not + /// A cancellation token + /// The new chat + public async Task CreateSeededChatAsync(string chatName = "", + IEnumerable? seeds = null, + IEnumerable? assetIds = null, + Model? model = default, + bool useLiveContext = false, + CancellationToken cancellationToken = default) { chatName = string.IsNullOrWhiteSpace(chatName) ? "New Conversation" : chatName; @@ -85,8 +111,22 @@ public async Task CreateChatAsync(string chatName = "", flattenedAssets = new FlattenedAssets(iterable: referencedAssets); } + // If we have any seeds, use them + List? conversationMessages = null; + if (seeds is not null && seeds.Any()) + { + conversationMessages = seeds.Select(s => + { + var fragment = new FragmentFormat(varString: new TransferableString(raw: s.Message)); + return new SeededConversationMessage(model: model, + role: s.Role, + fragment: fragment); + }).ToList(); + } + var seededConversation = new SeededConversation(type: ConversationTypeEnum.COPILOT, name: chatName, + messages: conversationMessages, pipeline: pipeline, assets: flattenedAssets); var conversation = await conversationsApi.ConversationsCreateSpecificConversationAsync( diff --git a/src/Example/Program.cs b/src/Example/Program.cs index 81e36e4..72de4f7 100644 --- a/src/Example/Program.cs +++ b/src/Example/Program.cs @@ -3,8 +3,9 @@ /// You can find each example in a region below. These examples are commented out, so uncomment each one to run it. using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; - +using Pieces.Os.Core.SdkModel; using Pieces.OS.Client; +using Pieces.OS.Client.Copilot; // Set up logging var services = new ServiceCollection(); @@ -254,4 +255,168 @@ // Console.Write(token); // } -#endregion Example 8 - Change the model to a local model \ No newline at end of file +#endregion Example 8 - Change the model to a local model + +#region Example 9 - Seed a conversation + +// Example 9 - Seed a conversation +// +// This example shows how to seed a conversation with a set of messages +// That are used in the conversation + +// var seeds = new List{ +// new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. 
Start every response with 'Hey matey!'."), +// new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), +// new(QGPTConversationMessageRoleEnum.ASSISTANT, @"To make a web request in a programming language, you typically use an HTTP client library. Below are examples in a few popular languages: + +// ### Python +// You can use the `requests` library to make web requests in Python. + +// ```python +// import requests + +// response = requests.get('https://api.example.com/data') +// if response.status_code == 200: +// print(response.json()) +// else: +// print(f'Error: {response.status_code}') +// ``` + +// ### JavaScript (using Fetch API) +// In JavaScript, you can use the Fetch API to make web requests. + +// ```javascript +// fetch('https://api.example.com/data') +// .then(response => { +// if (!response.ok) { +// throw new Error('Network response was not ok ' + response.statusText); +// } +// return response.json(); +// }) +// .then(data => console.log(data)) +// .catch(error => console.error('There was a problem with the fetch operation:', error)); +// ``` + +// ### Java (using HttpURLConnection) +// In Java, you can use `HttpURLConnection` to make web requests. + +// ```java +// import java.io.BufferedReader; +// import java.io.InputStreamReader; +// import java.net.HttpURLConnection; +// import java.net.URL; + +// public class WebRequestExample { +// public static void main(String[] args) { +// try { +// URL url = new URL(""https://api.example.com/data\""); +// HttpURLConnection conn = (HttpURLConnection) url.openConnection(); +// conn.setRequestMethod(""GET""); + +// int responseCode = conn.getResponseCode(); +// if (responseCode == HttpURLConnection.HTTP_OK) { +// BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); +// String inputLine; +// StringBuilder response = new StringBuilder(); + +// while ((inputLine = in.readLine()) != null) { +// response.append(inputLine); +// } +// in.close(); +// System.out.println(response.toString()); +// } else { +// System.out.println(""GET request failed: "" + responseCode); +// } +// } catch (Exception e) { +// e.printStackTrace(); +// } +// } +// } +// ``` + +// ### C# (using HttpClient) +// In C#, you can use `HttpClient` to make web requests. + +// ```csharp +// using System; +// using System.Net.Http; +// using System.Threading.Tasks; + +// class Program +// { +// static async Task Main() +// { +// using (HttpClient client = new HttpClient()) +// { +// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); +// if (response.IsSuccessStatusCode) +// { +// string data = await response.Content.ReadAsStringAsync(); +// Console.WriteLine(data); +// } +// else +// { +// Console.WriteLine($""Error: {response.StatusCode}""); +// } +// } +// } +// } +// ``` + +// Choose the example that fits the programming language you are using!"), +// new(QGPTConversationMessageRoleEnum.USER, "I am using C#"), +// new(QGPTConversationMessageRoleEnum.ASSISTANT, @"Great! Since you're using C#, you can use the `HttpClient` class to make web requests. 
Here's a simple example of how to perform a GET request: + +// ```csharp +// using System; +// using System.Net.Http; +// using System.Threading.Tasks; + +// class Program +// { +// static async Task Main() +// { +// using (HttpClient client = new HttpClient()) +// { +// try +// { +// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); +// if (response.IsSuccessStatusCode) +// { +// string data = await response.Content.ReadAsStringAsync(); +// Console.WriteLine(data); +// } +// else +// { +// Console.WriteLine($""Error: {response.StatusCode}""); +// } +// } +// catch (Exception e) +// { +// Console.WriteLine($""Exception occurred: {e.Message}""); +// } +// } +// } +// } +// ``` + +// ### Explanation: +// - **HttpClient**: This class is used to send HTTP requests and receive HTTP responses from a resource identified by a URI. +// - **GetAsync**: This method sends a GET request to the specified URI. +// - **IsSuccessStatusCode**: This property checks if the response status code indicates success (2xx). +// - **ReadAsStringAsync**: This method reads the response content as a string asynchronously. + +// Make sure to replace `""https://api.example.com/data""` with the actual URL you want to request. If you have any specific requirements or questions, feel free to ask!"), +// }; + +// var chat9 = await copilot.CreateSeededChatAsync("Question on async tasks", seeds: seeds).ConfigureAwait(false); + +// var question9 = "Comment this code"; +// var response9 = await chat9.AskQuestionAsync(question9); + +// Console.WriteLine(question9); +// Console.WriteLine(); +// Console.WriteLine(response9); +// Console.WriteLine(); + +#endregion Example 9 - Seed a conversation \ No newline at end of file From 9b4adef7e2d4a638b8bd4c50678e713bc2918e73 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 17:04:38 -0700 Subject: [PATCH 03/24] Renaming examples --- README.md | 4 ++-- .../Pieces.OS.Client.Example.csproj} | 14 +++++++------- src/{Example => Client.Example}/Program.cs | 0 3 files changed, 9 insertions(+), 9 deletions(-) rename src/{Example/Pieces.OS.Example.csproj => Client.Example/Pieces.OS.Client.Example.csproj} (97%) rename src/{Example => Client.Example}/Program.cs (100%) diff --git a/README.md b/README.md index 87a0db6..3cda67b 100644 --- a/README.md +++ b/README.md @@ -37,9 +37,9 @@ To get started with the Pieces OS Client SDK, follow these steps: dotnet add package Pieces.OS.Client ``` -## Examples +## Pieces Client Examples -There is an example project in the [`./src/Example`](./src/Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Example`](./src/Example) folder. +There is an example project using the Pieces Client in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. 
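+
+For example, assuming the .NET 8 SDK is installed and Pieces OS is running locally, a minimal sketch of the commands to run the example project from the repository root is:
+
+```bash
+cd src/Client.Example
+dotnet run
+```
+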
### Set up the SDK diff --git a/src/Example/Pieces.OS.Example.csproj b/src/Client.Example/Pieces.OS.Client.Example.csproj similarity index 97% rename from src/Example/Pieces.OS.Example.csproj rename to src/Client.Example/Pieces.OS.Client.Example.csproj index b1c453d..eb295f4 100644 --- a/src/Example/Pieces.OS.Example.csproj +++ b/src/Client.Example/Pieces.OS.Client.Example.csproj @@ -1,14 +1,14 @@  - - - + + + - - - - + + + + diff --git a/src/Example/Program.cs b/src/Client.Example/Program.cs similarity index 100% rename from src/Example/Program.cs rename to src/Client.Example/Program.cs From 144a31cc52371a5129b4f7d253575dfac851c384 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 17:24:27 -0700 Subject: [PATCH 04/24] Moving client example --- .vscode/launch.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index ba311b0..5bbba74 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -5,10 +5,10 @@ "version": "0.2.0", "configurations": [ { - "name": "C#: Example Debug", + "name": "Client Example Debug", "type": "dotnet", "request": "launch", - "projectPath": "${workspaceFolder}/src/Example/Pieces.OS.Example.csproj" + "projectPath": "${workspaceFolder}/src/Client.Example/Pieces.OS.Client.Example.csproj" } ] From 0403a0f62caa92e74d5b8d712ea7a10f13829144 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 17:57:51 -0700 Subject: [PATCH 05/24] Whitespace --- src/Client.Example/Program.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Client.Example/Program.cs b/src/Client.Example/Program.cs index 72de4f7..2d951d1 100644 --- a/src/Client.Example/Program.cs +++ b/src/Client.Example/Program.cs @@ -161,6 +161,7 @@ // Example 6 - Create an asset and use it in a copilot chat // // This example creates a new asset. It then uses it in a copilot chat asking a question about it. 
+ // var assetCode = @"using System; // class Program From 266c193e27cac456dbe59fd78cce2a4488daee89 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 8 Oct 2024 17:58:09 -0700 Subject: [PATCH 06/24] Adding Pieces.Extensions for Microsoft.Extensions.AI support --- .github/workflows/publish-nuget.yaml | 2 +- .vscode/launch.json | 7 +- Pieces.OS.Client.sln | 16 +- .../Pieces.Extensions.AI.Example.csproj | 22 ++ src/Extensions.Example/Program.cs | 136 ++++++++++++ src/Extensions/Pieces.Extensions.AI.csproj | 67 ++++++ src/Extensions/PiecesChatClient.cs | 207 ++++++++++++++++++ 7 files changed, 454 insertions(+), 3 deletions(-) create mode 100644 src/Extensions.Example/Pieces.Extensions.AI.Example.csproj create mode 100644 src/Extensions.Example/Program.cs create mode 100644 src/Extensions/Pieces.Extensions.AI.csproj create mode 100644 src/Extensions/PiecesChatClient.cs diff --git a/.github/workflows/publish-nuget.yaml b/.github/workflows/publish-nuget.yaml index bba8e71..5e95649 100644 --- a/.github/workflows/publish-nuget.yaml +++ b/.github/workflows/publish-nuget.yaml @@ -6,7 +6,7 @@ on: - '*' jobs: - publish-to-nuget: + publish-client-to-nuget: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.vscode/launch.json b/.vscode/launch.json index 5bbba74..1255a11 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,7 +9,12 @@ "type": "dotnet", "request": "launch", "projectPath": "${workspaceFolder}/src/Client.Example/Pieces.OS.Client.Example.csproj" + }, + { + "name": "Extensions Example Debug", + "type": "dotnet", + "request": "launch", + "projectPath": "${workspaceFolder}/src/Extensions.Example/Pieces.Extensions.AI.Example.csproj" } - ] } \ No newline at end of file diff --git a/Pieces.OS.Client.sln b/Pieces.OS.Client.sln index 75586ff..47cdcff 100644 --- a/Pieces.OS.Client.sln +++ b/Pieces.OS.Client.sln @@ -19,7 +19,11 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.Os.Core.Test", "src\ EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.OS.Client", "src\Client\Pieces.OS.Client.csproj", "{7B7F08A9-0F79-4902-8461-7EC8CC0637ED}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.OS.Example", "src\Example\Pieces.OS.Example.csproj", "{9B392DE8-CC81-4B68-AF1F-64909DA5B9CF}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.OS.Client.Example", "src\Client.Example\Pieces.OS.Client.Example.csproj", "{9B392DE8-CC81-4B68-AF1F-64909DA5B9CF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.Extensions.AI", "src\Extensions\Pieces.Extensions.AI.csproj", "{14EEEFCD-42A3-4C85-AED5-9C00F0D53811}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.Extensions.AI.Example", "src\Extensions.Example\Pieces.Extensions.AI.Example.csproj", "{444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -46,6 +50,14 @@ Global {9B392DE8-CC81-4B68-AF1F-64909DA5B9CF}.Debug|Any CPU.Build.0 = Debug|Any CPU {9B392DE8-CC81-4B68-AF1F-64909DA5B9CF}.Release|Any CPU.ActiveCfg = Release|Any CPU {9B392DE8-CC81-4B68-AF1F-64909DA5B9CF}.Release|Any CPU.Build.0 = Release|Any CPU + {14EEEFCD-42A3-4C85-AED5-9C00F0D53811}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {14EEEFCD-42A3-4C85-AED5-9C00F0D53811}.Debug|Any CPU.Build.0 = Debug|Any CPU + {14EEEFCD-42A3-4C85-AED5-9C00F0D53811}.Release|Any CPU.ActiveCfg = Release|Any CPU + {14EEEFCD-42A3-4C85-AED5-9C00F0D53811}.Release|Any CPU.Build.0 = Release|Any CPU + 
{444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution {63B9A93A-AD4D-42C6-9A80-68853FAC4913} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} @@ -56,5 +68,7 @@ Global {15767DC5-A2E1-45FB-A7A8-914E7C9A5C9E} = {FE52B15F-09C2-4D8A-952A-3ACB8C449EAA} {7B7F08A9-0F79-4902-8461-7EC8CC0637ED} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} {9B392DE8-CC81-4B68-AF1F-64909DA5B9CF} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} + {14EEEFCD-42A3-4C85-AED5-9C00F0D53811} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} + {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} EndGlobalSection EndGlobal diff --git a/src/Extensions.Example/Pieces.Extensions.AI.Example.csproj b/src/Extensions.Example/Pieces.Extensions.AI.Example.csproj new file mode 100644 index 0000000..7c86c6a --- /dev/null +++ b/src/Extensions.Example/Pieces.Extensions.AI.Example.csproj @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + Exe + net8.0 + enable + enable + + + diff --git a/src/Extensions.Example/Program.cs b/src/Extensions.Example/Program.cs new file mode 100644 index 0000000..08f4aaa --- /dev/null +++ b/src/Extensions.Example/Program.cs @@ -0,0 +1,136 @@ +/// This file contains some examples for using the Pieces .NET SDK +/// +/// You can find each example in a region below. These examples are commented out, so uncomment each one to run it. +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Pieces.Extensions.AI; +using Pieces.OS.Client; + +// Set up logging +var services = new ServiceCollection(); +services.AddLogging(builder =>builder.AddConsole()); +var serviceProvider = services.BuildServiceProvider(); +var logger = serviceProvider.GetRequiredService>(); + +// Create the Pieces client +IPiecesClient client = new PiecesClient(logger); +var assets = await client.GetAssetsAsync().ConfigureAwait(false); + +// Create a Chat completion +IChatClient chatClient = new PiecesChatClient(client); + +#region Example 1 - a continuous conversation + +// This example shows a continuous conversation. It starts by setting the system prompt, +// then adds a user message, sends this to the chat completion, adds the response to the messages +// then adds another user message and so on. This shows how to build up a conversation. + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.System, "Answer every question from now on in the style of a pirate. 
Start every response with 'Hey matey!'."), +// }; + +// // Add the first user message +// chatMessages.Add(new ChatMessage(ChatRole.User, "How can I make a web request")); + +// // Ask the question +// var response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// // Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.User, response.Message!.Text)); + +// // Ask the next question, but change the model +// chatMessages.Add(new ChatMessage(ChatRole.User, "I want to do this in C#")); + +// var options = new ChatOptions() +// { +// ModelId = "Claude 3.5 Sonnet" +// }; +// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// // Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.User, response.Message!.Text)); + +// // Ask the next question +// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); + +// // This time stream the response +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } + +// Console.WriteLine(); + +#endregion Example 1 - a continuous conversation + +#region Example 2 - live context + +// This example shows how to use live context in a chat completion via the Additional Properties dictionary. +// To run this, read this GitHub issue in your browser before running this: https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/issues/8 +// var chatMessages = new List{ +// new ChatMessage(ChatRole.User, "Describe the Add support for Microsoft.Extensions.AI github issue I was just reading about in my browser") +// }; + +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "LiveContext", true }, +// { "LiveContextTimeSpan", TimeSpan.FromHours(1) } +// } +// }; + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } + +// Console.WriteLine(); + +#endregion Example 2 - live context + +#region Example 3 - Create an asset and use it in a chat + +// This example creates a new asset. It then uses it in a chat asking a question about it. +var assetCode = @"from pieces_os_client.wrapper import PiecesClient + +pieces_client = PiecesClient() + +# Set the question you want to ask +question = 'What is Object-Oriented Programming?' 
+ +# Ask the question and stream the response +for response in pieces_client.copilot.stream_question(question): + if response.question: + # Each answer is a chunk of the entire response to the question + answers = response.question.answers.iterable + for answer in answers: + print(answer.text,end='') + +# Close the client +pieces_client.close() +"; + +var newAsset = await assets.CreateAssetAsync(assetCode).ConfigureAwait(false); +Console.WriteLine($"Asset created = name {newAsset.Name}, id: {newAsset.Id}"); + +var options = new ChatOptions() +{ + AdditionalProperties = new AdditionalPropertiesDictionary{ + { "AssetIds", new List{newAsset.Id} } + } +}; + +var chatMessages = new List{ + new(ChatRole.User, "Describe this program") +}; + +await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +{ + Console.Write(r.Text); +} + +#endregion Example 3 - assets \ No newline at end of file diff --git a/src/Extensions/Pieces.Extensions.AI.csproj b/src/Extensions/Pieces.Extensions.AI.csproj new file mode 100644 index 0000000..1454425 --- /dev/null +++ b/src/Extensions/Pieces.Extensions.AI.csproj @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + net8.0 + enable + enable + true + + + + Pieces.Extensions.AI + pieces-circle.png + 2024 Mesh Intelligent Technologies, Inc + Pieces for Devlopers + Pieces for Devlopers + Support for the Microsoft AI Extensions using Pieces for Developers + MIT + copilot ai llm artificial intelligence code developer extensions + https://pieces.app + https://github.com/pieces-app/pieces-os-client-sdk-for-csharp.git + git + README.md + + + + + + + + + $(TargetsForTfmSpecificBuildOutput);CopyProjectReferencesToPackage + + + + + + <_ReferenceCopyLocalPaths Include="@(ReferenceCopyLocalPaths->WithMetadataValue('ReferenceSourceTarget', 'ProjectReference')->WithMetadataValue('PrivateAssets', 'All'))" /> + + + + + + + + + + + + \ No newline at end of file diff --git a/src/Extensions/PiecesChatClient.cs b/src/Extensions/PiecesChatClient.cs new file mode 100644 index 0000000..a5f9673 --- /dev/null +++ b/src/Extensions/PiecesChatClient.cs @@ -0,0 +1,207 @@ +using System.Runtime.CompilerServices; +using Microsoft.Extensions.AI; +using Pieces.Os.Core.SdkModel; +using Pieces.OS.Client; +using Pieces.OS.Client.Copilot; + +namespace Pieces.Extensions.AI; + +public class PiecesChatClient(IPiecesClient piecesClient, Model? model = null) : IChatClient +{ + private readonly IPiecesClient piecesClient = piecesClient; + private Model? model = model; + private IPiecesCopilot? piecesCopilot; + + /// + public ChatClientMetadata Metadata => new ChatClientMetadata("Pieces for Developers", new Uri("https://pieces.app"), (model?.Name) ?? "Unknown model"); + + // + // Summary: + // Sends chat messages to the model and returns the response messages. + // + // Parameters: + // chatMessages: + // The chat content to send. + // + // options: + // The chat options to configure the request. To use Pieces specific features, set the following + // in the AdditionalProperties collection: + // + // ["LiveContext"] = true/false; // set to true to use live context. Default to false. + // ["LiveContextTimeSpan"] = TimeSpan?; // The timespan to use for live context. Defaults to 15 minutes if not set. + // ["AssetIds"] = []; // Set to an enumerable of asset ids to use saved assets in the chat. Default to none. + // + // cancellationToken: + // The System.Threading.CancellationToken to monitor for cancellation requests. 
+ // The default is System.Threading.CancellationToken.None. + // + // Returns: + // The response messages generated by the client. + // + // Remarks: + // The returned messages will not have been added to chatMessages. However, any + // intermediate messages generated implicitly by the client, including any messages + // for roundtrips to the model as part of the implementation of this request, will + // be included. + public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + // Build the seeds + var seeds = GetSeedsFromChatMessages(chatMessages); + // Check that the last message is from the user - if not we don't have a question to ask. + var last = seeds.Last(); + // Remove the last, as this is passed as the question + seeds.Remove(last); + + // Create the chat + var chat = await CreateChatAsync(options, seeds, cancellationToken).ConfigureAwait(false); + + // Ask the question + var response = await chat.AskQuestionAsync(last.Message, + liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), + cancellationToken: cancellationToken).ConfigureAwait(false); + + // Build the response + return new ChatCompletion([new ChatMessage(ChatRole.Assistant, response)]) + { + ModelId = chat.Model.Id, + FinishReason = ChatFinishReason.Stop, + CompletionId = chat.Id, + }; + } + + private static IEnumerable? GetAssetIdsFromOptions(ChatOptions? options) + { + if (options is not null && options.AdditionalProperties is not null) + { + if (options!.AdditionalProperties!.TryGetValue("AssetIds", out object? assetIdsVal)) + { + return assetIdsVal as IEnumerable; + } + } + + return null; + } + + private static bool GetLiveContextFromOptions(ChatOptions? options) + { + if (options is not null && options.AdditionalProperties is not null) + { + if (options!.AdditionalProperties!.TryGetValue("LiveContext", out object? liveContextVal)) + { + return liveContextVal is bool v && v; + } + } + + return false; + } + + private static TimeSpan? GetLiveContextTimeSpanFromOptions(ChatOptions? options) + { + if (options is not null && options.AdditionalProperties is not null) + { + if (options!.AdditionalProperties!.TryGetValue("LiveContextTimeSpan", out object? liveContextTimeSpanVal)) + { + return liveContextTimeSpanVal as TimeSpan?; + } + } + + return null; + } + + private static List GetSeedsFromChatMessages(IList chatMessages) + { + // Validate the chat messages - we need at least one, and the last should be a user message + if (!chatMessages.Any()) + { + throw new ArgumentException("No chat messages provided", nameof(chatMessages)); + } + + if (!chatMessages.Last().Role.Equals(ChatRole.User)) + { + throw new PiecesClientException($"The last messages is expected to be a user message as this is the question that will be asked. It is a {chatMessages.Last().Role} message."); + } + + return chatMessages.Select(c => + { + var role = c.Role switch + { + ChatRole when c.Role == ChatRole.System => QGPTConversationMessageRoleEnum.SYSTEM, + ChatRole when c.Role == ChatRole.User => QGPTConversationMessageRoleEnum.USER, + _ => QGPTConversationMessageRoleEnum.ASSISTANT, + }; + return new SeedMessage(role, c.Text!); + }).ToList(); + } + + private async Task GetModelFromChatOptionsAsync(ChatOptions? 
options, CancellationToken cancellationToken) + { + // If we have a model ID, get that model + // Otherwise use the last one set + return (options?.ModelId) switch + { + not null => await piecesClient.DownloadModelAsync(options!.ModelId!, cancellationToken).ConfigureAwait(false), + _ => model + }; + } + + /// + public async IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, + ChatOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // Build the seeds + var seeds = GetSeedsFromChatMessages(chatMessages); + // Check that the last message is from the user - if not we don't have a question to ask. + var last = seeds.Last(); + // Remove the last, as this is passed as the question + seeds.Remove(last); + + // Create the chat + var chat = await CreateChatAsync(options, seeds, cancellationToken).ConfigureAwait(false); + + // Ask the question + await foreach (var r in chat.AskStreamingQuestionAsync(last.Message, + liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), + cancellationToken: cancellationToken)) + { + var response = new StreamingChatCompletionUpdate() + { + Text = r, + CompletionId = chat.Id, + Role = ChatRole.Assistant + }; + + yield return response; + } + } + + private async Task CreateChatAsync(ChatOptions? options, List seeds, CancellationToken cancellationToken) + { + // Ensure the copilot has been created + piecesCopilot ??= await piecesClient.GetCopilotAsync().ConfigureAwait(false); + + // extract the relevant properties from the options + model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); + var assetIds = GetAssetIdsFromOptions(options); + var liveContext = GetLiveContextFromOptions(options); + + // Create a new chat using all the messages that have been sent + return await piecesCopilot.CreateSeededChatAsync("", + model: model, + seeds: seeds, + assetIds: assetIds, + useLiveContext: liveContext, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + public void Dispose() + { + GC.SuppressFinalize(this); + } + + /// + public TService? GetService(object? key = null) + where TService : class + => key is null ? this as TService : null; +} \ No newline at end of file From 06ac3cbdbfde0e862da78f672d6364b44cd3b2f5 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Wed, 9 Oct 2024 13:11:18 -0700 Subject: [PATCH 07/24] Organizing example --- src/Client.Example/Program.cs | 318 +++++++++++++++++----------------- 1 file changed, 160 insertions(+), 158 deletions(-) diff --git a/src/Client.Example/Program.cs b/src/Client.Example/Program.cs index b71dc42..512a6bd 100644 --- a/src/Client.Example/Program.cs +++ b/src/Client.Example/Program.cs @@ -301,166 +301,168 @@ #endregion Change the model to a local model -#region Example 9 - Seed a conversation +#region Seed a conversation -// Example 9 - Seed a conversation +// Seed a conversation // // This example shows how to seed a conversation with a set of messages // That are used in the conversation -// var seeds = new List{ -// new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), -// new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), -// new(QGPTConversationMessageRoleEnum.ASSISTANT, @"To make a web request in a programming language, you typically use an HTTP client library. 
Below are examples in a few popular languages: - -// ### Python -// You can use the `requests` library to make web requests in Python. - -// ```python -// import requests - -// response = requests.get('https://api.example.com/data') -// if response.status_code == 200: -// print(response.json()) -// else: -// print(f'Error: {response.status_code}') -// ``` - -// ### JavaScript (using Fetch API) -// In JavaScript, you can use the Fetch API to make web requests. - -// ```javascript -// fetch('https://api.example.com/data') -// .then(response => { -// if (!response.ok) { -// throw new Error('Network response was not ok ' + response.statusText); -// } -// return response.json(); -// }) -// .then(data => console.log(data)) -// .catch(error => console.error('There was a problem with the fetch operation:', error)); -// ``` - -// ### Java (using HttpURLConnection) -// In Java, you can use `HttpURLConnection` to make web requests. - -// ```java -// import java.io.BufferedReader; -// import java.io.InputStreamReader; -// import java.net.HttpURLConnection; -// import java.net.URL; - -// public class WebRequestExample { -// public static void main(String[] args) { -// try { -// URL url = new URL(""https://api.example.com/data\""); -// HttpURLConnection conn = (HttpURLConnection) url.openConnection(); -// conn.setRequestMethod(""GET""); - -// int responseCode = conn.getResponseCode(); -// if (responseCode == HttpURLConnection.HTTP_OK) { -// BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); -// String inputLine; -// StringBuilder response = new StringBuilder(); - -// while ((inputLine = in.readLine()) != null) { -// response.append(inputLine); -// } -// in.close(); -// System.out.println(response.toString()); -// } else { -// System.out.println(""GET request failed: "" + responseCode); -// } -// } catch (Exception e) { -// e.printStackTrace(); -// } -// } -// } -// ``` - -// ### C# (using HttpClient) -// In C#, you can use `HttpClient` to make web requests. - -// ```csharp -// using System; -// using System.Net.Http; -// using System.Threading.Tasks; - -// class Program -// { -// static async Task Main() -// { -// using (HttpClient client = new HttpClient()) -// { -// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); -// if (response.IsSuccessStatusCode) -// { -// string data = await response.Content.ReadAsStringAsync(); -// Console.WriteLine(data); -// } -// else -// { -// Console.WriteLine($""Error: {response.StatusCode}""); -// } -// } -// } -// } -// ``` - -// Choose the example that fits the programming language you are using!"), -// new(QGPTConversationMessageRoleEnum.USER, "I am using C#"), -// new(QGPTConversationMessageRoleEnum.ASSISTANT, @"Great! Since you're using C#, you can use the `HttpClient` class to make web requests. 
Here's a simple example of how to perform a GET request: - -// ```csharp -// using System; -// using System.Net.Http; -// using System.Threading.Tasks; - -// class Program -// { -// static async Task Main() -// { -// using (HttpClient client = new HttpClient()) -// { -// try -// { -// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); -// if (response.IsSuccessStatusCode) -// { -// string data = await response.Content.ReadAsStringAsync(); -// Console.WriteLine(data); -// } -// else -// { -// Console.WriteLine($""Error: {response.StatusCode}""); -// } -// } -// catch (Exception e) -// { -// Console.WriteLine($""Exception occurred: {e.Message}""); -// } -// } -// } -// } -// ``` - -// ### Explanation: -// - **HttpClient**: This class is used to send HTTP requests and receive HTTP responses from a resource identified by a URI. -// - **GetAsync**: This method sends a GET request to the specified URI. -// - **IsSuccessStatusCode**: This property checks if the response status code indicates success (2xx). -// - **ReadAsStringAsync**: This method reads the response content as a string asynchronously. - -// Make sure to replace `""https://api.example.com/data""` with the actual URL you want to request. If you have any specific requirements or questions, feel free to ask!"), -// }; - -// var chat9 = await copilot.CreateSeededChatAsync("Question on async tasks", seeds: seeds).ConfigureAwait(false); - -// var question9 = "Comment this code"; -// var response9 = await chat9.AskQuestionAsync(question9); - -// Console.WriteLine(question9); -// Console.WriteLine(); -// Console.WriteLine(response9); -// Console.WriteLine(); - -#endregion Example 9 - Seed a conversation +{ + // var seeds = new List{ + // new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), + // new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), + // new(QGPTConversationMessageRoleEnum.ASSISTANT, @"To make a web request in a programming language, you typically use an HTTP client library. Below are examples in a few popular languages: + + // ### Python + // You can use the `requests` library to make web requests in Python. + + // ```python + // import requests + + // response = requests.get('https://api.example.com/data') + // if response.status_code == 200: + // print(response.json()) + // else: + // print(f'Error: {response.status_code}') + // ``` + + // ### JavaScript (using Fetch API) + // In JavaScript, you can use the Fetch API to make web requests. + + // ```javascript + // fetch('https://api.example.com/data') + // .then(response => { + // if (!response.ok) { + // throw new Error('Network response was not ok ' + response.statusText); + // } + // return response.json(); + // }) + // .then(data => console.log(data)) + // .catch(error => console.error('There was a problem with the fetch operation:', error)); + // ``` + + // ### Java (using HttpURLConnection) + // In Java, you can use `HttpURLConnection` to make web requests. 
+ + // ```java + // import java.io.BufferedReader; + // import java.io.InputStreamReader; + // import java.net.HttpURLConnection; + // import java.net.URL; + + // public class WebRequestExample { + // public static void main(String[] args) { + // try { + // URL url = new URL(""https://api.example.com/data\""); + // HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + // conn.setRequestMethod(""GET""); + + // int responseCode = conn.getResponseCode(); + // if (responseCode == HttpURLConnection.HTTP_OK) { + // BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); + // String inputLine; + // StringBuilder response = new StringBuilder(); + + // while ((inputLine = in.readLine()) != null) { + // response.append(inputLine); + // } + // in.close(); + // System.out.println(response.toString()); + // } else { + // System.out.println(""GET request failed: "" + responseCode); + // } + // } catch (Exception e) { + // e.printStackTrace(); + // } + // } + // } + // ``` + + // ### C# (using HttpClient) + // In C#, you can use `HttpClient` to make web requests. + + // ```csharp + // using System; + // using System.Net.Http; + // using System.Threading.Tasks; + + // class Program + // { + // static async Task Main() + // { + // using (HttpClient client = new HttpClient()) + // { + // HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); + // if (response.IsSuccessStatusCode) + // { + // string data = await response.Content.ReadAsStringAsync(); + // Console.WriteLine(data); + // } + // else + // { + // Console.WriteLine($""Error: {response.StatusCode}""); + // } + // } + // } + // } + // ``` + + // Choose the example that fits the programming language you are using!"), + // new(QGPTConversationMessageRoleEnum.USER, "I am using C#"), + // new(QGPTConversationMessageRoleEnum.ASSISTANT, @"Great! Since you're using C#, you can use the `HttpClient` class to make web requests. Here's a simple example of how to perform a GET request: + + // ```csharp + // using System; + // using System.Net.Http; + // using System.Threading.Tasks; + + // class Program + // { + // static async Task Main() + // { + // using (HttpClient client = new HttpClient()) + // { + // try + // { + // HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); + // if (response.IsSuccessStatusCode) + // { + // string data = await response.Content.ReadAsStringAsync(); + // Console.WriteLine(data); + // } + // else + // { + // Console.WriteLine($""Error: {response.StatusCode}""); + // } + // } + // catch (Exception e) + // { + // Console.WriteLine($""Exception occurred: {e.Message}""); + // } + // } + // } + // } + // ``` + + // ### Explanation: + // - **HttpClient**: This class is used to send HTTP requests and receive HTTP responses from a resource identified by a URI. + // - **GetAsync**: This method sends a GET request to the specified URI. + // - **IsSuccessStatusCode**: This property checks if the response status code indicates success (2xx). + // - **ReadAsStringAsync**: This method reads the response content as a string asynchronously. + + // Make sure to replace `""https://api.example.com/data""` with the actual URL you want to request. 
If you have any specific requirements or questions, feel free to ask!"), + // }; + + // var chat = await copilot.CreateSeededChatAsync("Question on async tasks", seeds: seeds).ConfigureAwait(false); + + // var question = "Comment this code"; + // var response = await chat.AskQuestionAsync(question); + + // Console.WriteLine(question); + // Console.WriteLine(); + // Console.WriteLine(response); + // Console.WriteLine(); +} + +#endregion Seed a conversation From 2d05aae88cea3bd5c977560b46ca4c564b98f272 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Wed, 9 Oct 2024 19:11:04 -0700 Subject: [PATCH 08/24] Added chat caching and deletion --- src/Extensions.Example/Program.cs | 300 +++++++++++++++------ src/Extensions/Pieces.Extensions.AI.csproj | 2 +- src/Extensions/PiecesChatClient.cs | 292 ++++++++++++++------ 3 files changed, 431 insertions(+), 163 deletions(-) diff --git a/src/Extensions.Example/Program.cs b/src/Extensions.Example/Program.cs index 08f4aaa..7a5e654 100644 --- a/src/Extensions.Example/Program.cs +++ b/src/Extensions.Example/Program.cs @@ -9,7 +9,7 @@ // Set up logging var services = new ServiceCollection(); -services.AddLogging(builder =>builder.AddConsole()); +services.AddLogging(builder => builder.AddConsole()); var serviceProvider = services.BuildServiceProvider(); var logger = serviceProvider.GetRequiredService>(); @@ -17,120 +17,262 @@ IPiecesClient client = new PiecesClient(logger); var assets = await client.GetAssetsAsync().ConfigureAwait(false); -// Create a Chat completion -IChatClient chatClient = new PiecesChatClient(client); - -#region Example 1 - a continuous conversation +#region A continuous conversation // This example shows a continuous conversation. It starts by setting the system prompt, // then adds a user message, sends this to the chat completion, adds the response to the messages // then adds another user message and so on. This shows how to build up a conversation. -// // Set the system prompt -// var chatMessages = new List{ -// new(ChatRole.System, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), -// }; +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat", logger: logger); + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.System, "Answer every question from now on in the style of a pirate."), +// }; -// // Add the first user message -// chatMessages.Add(new ChatMessage(ChatRole.User, "How can I make a web request")); +// // Add the first user message +// chatMessages.Add(new ChatMessage(ChatRole.User, "Hello")); -// // Ask the question -// var response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); -// // Console.WriteLine(response); +// // Ask the question +// var response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// Console.WriteLine(response); -// // Add the response to the conversation -// chatMessages.Add(new ChatMessage(ChatRole.User, response.Message!.Text)); +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); -// // Ask the next question, but change the model -// chatMessages.Add(new ChatMessage(ChatRole.User, "I want to do this in C#")); +// // Ask the next question, but change the model +// chatMessages.Add(new ChatMessage(ChatRole.User, "Give me a single line of code to create a hello world in C#. 
No other text.")); -// var options = new ChatOptions() -// { -// ModelId = "Claude 3.5 Sonnet" -// }; -// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); -// // Console.WriteLine(response); +// var options = new ChatOptions() +// { +// ModelId = "Claude 3.5 Sonnet" +// }; +// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); + +// // Ask the next question +// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); + +// response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// Console.WriteLine(response); + +// Console.WriteLine(); +// } + +#endregion A continuous conversation -// // Add the response to the conversation -// chatMessages.Add(new ChatMessage(ChatRole.User, response.Message!.Text)); +#region A continuous conversation that doesn't persist the chat -// // Ask the next question -// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); +// This example shows a continuous conversation. It starts by setting the system prompt, +// then adds a user message, sends this to the chat completion, adds the response to the messages +// then adds another user message and so on. This shows how to build up a conversation. -// // This time stream the response -// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) // { -// Console.Write(r.Text); +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "PersistChat", false } +// } +// }; + +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat that is deleted", logger: logger); + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.System, "Answer every question from now on in the style of a pirate."), +// }; + +// // Add the first user message +// chatMessages.Add(new ChatMessage(ChatRole.User, "Hello")); + +// // Ask the question +// var response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); + +// // Ask the next question, but change the model +// chatMessages.Add(new ChatMessage(ChatRole.User, "Give me a single line of code to create a hello world in C#. No other text.")); + +// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); + +// // Ask the next question +// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); + +// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// Console.WriteLine(response); + +// Console.WriteLine(); // } -// Console.WriteLine(); +#endregion A continuous conversation that doesn't persist the chat + +#region A continuous streaming conversation + +// This example shows a continuous conversation. 
It starts by setting the system prompt, +// then adds a user message, sends this to the chat completion, adds the response to the messages +// then adds another user message and so on. This shows how to build up a conversation. + +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat", logger: logger); + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.System, "Answer every question from now on in the style of a pirate."), +// }; + +// // Add the first user message +// chatMessages.Add(new ChatMessage(ChatRole.User, "Hello")); + +// var response = ""; + +// // Ask the question +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// response += r.Text; +// } + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response)); + +// // Ask the next question, but change the model +// chatMessages.Add(new ChatMessage(ChatRole.User, "Give me a single line of code to create a hello world in C#. No other text.")); + +// var options = new ChatOptions() +// { +// ModelId = "Claude 3.5 Sonnet" +// }; -#endregion Example 1 - a continuous conversation +// response = ""; + +// // Ask the question +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// response += r.Text; +// } + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response)); + +// // Ask the next question +// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } + +// Console.WriteLine(); +// } -#region Example 2 - live context +#endregion A continuous streaming conversation + +#region Live context // This example shows how to use live context in a chat completion via the Additional Properties dictionary. 
// To run this, read this GitHub issue in your browser before running this: https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/issues/8 -// var chatMessages = new List{ -// new ChatMessage(ChatRole.User, "Describe the Add support for Microsoft.Extensions.AI github issue I was just reading about in my browser") -// }; -// var options = new ChatOptions() // { -// AdditionalProperties = new AdditionalPropertiesDictionary{ -// { "LiveContext", true }, -// { "LiveContextTimeSpan", TimeSpan.FromHours(1) } +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, logger: logger); + +// var chatMessages = new List{ +// new(ChatRole.User, "Describe the Add support for Microsoft.Extensions.AI github issue I was just reading about in my browser") +// }; + +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "LiveContext", true }, +// { "LiveContextTimeSpan", TimeSpan.FromHours(1) } +// } +// }; + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); // } -// }; -// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) -// { -// Console.Write(r.Text); +// Console.WriteLine(); // } -// Console.WriteLine(); +#endregion Live context -#endregion Example 2 - live context - -#region Example 3 - Create an asset and use it in a chat +#region Create an asset and use it in a chat // This example creates a new asset. It then uses it in a chat asking a question about it. -var assetCode = @"from pieces_os_client.wrapper import PiecesClient -pieces_client = PiecesClient() +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, logger: logger); + +// var assetCode = @"from pieces_os_client.wrapper import PiecesClient + +// pieces_client = PiecesClient() -# Set the question you want to ask -question = 'What is Object-Oriented Programming?' +// # Set the question you want to ask +// question = 'What is Object-Oriented Programming?' 
-# Ask the question and stream the response -for response in pieces_client.copilot.stream_question(question): - if response.question: - # Each answer is a chunk of the entire response to the question - answers = response.question.answers.iterable - for answer in answers: - print(answer.text,end='') +// # Ask the question and stream the response +// for response in pieces_client.copilot.stream_question(question): +// if response.question: +// # Each answer is a chunk of the entire response to the question +// answers = response.question.answers.iterable +// for answer in answers: +// print(answer.text,end='') -# Close the client -pieces_client.close() -"; +// # Close the client +// pieces_client.close() +// "; -var newAsset = await assets.CreateAssetAsync(assetCode).ConfigureAwait(false); -Console.WriteLine($"Asset created = name {newAsset.Name}, id: {newAsset.Id}"); +// var newAsset = await assets.CreateAssetAsync(assetCode).ConfigureAwait(false); +// Console.WriteLine($"Asset created = name {newAsset.Name}, id: {newAsset.Id}"); -var options = new ChatOptions() -{ - AdditionalProperties = new AdditionalPropertiesDictionary{ - { "AssetIds", new List{newAsset.Id} } - } -}; +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "AssetIds", new List{newAsset.Id} } +// } +// }; -var chatMessages = new List{ - new(ChatRole.User, "Describe this program") -}; +// var chatMessages = new List{ +// new(ChatRole.User, "Describe this program") +// }; -await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) -{ - Console.Write(r.Text); -} +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } +// } + +#endregion Create an asset and use it in a chat + +#region Chat client with a model + +// { +// var llamaModel = await client.DownloadModelAsync("llama-3 8B").ConfigureAwait(false); +// IChatClient modelChatClient = new PiecesChatClient(client, logger, llamaModel); + +// var chatMessages = new List{ +// new(ChatRole.User, "What model are you using"), +// }; + +// var response = await modelChatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// Console.WriteLine(response.Message.Text); +// } -#endregion Example 3 - assets \ No newline at end of file +#endregion Chat client with a model \ No newline at end of file diff --git a/src/Extensions/Pieces.Extensions.AI.csproj b/src/Extensions/Pieces.Extensions.AI.csproj index 1454425..e6dde34 100644 --- a/src/Extensions/Pieces.Extensions.AI.csproj +++ b/src/Extensions/Pieces.Extensions.AI.csproj @@ -31,7 +31,7 @@ 2024 Mesh Intelligent Technologies, Inc Pieces for Devlopers Pieces for Devlopers - Support for the Microsoft AI Extensions using Pieces for Developers + Support for Microsoft.Extensions.AI using Pieces for Developers as the LLM MIT copilot ai llm artificial intelligence code developer extensions https://pieces.app diff --git a/src/Extensions/PiecesChatClient.cs b/src/Extensions/PiecesChatClient.cs index a5f9673..61a9b59 100644 --- a/src/Extensions/PiecesChatClient.cs +++ b/src/Extensions/PiecesChatClient.cs @@ -1,19 +1,28 @@ using System.Runtime.CompilerServices; using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; using Pieces.Os.Core.SdkModel; using Pieces.OS.Client; using Pieces.OS.Client.Copilot; namespace Pieces.Extensions.AI; -public class PiecesChatClient(IPiecesClient piecesClient, Model? 
model = null) : IChatClient +public class PiecesChatClient(IPiecesClient piecesClient, string chatName = "", ILogger? logger = null, Model? model = null) : IChatClient { private readonly IPiecesClient piecesClient = piecesClient; + private readonly string chatName = chatName; + private readonly ILogger? logger = logger; private Model? model = model; private IPiecesCopilot? piecesCopilot; + // A cache of chats. These are keyed on a long string that is in the format "..." + // This is so we can take a set of chat messages and see if there is an existing conversation. If so, we can re-use this. + // If not, we create a new conversation. + // As new messages get added to the conversation, delete the old cache entry and create a new one. + private readonly Dictionary chatCache = []; + /// - public ChatClientMetadata Metadata => new ChatClientMetadata("Pieces for Developers", new Uri("https://pieces.app"), (model?.Name) ?? "Unknown model"); + public ChatClientMetadata Metadata => new("Pieces for Developers", new Uri("https://pieces.app"), (model?.Name) ?? "Unknown model"); // // Summary: @@ -30,6 +39,8 @@ public class PiecesChatClient(IPiecesClient piecesClient, Model? model = null) : // ["LiveContext"] = true/false; // set to true to use live context. Default to false. // ["LiveContextTimeSpan"] = TimeSpan?; // The timespan to use for live context. Defaults to 15 minutes if not set. // ["AssetIds"] = []; // Set to an enumerable of asset ids to use saved assets in the chat. Default to none. + // ["PersistChat] = true/false; // By defaults these chats are persisted in Pieces. If this is set to false, + // the chat is deleted after the response is returned // // cancellationToken: // The System.Threading.CancellationToken to monitor for cancellation requests. @@ -43,66 +54,230 @@ public class PiecesChatClient(IPiecesClient piecesClient, Model? model = null) : // intermediate messages generated implicitly by the client, including any messages // for roundtrips to the model as part of the implementation of this request, will // be included. + // + // For every unique set of chat messages, a new Pieces conversation will be created, + // unless PersistChat is set to false. A conversation will be reused if + // it matches an existing conversation created by this chat client. Conversations created outside of this instance of the + // client will not be used. public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - // Build the seeds - var seeds = GetSeedsFromChatMessages(chatMessages); - // Check that the last message is from the user - if not we don't have a question to ask. 
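
As a reference for the options documented in the comments above, here is a minimal sketch of how a caller might combine the Pieces-specific keys in `ChatOptions.AdditionalProperties`. The key names are taken from those comments; the surrounding client and logger setup mirrors the Extensions.Example project, and `savedAssetId` is a hypothetical id of a previously saved asset.

```csharp
// Sketch only: assumes `client` is an IPiecesClient and `logger` an ILogger,
// as set up in the Extensions.Example project. `savedAssetId` is hypothetical.
IChatClient chatClient = new PiecesChatClient(client, chatName: "Options demo", logger: logger);

var options = new ChatOptions
{
    ModelId = "Claude 3.5 Sonnet", // optional: pick a Pieces model by name
    AdditionalProperties = new AdditionalPropertiesDictionary
    {
        { "LiveContext", true },                             // include live context
        { "LiveContextTimeSpan", TimeSpan.FromMinutes(30) }, // live context window (defaults to 15 minutes)
        { "AssetIds", new List<string> { savedAssetId } },   // attach a saved asset to the chat
        { "PersistChat", false },                            // delete the Pieces conversation after the response
    }
};

var chatMessages = new List<ChatMessage>
{
    new(ChatRole.User, "Summarise the attached snippet")
};

var completion = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false);
Console.WriteLine(completion.Message.Text);
```
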
- var last = seeds.Last(); - // Remove the last, as this is passed as the question - seeds.Remove(last); + { + var chatWithCacheKey = await GetOrCreateChat(chatMessages, options, cancellationToken).ConfigureAwait(false); + + // Ask the question + var response = await chatWithCacheKey.Chat.AskQuestionAsync(chatMessages.Last().Text!, + liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), + cancellationToken: cancellationToken).ConfigureAwait(false); + + // Build the response + var responseMessage = new ChatMessage(ChatRole.Assistant, response); - // Create the chat - var chat = await CreateChatAsync(options, seeds, cancellationToken).ConfigureAwait(false); + // Cache or delete the chat depending on the options + await CacheOrDeleteChat(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); + + // Build the response + return new ChatCompletion([responseMessage]) + { + ModelId = chatWithCacheKey.Chat.Model.Id, + FinishReason = ChatFinishReason.Stop, + CompletionId = chatWithCacheKey.Chat.Id, + RawRepresentation = responseMessage.Text, + CreatedAt = DateTime.UtcNow, + Usage = new UsageDetails { + AdditionalProperties = options?.AdditionalProperties is null ? null : new AdditionalPropertiesDictionary(options.AdditionalProperties) + } + }; + } + + // + // Summary: + // Sends chat messages to the model and streams the response messages. + // + // Parameters: + // chatMessages: + // The chat content to send. + // + // options: + // The chat options to configure the request. To use Pieces specific features, set the following + // in the AdditionalProperties collection: + // + // ["LiveContext"] = true/false; // set to true to use live context. Default to false. + // ["LiveContextTimeSpan"] = TimeSpan?; // The timespan to use for live context. Defaults to 15 minutes if not set. + // ["AssetIds"] = []; // Set to an enumerable of asset ids to use saved assets in the chat. Default to none. + // ["PersistChat] = true/false; // By defaults these chats are persisted in Pieces. If this is set to false, + // the chat is deleted after the response is returned + // + // cancellationToken: + // The System.Threading.CancellationToken to monitor for cancellation requests. + // The default is System.Threading.CancellationToken.None. + // + // Returns: + // The response messages generated by the client. + // + // Remarks: + // The returned messages will not have been added to chatMessages. However, any + // intermediate messages generated implicitly by the client, including any messages + // for roundtrips to the model as part of the implementation of this request, will + // be included. + // + // For every unique set of chat messages, a new Pieces conversation will be created, + // unless PersistChat is set to false. A conversation will be reused if + // it matches an existing conversation created by this chat client. Conversations created outside of this instance of the + // client will not be used. + public async IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, + ChatOptions? 
options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var chatWithCacheKey = await GetOrCreateChat(chatMessages, options, cancellationToken).ConfigureAwait(false); + + var responseText = ""; // Ask the question - var response = await chat.AskQuestionAsync(last.Message, - liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), - cancellationToken: cancellationToken).ConfigureAwait(false); + await foreach (var r in chatWithCacheKey.Chat.AskStreamingQuestionAsync(chatMessages.Last().Text!, + liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), + cancellationToken: cancellationToken)) + { + var response = new StreamingChatCompletionUpdate() + { + Text = r, + CompletionId = chatWithCacheKey.Chat.Id, + Role = ChatRole.Assistant, + FinishReason = null, + CreatedAt = DateTime.UtcNow, + RawRepresentation = r, + AuthorName = "Pieces for Developers", + }; + + responseText += r; + + yield return response; + } // Build the response - return new ChatCompletion([new ChatMessage(ChatRole.Assistant, response)]) + var responseMessage = new ChatMessage(ChatRole.Assistant, responseText); + + // Cache or delete the chat depending on the options + await CacheOrDeleteChat(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); + + yield return new StreamingChatCompletionUpdate() { - ModelId = chat.Model.Id, + Text = responseText, + CompletionId = chatWithCacheKey.Chat.Id, + Role = ChatRole.Assistant, FinishReason = ChatFinishReason.Stop, - CompletionId = chat.Id, + CreatedAt = DateTime.UtcNow, + RawRepresentation = responseText, + AuthorName = "Pieces for Developers", }; } - private static IEnumerable? GetAssetIdsFromOptions(ChatOptions? options) + private async Task CacheOrDeleteChat(IList chatMessages, + ChatOptions? options, + ChatWithCacheKey chatWithCacheKey, + ChatMessage responseMessage, + CancellationToken cancellationToken) + { + // If we got this chat from the cache, remove the old entry as the messages will be updated to reflect this response + chatCache.Remove(chatWithCacheKey.CacheKey); + + var persist = GetBoolValueFromOptions(options, "PersistChat", true); + + if (persist) + { + // Cache the chat + var chatCacheKey = string.Join("", chatMessages.Append(responseMessage).Select(m => $"{m.Role}{m.Text}")); + chatCache.Add(chatCacheKey, chatWithCacheKey.Chat); + } + else + { + // Delete the chat + await piecesCopilot!.DeleteChatAsync(chatWithCacheKey.Chat, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private record ChatWithCacheKey(string CacheKey, ICopilotChat Chat); + + private async Task GetOrCreateChat(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken) { - if (options is not null && options.AdditionalProperties is not null) + // Look up this chat in our cache + var chatCacheKey = GetChatKey(chatMessages); + if (chatCache.TryGetValue(chatCacheKey, out var chat)) { - if (options!.AdditionalProperties!.TryGetValue("AssetIds", out object? 
assetIdsVal)) + logger?.LogInformation("Reusing conversation with Id {id}", chat?.Id); + + // Check the chat model - this can change between calls + model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); + if (model is not null && model.Id != chat!.Model.Id) { - return assetIdsVal as IEnumerable; + logger?.LogInformation("Updating conversation model to {model_name}", model.Name); + chat!.Model = model; } } + else + { + logger?.LogInformation("Creating a new conversation"); + + // Build the seeds + // Remove the last, as this is passed as the question + var seeds = GetSeedsFromChatMessages(chatMessages); + seeds.Remove(seeds.Last()); + + // Create the chat if we don't have one from our cache + // Ensure the copilot has been created + piecesCopilot ??= await piecesClient.GetCopilotAsync().ConfigureAwait(false); + + // extract the relevant properties from the options + model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); + var assetIds = GetAssetIdsFromOptions(options); + var liveContext = GetBoolValueFromOptions(options, "LiveContext"); + + // Create a new chat using all the messages that have been sent + chat = await piecesCopilot.CreateSeededChatAsync(chatName, + model: model, + seeds: seeds, + assetIds: assetIds, + useLiveContext: liveContext, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + return new(chatCacheKey, chat!); + } + + private static string GetChatKey(IList chatMessages) + { + return string.Join("", chatMessages.Take(chatMessages.Count - 1).Select(m => $"{m.Role}{m.Text}")); + } + + private static IEnumerable? GetAssetIdsFromOptions(ChatOptions? options) + { + if (options is not null && + options.AdditionalProperties is not null && + options!.AdditionalProperties!.TryGetValue("AssetIds", out object? assetIdsVal)) + { + return assetIdsVal as IEnumerable; + } return null; } - private static bool GetLiveContextFromOptions(ChatOptions? options) + private static bool GetBoolValueFromOptions(ChatOptions? options, string propertyName, bool defaultValue = false) { - if (options is not null && options.AdditionalProperties is not null) + if (options is not null && + options.AdditionalProperties is not null && + options!.AdditionalProperties!.TryGetValue(propertyName, out object? boolVal)) { - if (options!.AdditionalProperties!.TryGetValue("LiveContext", out object? liveContextVal)) - { - return liveContextVal is bool v && v; - } + return boolVal is bool v && v; } - return false; + return defaultValue; } private static TimeSpan? GetLiveContextTimeSpanFromOptions(ChatOptions? options) { - if (options is not null && options.AdditionalProperties is not null) + if (options is not null && + options.AdditionalProperties is not null && + options!.AdditionalProperties!.TryGetValue("LiveContextTimeSpan", out object? liveContextTimeSpanVal)) { - if (options!.AdditionalProperties!.TryGetValue("LiveContextTimeSpan", out object? liveContextTimeSpanVal)) - { - return liveContextTimeSpanVal as TimeSpan?; - } + return liveContextTimeSpanVal as TimeSpan?; } return null; @@ -116,6 +291,7 @@ private static List GetSeedsFromChatMessages(IList cha throw new ArgumentException("No chat messages provided", nameof(chatMessages)); } + // Check that the last message is from the user - if not we don't have a question to ask. if (!chatMessages.Last().Role.Equals(ChatRole.User)) { throw new PiecesClientException($"The last messages is expected to be a user message as this is the question that will be asked. 
It is a {chatMessages.Last().Role} message."); @@ -144,56 +320,6 @@ private static List GetSeedsFromChatMessages(IList cha }; } - /// - public async IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, - ChatOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - // Build the seeds - var seeds = GetSeedsFromChatMessages(chatMessages); - // Check that the last message is from the user - if not we don't have a question to ask. - var last = seeds.Last(); - // Remove the last, as this is passed as the question - seeds.Remove(last); - - // Create the chat - var chat = await CreateChatAsync(options, seeds, cancellationToken).ConfigureAwait(false); - - // Ask the question - await foreach (var r in chat.AskStreamingQuestionAsync(last.Message, - liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), - cancellationToken: cancellationToken)) - { - var response = new StreamingChatCompletionUpdate() - { - Text = r, - CompletionId = chat.Id, - Role = ChatRole.Assistant - }; - - yield return response; - } - } - - private async Task CreateChatAsync(ChatOptions? options, List seeds, CancellationToken cancellationToken) - { - // Ensure the copilot has been created - piecesCopilot ??= await piecesClient.GetCopilotAsync().ConfigureAwait(false); - - // extract the relevant properties from the options - model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); - var assetIds = GetAssetIdsFromOptions(options); - var liveContext = GetLiveContextFromOptions(options); - - // Create a new chat using all the messages that have been sent - return await piecesCopilot.CreateSeededChatAsync("", - model: model, - seeds: seeds, - assetIds: assetIds, - useLiveContext: liveContext, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - /// public void Dispose() { From f91b084fc78609bae669b51a2bb48e1451160bef Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Fri, 11 Oct 2024 13:02:43 -0700 Subject: [PATCH 09/24] Fixing build after merge --- src/Client/Copilot/PiecesCopilot.cs | 36 ++++++++++------------ src/Extensions/Pieces.Extensions.AI.csproj | 3 +- src/Extensions/PiecesChatClient.cs | 15 ++++----- 3 files changed, 26 insertions(+), 28 deletions(-) diff --git a/src/Client/Copilot/PiecesCopilot.cs b/src/Client/Copilot/PiecesCopilot.cs index 670a449..934605d 100644 --- a/src/Client/Copilot/PiecesCopilot.cs +++ b/src/Client/Copilot/PiecesCopilot.cs @@ -12,10 +12,10 @@ public class PiecesCopilot : IPiecesCopilot private readonly Application application; private readonly PiecesApis piecesApis; - internal PiecesCopilot(ILogger? logger, - Model model, - Application application, - WebSocketBackgroundClient client, + internal PiecesCopilot(ILogger? logger, + Model model, + Application application, + WebSocketBackgroundClient client, PiecesApis piecesApis) { this.logger = logger; @@ -46,16 +46,15 @@ public IEnumerable Chats /// The LLM model to use /// A cancellation token /// The new chat - public async Task CreateChatAsync(string chatName = "", - ChatContext? chatContext = null, - Model? model = default, - CancellationToken cancellationToken = default) + public Task CreateChatAsync(string chatName = "", + ChatContext? chatContext = null, + Model? 
model = default, + CancellationToken cancellationToken = default) { return CreateSeededChatAsync(chatName: chatName, seeds: null, - assetIds: assetIds, + chatContext: chatContext, model: model, - useLiveContext: useLiveContext, cancellationToken: cancellationToken); } @@ -64,17 +63,15 @@ public async Task CreateChatAsync(string chatName = "", /// /// An optional name for the chat. If nothing is provided, the name will be New conversation /// A set of seeded messages for the conversation - /// An optional list of asset Ids to add to the chat + /// An optional list of asset Ids to add to the chat /// The LLM model to use - /// Should this chat use live context or not /// A cancellation token /// The new chat public async Task CreateSeededChatAsync(string chatName = "", - IEnumerable? seeds = null, - IEnumerable? assetIds = null, - Model? model = default, - bool useLiveContext = false, - CancellationToken cancellationToken = default) + IEnumerable? seeds = null, + ChatContext? chatContext = null, + Model? model = default, + CancellationToken cancellationToken = default) { chatName = string.IsNullOrWhiteSpace(chatName) ? "New Conversation" : chatName; @@ -113,8 +110,9 @@ public async Task CreateSeededChatAsync(string chatName = "", var seededConversation = new SeededConversation(type: ConversationTypeEnum.COPILOT, name: chatName, messages: conversationMessages, - pipeline: pipeline; - var conversation = await conversationsApi.ConversationsCreateSpecificConversationAsync( + pipeline: pipeline + ); + var conversation = await piecesApis.ConversationsApi.ConversationsCreateSpecificConversationAsync( seededConversation: seededConversation, cancellationToken: cancellationToken).ConfigureAwait(false); diff --git a/src/Extensions/Pieces.Extensions.AI.csproj b/src/Extensions/Pieces.Extensions.AI.csproj index e6dde34..cb19ac5 100644 --- a/src/Extensions/Pieces.Extensions.AI.csproj +++ b/src/Extensions/Pieces.Extensions.AI.csproj @@ -2,7 +2,6 @@ - @@ -13,7 +12,7 @@ - + diff --git a/src/Extensions/PiecesChatClient.cs b/src/Extensions/PiecesChatClient.cs index 61a9b59..0e484bb 100644 --- a/src/Extensions/PiecesChatClient.cs +++ b/src/Extensions/PiecesChatClient.cs @@ -65,7 +65,6 @@ public async Task CompleteAsync(IList chatMessages, // Ask the question var response = await chatWithCacheKey.Chat.AskQuestionAsync(chatMessages.Last().Text!, - liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), cancellationToken: cancellationToken).ConfigureAwait(false); // Build the response @@ -133,8 +132,7 @@ public async IAsyncEnumerable CompleteStreamingAs // Ask the question await foreach (var r in chatWithCacheKey.Chat.AskStreamingQuestionAsync(chatMessages.Last().Text!, - liveContextTimeSpan: GetLiveContextTimeSpanFromOptions(options), - cancellationToken: cancellationToken)) + cancellationToken: cancellationToken)) { var response = new StreamingChatCompletionUpdate() { @@ -227,15 +225,18 @@ private async Task GetOrCreateChat(IList chatMess // extract the relevant properties from the options model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); - var assetIds = GetAssetIdsFromOptions(options); - var liveContext = GetBoolValueFromOptions(options, "LiveContext"); + + var chatContext = new ChatContext + { + AssetIds = GetAssetIdsFromOptions(options), + LiveContext = GetBoolValueFromOptions(options, "LiveContext"), + }; // Create a new chat using all the messages that have been sent chat = await piecesCopilot.CreateSeededChatAsync(chatName, model: model, seeds: seeds, - 
assetIds: assetIds, - useLiveContext: liveContext, + chatContext: chatContext, cancellationToken: cancellationToken).ConfigureAwait(false); } From 6708c0ed8399198b33911458435c77b99fe1b25d Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Fri, 11 Oct 2024 15:52:12 -0700 Subject: [PATCH 10/24] Working chat client --- src/Client/Copilot/CopilotChat.cs | 10 +- src/Client/Copilot/IPiecesCopilot.cs | 2 +- src/Client/Copilot/PiecesCopilot.cs | 4 +- src/Client/Copilot/Role.cs | 14 +++ src/Extensions.Example/Program.cs | 139 +++++++++++++++++++++++---- src/Extensions/PiecesChatClient.cs | 88 ++++++++--------- 6 files changed, 189 insertions(+), 68 deletions(-) diff --git a/src/Client/Copilot/CopilotChat.cs b/src/Client/Copilot/CopilotChat.cs index b2e003c..b523d17 100644 --- a/src/Client/Copilot/CopilotChat.cs +++ b/src/Client/Copilot/CopilotChat.cs @@ -34,15 +34,21 @@ internal CopilotChat(ILogger? logger, IWebSocketBackgroundClient webSocketClient, Conversation conversation, PiecesApis piecesApis, - ChatContext? chatContext) + ChatContext? chatContext, + IEnumerable? seeds = null) { this.logger = logger; - Model = model; this.application = application; this.webSocketClient = webSocketClient; this.conversation = conversation; this.piecesApis = piecesApis; + + if (seeds is not null) + { + messages.AddRange(seeds.Select(s => new Message(s.Role, s.Message))); + } + Model = model; ChatContext = chatContext; } diff --git a/src/Client/Copilot/IPiecesCopilot.cs b/src/Client/Copilot/IPiecesCopilot.cs index 91a7269..a0daa39 100644 --- a/src/Client/Copilot/IPiecesCopilot.cs +++ b/src/Client/Copilot/IPiecesCopilot.cs @@ -7,7 +7,7 @@ namespace Pieces.OS.Client.Copilot; /// /// /// -public record SeedMessage(QGPTConversationMessageRoleEnum Role, string Message); +public record SeedMessage(Role Role, string Message); public interface IPiecesCopilot { diff --git a/src/Client/Copilot/PiecesCopilot.cs b/src/Client/Copilot/PiecesCopilot.cs index 934605d..0c973c5 100644 --- a/src/Client/Copilot/PiecesCopilot.cs +++ b/src/Client/Copilot/PiecesCopilot.cs @@ -102,7 +102,7 @@ public async Task CreateSeededChatAsync(string chatName = "", { var fragment = new FragmentFormat(varString: new TransferableString(raw: s.Message)); return new SeededConversationMessage(model: model, - role: s.Role, + role: s.Role.ToQGPTRole(), fragment: fragment); }).ToList(); } @@ -118,7 +118,7 @@ public async Task CreateSeededChatAsync(string chatName = "", logger?.LogInformation("Conversation {name} created", chatName); - var chat = new CopilotChat(logger, model ?? Model, application, client, conversation, piecesApis, chatContext); + var chat = new CopilotChat(logger, model ?? 
Model, application, client, conversation, piecesApis, chatContext, seeds); copilotChats.Add(chat); logger?.LogInformation("Copilot chat {name} created", chatName); diff --git a/src/Client/Copilot/Role.cs b/src/Client/Copilot/Role.cs index 7b59367..81fc0ca 100644 --- a/src/Client/Copilot/Role.cs +++ b/src/Client/Copilot/Role.cs @@ -1,7 +1,21 @@ +using Pieces.Os.Core.SdkModel; + namespace Pieces.OS.Client.Copilot; public enum Role { + System, User, Assistant } + +public static class RoleExtensions +{ + public static QGPTConversationMessageRoleEnum ToQGPTRole(this Role role) => role switch + { + Role.System => QGPTConversationMessageRoleEnum.SYSTEM, + Role.User => QGPTConversationMessageRoleEnum.USER, + Role.Assistant => QGPTConversationMessageRoleEnum.ASSISTANT, + _ => throw new ArgumentException(nameof(role)), + }; +} \ No newline at end of file diff --git a/src/Extensions.Example/Program.cs b/src/Extensions.Example/Program.cs index 7a5e654..613b30e 100644 --- a/src/Extensions.Example/Program.cs +++ b/src/Extensions.Example/Program.cs @@ -25,7 +25,7 @@ // { // // Create a Chat completion -// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat", logger: logger); +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Continuous chat - {DateTime.Now.ToShortTimeString()}", logger: logger); // // Set the system prompt // var chatMessages = new List{ @@ -66,10 +66,66 @@ #endregion A continuous conversation -#region A continuous conversation that doesn't persist the chat +#region A continuous conversation that alters the messages // This example shows a continuous conversation. It starts by setting the system prompt, // then adds a user message, sends this to the chat completion, adds the response to the messages +// then adds another user message and so on. Part way through, the chat messages are changed from the +// current conversation. This will end up with a new conversation being created + +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Continuous chat that changes messages - {DateTime.Now.ToShortTimeString()}", logger: logger); + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.System, "Answer every question from now on in the style of a pirate."), +// }; + +// // Add the first user message +// chatMessages.Add(new ChatMessage(ChatRole.User, "Hello")); + +// // Ask the question +// var response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); + +// // Ask the next question, but change the model +// chatMessages.Add(new ChatMessage(ChatRole.User, "Give me a single line of code to create a hello world in C#. 
No other text.")); + +// var options = new ChatOptions() +// { +// ModelId = "Claude 3.5 Sonnet" +// }; +// response = await chatClient.CompleteAsync(chatMessages, options: options).ConfigureAwait(false); +// Console.WriteLine(response); + +// // Add the response to the conversation +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response.Message!.Text)); + +// // Add a question and answer so we deviate from the conversation +// chatMessages.Add(new ChatMessage(ChatRole.User, "What programming language is this?")); +// chatMessages.Add(new ChatMessage(ChatRole.Assistant, "This is in C#")); + +// // Ask the next question +// chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); + +// response = await chatClient.CompleteAsync(chatMessages).ConfigureAwait(false); +// Console.WriteLine(response); + +// Console.WriteLine(); +// } + +#endregion A continuous conversation that alters the messages + +#region A continuous conversation that doesn't persist the chat + +// This example shows a continuous conversation that alters the messages. +// +// It starts by setting the system prompt, +// then adds a user message, sends this to the chat completion, adds the response to the messages // then adds another user message and so on. This shows how to build up a conversation. // { @@ -81,7 +137,7 @@ // }; // // Create a Chat completion -// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat that is deleted", logger: logger); +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Continuous chat that is deleted - {DateTime.Now.ToShortTimeString()}", logger: logger); // // Set the system prompt // var chatMessages = new List{ @@ -126,7 +182,7 @@ // { // // Create a Chat completion -// IChatClient chatClient = new PiecesChatClient(client, chatName: "Continuous chat", logger: logger); +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Continuous streaming chat - {DateTime.Now.ToShortTimeString()}", logger: logger); // // Set the system prompt // var chatMessages = new List{ @@ -136,17 +192,17 @@ // // Add the first user message // chatMessages.Add(new ChatMessage(ChatRole.User, "Hello")); -// var response = ""; - // // Ask the question // await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) // { // Console.Write(r.Text); -// response += r.Text; -// } -// // Add the response to the conversation -// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response)); +// // Once done, store the result +// if (r.FinishReason == ChatFinishReason.Stop) +// { +// chatMessages.Add(new(ChatRole.Assistant, r.Text)); +// } +// } // // Ask the next question, but change the model // chatMessages.Add(new ChatMessage(ChatRole.User, "Give me a single line of code to create a hello world in C#. 
No other text.")); @@ -156,17 +212,17 @@ // ModelId = "Claude 3.5 Sonnet" // }; -// response = ""; - // // Ask the question // await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) // { // Console.Write(r.Text); -// response += r.Text; -// } -// // Add the response to the conversation -// chatMessages.Add(new ChatMessage(ChatRole.Assistant, response)); +// // Once done, store the result +// if (r.FinishReason == ChatFinishReason.Stop) +// { +// chatMessages.Add(new(ChatRole.Assistant, r.Text)); +// } +// } // // Ask the next question // chatMessages.Add(new ChatMessage(ChatRole.User, "Comment this code")); @@ -188,7 +244,7 @@ // { // // Create a Chat completion -// IChatClient chatClient = new PiecesChatClient(client, logger: logger); +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Live context chat - {DateTime.Now.ToShortTimeString()}", logger: logger); // var chatMessages = new List{ // new(ChatRole.User, "Describe the Add support for Microsoft.Extensions.AI github issue I was just reading about in my browser") @@ -212,13 +268,56 @@ #endregion Live context +#region Live context turned on after a question + +// This example shows how to use live context in a chat completion via the Additional Properties dictionary. +// To run this, read this GitHub issue in your browser before running this: https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/issues/8 + +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Live context chat - {DateTime.Now.ToShortTimeString()}", logger: logger); + +// var chatMessages = new List{ +// new(ChatRole.User, "Hello") +// }; + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages).ConfigureAwait(false)) +// { +// Console.Write(r.Text); + +// if (r.FinishReason == ChatFinishReason.Stop) +// { +// chatMessages.Add(new(ChatRole.Assistant, r.Text)); +// } +// } + +// chatMessages.Add(new(ChatRole.User, "Describe the Add support for Microsoft.Extensions.AI github issue I was just reading about in my browser")); + +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "LiveContext", true }, +// { "LiveContextTimeSpan", TimeSpan.FromHours(1) } +// } +// }; + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options: options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } + +// Console.WriteLine(); +// } + +#endregion Live context turned on after a question + #region Create an asset and use it in a chat // This example creates a new asset. It then uses it in a chat asking a question about it. 
// { // // Create a Chat completion -// IChatClient chatClient = new PiecesChatClient(client, logger: logger); +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Chat with an asset - {DateTime.Now.ToShortTimeString()}", logger: logger); // var assetCode = @"from pieces_os_client.wrapper import PiecesClient @@ -265,10 +364,10 @@ // { // var llamaModel = await client.DownloadModelAsync("llama-3 8B").ConfigureAwait(false); -// IChatClient modelChatClient = new PiecesChatClient(client, logger, llamaModel); +// IChatClient modelChatClient = new PiecesChatClient(client, chatName: $"Chat with a model - {DateTime.Now.ToShortTimeString()}", logger, llamaModel); // var chatMessages = new List{ -// new(ChatRole.User, "What model are you using"), +// new(ChatRole.User, "Which LLM are you?"), // }; // var response = await modelChatClient.CompleteAsync(chatMessages).ConfigureAwait(false); diff --git a/src/Extensions/PiecesChatClient.cs b/src/Extensions/PiecesChatClient.cs index 0e484bb..f05141b 100644 --- a/src/Extensions/PiecesChatClient.cs +++ b/src/Extensions/PiecesChatClient.cs @@ -61,7 +61,7 @@ public class PiecesChatClient(IPiecesClient piecesClient, string chatName = "", // client will not be used. public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { - var chatWithCacheKey = await GetOrCreateChat(chatMessages, options, cancellationToken).ConfigureAwait(false); + var chatWithCacheKey = await GetOrCreateChatAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); // Ask the question var response = await chatWithCacheKey.Chat.AskQuestionAsync(chatMessages.Last().Text!, @@ -71,7 +71,7 @@ public async Task CompleteAsync(IList chatMessages, var responseMessage = new ChatMessage(ChatRole.Assistant, response); // Cache or delete the chat depending on the options - await CacheOrDeleteChat(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); + await CacheOrDeleteChatAsync(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); // Build the response return new ChatCompletion([responseMessage]) @@ -81,7 +81,8 @@ public async Task CompleteAsync(IList chatMessages, CompletionId = chatWithCacheKey.Chat.Id, RawRepresentation = responseMessage.Text, CreatedAt = DateTime.UtcNow, - Usage = new UsageDetails { + Usage = new UsageDetails + { AdditionalProperties = options?.AdditionalProperties is null ? null : new AdditionalPropertiesDictionary(options.AdditionalProperties) } }; @@ -126,7 +127,7 @@ public async IAsyncEnumerable CompleteStreamingAs ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - var chatWithCacheKey = await GetOrCreateChat(chatMessages, options, cancellationToken).ConfigureAwait(false); + var chatWithCacheKey = await GetOrCreateChatAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); var responseText = ""; @@ -142,7 +143,7 @@ public async IAsyncEnumerable CompleteStreamingAs FinishReason = null, CreatedAt = DateTime.UtcNow, RawRepresentation = r, - AuthorName = "Pieces for Developers", + AuthorName = Metadata.ProviderName, }; responseText += r; @@ -154,7 +155,7 @@ public async IAsyncEnumerable CompleteStreamingAs var responseMessage = new ChatMessage(ChatRole.Assistant, responseText); // Cache or delete the chat depending on the options - await CacheOrDeleteChat(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); + await CacheOrDeleteChatAsync(chatMessages, options, chatWithCacheKey, responseMessage, cancellationToken).ConfigureAwait(false); yield return new StreamingChatCompletionUpdate() { @@ -164,15 +165,15 @@ public async IAsyncEnumerable CompleteStreamingAs FinishReason = ChatFinishReason.Stop, CreatedAt = DateTime.UtcNow, RawRepresentation = responseText, - AuthorName = "Pieces for Developers", + AuthorName = Metadata.ProviderName, }; } - private async Task CacheOrDeleteChat(IList chatMessages, - ChatOptions? options, - ChatWithCacheKey chatWithCacheKey, - ChatMessage responseMessage, - CancellationToken cancellationToken) + private async Task CacheOrDeleteChatAsync(IList chatMessages, + ChatOptions? options, + ChatWithCacheKey chatWithCacheKey, + ChatMessage responseMessage, + CancellationToken cancellationToken) { // If we got this chat from the cache, remove the old entry as the messages will be updated to reflect this response chatCache.Remove(chatWithCacheKey.CacheKey); @@ -194,7 +195,7 @@ private async Task CacheOrDeleteChat(IList chatMessages, private record ChatWithCacheKey(string CacheKey, ICopilotChat Chat); - private async Task GetOrCreateChat(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken) + private async Task GetOrCreateChatAsync(IList chatMessages, ChatOptions? 
options, CancellationToken cancellationToken) { // Look up this chat in our cache var chatCacheKey = GetChatKey(chatMessages); @@ -209,9 +210,14 @@ private async Task GetOrCreateChat(IList chatMess logger?.LogInformation("Updating conversation model to {model_name}", model.Name); chat!.Model = model; } + + chat!.ChatContext = CreateChatContextFromOptions(options); } else { + // Ensure the copilot has been created + piecesCopilot ??= await piecesClient.GetCopilotAsync().ConfigureAwait(false); + logger?.LogInformation("Creating a new conversation"); // Build the seeds @@ -220,44 +226,52 @@ private async Task GetOrCreateChat(IList chatMess seeds.Remove(seeds.Last()); // Create the chat if we don't have one from our cache - // Ensure the copilot has been created - piecesCopilot ??= await piecesClient.GetCopilotAsync().ConfigureAwait(false); // extract the relevant properties from the options model = await GetModelFromChatOptionsAsync(options, cancellationToken).ConfigureAwait(false); - var chatContext = new ChatContext - { - AssetIds = GetAssetIdsFromOptions(options), - LiveContext = GetBoolValueFromOptions(options, "LiveContext"), - }; - // Create a new chat using all the messages that have been sent chat = await piecesCopilot.CreateSeededChatAsync(chatName, model: model, seeds: seeds, - chatContext: chatContext, + chatContext: CreateChatContextFromOptions(options), cancellationToken: cancellationToken).ConfigureAwait(false); } return new(chatCacheKey, chat!); } - private static string GetChatKey(IList chatMessages) + private static ChatContext CreateChatContextFromOptions(ChatOptions? options) { - return string.Join("", chatMessages.Take(chatMessages.Count - 1).Select(m => $"{m.Role}{m.Text}")); + return new ChatContext + { + AssetIds = GetValueFromOptions>(options, "AssetIds"), + LiveContext = GetBoolValueFromOptions(options, "LiveContext"), + LiveContextTimeSpan = GetValueFromOptions(options, "LiveContextTimeSpan", null), + Files = GetValueFromOptions>(options, "Files"), + Folders = GetValueFromOptions>(options, "Folders"), + }; } - private static IEnumerable? GetAssetIdsFromOptions(ChatOptions? options) + private static string GetChatKey(IList chatMessages) => string.Join("", chatMessages.Take(chatMessages.Count - 1).Select(m => $"{m.Role}{m.Text}")); + + private static T? GetValueFromOptions(ChatOptions? options, string propertyName, T? defaultValue = default) { if (options is not null && options.AdditionalProperties is not null && - options!.AdditionalProperties!.TryGetValue("AssetIds", out object? assetIdsVal)) + options!.AdditionalProperties!.TryGetValue(propertyName, out object? val)) { - return assetIdsVal as IEnumerable; + if (val is T tValue) + { + return tValue; + } + else if (typeof(T).IsValueType && val is IConvertible) + { + return (T)Convert.ChangeType(val, typeof(T)); + } } - return null; + return defaultValue; } private static bool GetBoolValueFromOptions(ChatOptions? options, string propertyName, bool defaultValue = false) @@ -272,18 +286,6 @@ options.AdditionalProperties is not null && return defaultValue; } - private static TimeSpan? GetLiveContextTimeSpanFromOptions(ChatOptions? options) - { - if (options is not null && - options.AdditionalProperties is not null && - options!.AdditionalProperties!.TryGetValue("LiveContextTimeSpan", out object? 
liveContextTimeSpanVal)) - { - return liveContextTimeSpanVal as TimeSpan?; - } - - return null; - } - private static List GetSeedsFromChatMessages(IList chatMessages) { // Validate the chat messages - we need at least one, and the last should be a user message @@ -302,9 +304,9 @@ private static List GetSeedsFromChatMessages(IList cha { var role = c.Role switch { - ChatRole when c.Role == ChatRole.System => QGPTConversationMessageRoleEnum.SYSTEM, - ChatRole when c.Role == ChatRole.User => QGPTConversationMessageRoleEnum.USER, - _ => QGPTConversationMessageRoleEnum.ASSISTANT, + ChatRole when c.Role == ChatRole.System => OS.Client.Copilot.Role.System, + ChatRole when c.Role == ChatRole.User => OS.Client.Copilot.Role.User, + _ => OS.Client.Copilot.Role.Assistant, }; return new SeedMessage(role, c.Text!); }).ToList(); From 0fd2f2b057cf72caa8e49c94f0cab3f8ccab1687 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 15 Oct 2024 14:31:48 -0700 Subject: [PATCH 11/24] Splitting readmes into separate for each package --- README.md | 229 ++------------------- src/Client/Pieces.OS.Client.csproj | 2 +- src/Client/README.md | 97 +++++++++ src/Extensions/Pieces.Extensions.AI.csproj | 2 +- src/Extensions/README.md | 73 +++++++ 5 files changed, 193 insertions(+), 210 deletions(-) create mode 100644 src/Client/README.md create mode 100644 src/Extensions/README.md diff --git a/README.md b/README.md index 3cda67b..59bc14b 100644 --- a/README.md +++ b/README.md @@ -4,14 +4,20 @@ [![GitHub contributors](https://img.shields.io/github/contributors/pieces-app/pieces-os-client-sdk-for-csharp.svg)](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/graphs/contributors) [![GitHub issues by-label](https://img.shields.io/github/issues/pieces-app/pieces-os-client-sdk-for-csharp)](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/issues) -[![Pieces OS Client SDK on nuget](https://img.shields.io/nuget/vpre/Pieces.OS.Client)](https://www.nuget.org/packages/Pieces.OS.Client/) +[![Pieces.OS.Client SDK on nuget](https://img.shields.io/nuget/vpre/Pieces.OS.Client)](https://www.nuget.org/packages/Pieces.OS.Client/) +[![Pieces.Extensions.AI SDK on nuget](https://img.shields.io/nuget/vpre/Pieces.Extensions.AI)](https://www.nuget.org/packages/Pieces.Extensions.AI/) [![Discord](https://img.shields.io/badge/Discord-@layer5.svg?color=7389D8&label&logo=discord&logoColor=ffffff)](https://discord.gg/getpieces) [![Twitter Follow](https://img.shields.io/twitter/follow/pieces.svg?label=Follow)](https://twitter.com/getpieces) [![License](https://img.shields.io/github/license/pieces-app/pieces-os-client-sdk-for-csharp.svg)](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/blob/main/LICENSE) ## Introduction -The Pieces OS Client SDK is a powerful code engine package designed for writing applications on top of Pieces OS. It facilitates communication with a locally hosted server to enable features such as copilot chats, asset saving, and more. +The Pieces OS Client SDK is a set of powerful code engine packages designed for writing applications on top of Pieces OS. It facilitates communication with a locally hosted server to enable features such as copilot chats, asset saving, and more. 
+ +This SDK has 2 packages: + +- [Pieces.Os.Client](https://www.nuget.org/packages/Pieces.OS.Client/) - this is the core SDK package providing access to the features of Pieces from your C# application +- [Pieces.Extensions.AI](https://www.nuget.org/packages/Pieces.Extensions.AI/) - this is an implementation of [Microsoft.Extensions.AI](https://www.nuget.org/packages/Microsoft.Extensions.AI/) using Pieces to provide support for multiple LLMs, as well as adding context such as snippets, files, folders, and live context to your AI conversation. ## Features @@ -21,6 +27,7 @@ The Pieces SDK offers the following key features: 1. Asset Management: Save and manage assets and formats efficiently. 1. Local Server Interaction: Interact with a locally hosted server for various functionality. 1. Multi LLMs support: Use any Pieces supported LLM to power your app. +1. File, folder, and live context in copilot chats ## Installation @@ -31,220 +38,26 @@ To get started with the Pieces OS Client SDK, follow these steps: - [Windows](https://docs.pieces.app/installation-getting-started/windows) - [Linux](https://docs.pieces.app/installation-getting-started/linux) -2. **Install the SDK**: Use nuget to install the Pieces OS Client SDK package: +1. **Install the SDK**: Use nuget to install the Pieces OS Client SDK package: ```shell - dotnet add package Pieces.OS.Client + dotnet add package Pieces.OS.Client --prerelease ``` -## Pieces Client Examples - -There is an example project using the Pieces Client in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. - -### Set up the SDK - -To set up the SDK, create an instance of the `PiecesClient`. By default this will connect to Pieces OS running locally on the default port, but you can override this by passing in the URL and port as the `baseUrl` parameter. - -The `PiecesClient` constructor also optionally takes an `ILogger` to provide full logging for the SDK. - -```csharp -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; - -using Pieces.OS.Client; - -// Set up logging -var services = new ServiceCollection(); -services.AddLogging(builder =>builder.AddConsole()); -var serviceProvider = services.BuildServiceProvider(); -var logger = serviceProvider.GetRequiredService>(); - -// Create the Pieces client -IPiecesClient client = new PiecesClient(logger); - -// Write out the version -Console.WriteLine($"Pieces OS version: {await client.GetVersionAsync().ConfigureAwait(false)}"); -``` - -### Use the copilot - -To interact with the Pieces copilot, use the `IPiecesClient.GetCopilotAsync()` method to return an `IPiecesCopilot` instance. - -```csharp -var copilot = await client.GetCopilotAsync().ConfigureAwait(false); -``` - -Once you have the `IPiecesCopilot`, you can ask questions, and get the response either as a single string, or streaming data. - -#### Create a chat and ask a question: - -This will create a new copilot chat that you will be able to see in other Pieces applications, such as Pieces Desktop, or Pieces for Visual Studio Code. This chat will be named `"C# question on async tasks"`, and you will see this name in other Pieces applications. The chat will ask a question, then return the full answer to the console once it has the complete answer. 
- -```csharp -var chat = await copilot.CreateChatAsync("C# question on async tasks").ConfigureAwait(false); - -Console.WriteLine(await chat.AskQuestionAsync("What does the async keyword do in C#?")); -``` - -#### Create a chat and ask a question, then a follow up question: - -This example creates a copilot chat, and asks 2 connected questions, showing the response from each once it has been completely generated. - -```csharp -var chat = await copilot.CreateChatAsync("2 C# questions on async tasks").ConfigureAwait(false); - -Console.WriteLine(await chat2.AskQuestionAsync("What does the async keyword do in C#?").ConfigureAwait(false)); -Console.WriteLine(await chat2.AskQuestionAsync("Give me an example using it with an HTTP call?").ConfigureAwait(false)); -``` - -#### Create a chat with seeded messages: - -This example shows how to seed a conversation with a set of messages that are used in the conversation - -```csharp -var seeds = new List{ - new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), - new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), - new(QGPTConversationMessageRoleEnum.ASSISTANT, "To make a web request in a programming language, you typically use an HTTP client library."), -}; - -var chat = await copilot.CreateSeededChatAsync("C# web requests", - seeds: seeds).ConfigureAwait(false); - -Console.WriteLine(await chat.AskQuestionAsync("What about in C#?").ConfigureAwait(false)); -``` - -#### Stream the response - -This will create a new copilot chat, ask a question, then stream the response back token by token. - -```csharp -var chat = await copilot.CreateChatAsync("C# streaming question on primary constructors").ConfigureAwait(false); - -await foreach (var token in chat.AskStreamingQuestionAsync("In C#, what is a primary constructor? Give me some example code that declares a class with one.")) -{ - Console.Write(token); -} -``` - -#### Use live context - -This will create a new copilot chat called 1 hour context window, with live context turned on that you will be able to see in other Pieces applications. You will also be able to see live context turned on against the chat. The chat will ask a question related to this code file using a 1 hour context window, then stream the response back token by token. - -```csharp -var chat = await copilot.CreateChatAsync("1 hour context window", useLiveContext: true).ConfigureAwait(false); - -await foreach (var token in chat.AskStreamingQuestionAsync("Describe the Program.cs file I was just reading in my IDE", liveContextTimeSpan: TimeSpan.FromHours(1))) -{ - Console.Write(token); -} -``` - -### Manage models - -The SDK allows you to interact with all the available models, including listing them all, and downloading on-device models. - -#### List all models - -This example lists out all the LLMs Pieces currently supports, including if the model is downloaded for on-device models. - -```csharp -foreach (var model in await client.GetModelsAsync().ConfigureAwait(false)) -{ - var modelStatus = model.Cloud ? "Cloud model" : "On-device model"; - if (!model.Cloud) - { - var downloaded = model.Downloaded ? "downloaded" : "Not downloaded"; - modelStatus += $", {downloaded}"; - } - - Console.WriteLine($"Model: {model.Name}, ID: {model.Id}. 
{modelStatus}"); -} -``` - -#### Change the model - -This example shows how to select a different model and use it for a copilot chat - -```csharp -var models = await client.GetModelsAsync().ConfigureAwait(false); -var claudeModel = models.First(m => m.Name.Contains("Claude 3.5 Sonnet", StringComparison.CurrentCultureIgnoreCase)); - -var chat = await copilot.CreateChatAsync("Chat with a different model", model: claudeModel).ConfigureAwait(false); -``` - -#### Change the model to a local model - -This example shows how to select a local model and use it for a copilot chat. The model is a local model, so may need to be downloaded. If the model needs to be downloaded, you will be able to monitor the progress in Pieces Desktop - -```csharp -var models = await client.GetModelsAsync().ConfigureAwait(false); -var llamaModel = models.First(m => m.Name.Contains("Llama-3", StringComparison.CurrentCultureIgnoreCase)); - -if (!llamaModel.Downloaded) -{ - await client.DownloadModelAsync(llamaModel).ConfigureAwait(false); -} - -var chat = await copilot.CreateChatAsync("Chat with a different model", model: llamaModel).ConfigureAwait(false); -``` - -### Interact with assets - -To interact with assets in Pieces, such as saved code snippets, use the `IPiecesClient.GetAssetsAsync()` method to return an `IPiecesAssets` instance. - -```csharp -var assets = await client.GetAssetsAsync().ConfigureAwait(false); -``` - -Once you have the `IPiecesAssets`, you can get and create assets. - -#### Load assets - -This example loads all the assets in your current Pieces OS and prints the names and programming language. - -```csharp -await foreach(var asset in assets.GetAllAssetsAsync()) -{ - var language = asset.Formats.Iterable.FirstOrDefault(a => !string.IsNullOrWhiteSpace(a?.Analysis?.Code?.Language))?.Analysis?.Code?.Language; - Console.WriteLine($"{asset.Name} - {language}"); -} -``` +1. **Install the Extensions SDK**: If you want [Microsoft.Extensions.AI](https://www.nuget.org/packages/Microsoft.Extensions.AI/) support, install the Pieces.Extensions.AI package: -#### Create an asset and use it in a copilot chat + ```shell + dotnet add package Pieces.Extensions.AI --prerelease + ``` -This example creates a new asset. It then uses it in a copilot chat asking a question about it. +## Pieces.OS.Client Examples -```csharp -var assetCode = @"using System; +There is an example project using the Pieces.OS.Client package in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. -class Program -{ - static void Main(string[] args) - { - // Prompt the user for their name - Console.Write(""Please enter your name: ""); - - // Read the user's input - string name = Console.ReadLine(); - - // Print a greeting with the user's name - Console.WriteLine($""Hello, {name}! Nice to meet you.""); - - // Wait for the user to press a key before closing the console window - Console.WriteLine(""Press any key to exit...""); - Console.ReadKey(); - } -} -"; +Details of this example project is provided in the [src/Client/README.md file](./src/Client/README.md). 
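
As a rough illustration of that run step (the folder path is the one referenced above; uncomment the example you want to run first):

```shell
cd src/Client.Example
dotnet run
```
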
-var newAsset = await assets.CreateAssetAsync(assetCode).ConfigureAwait(false); -Console.WriteLine($"Asset created = name {newAsset.Name}, id: {newAsset.Id}"); +## Pieces.Extensions.AI Examples -var chat = await copilot.CreateChatAsync("C# chat with an asset", assetIds: [newAsset.Id]).ConfigureAwait(false); +There is an example project using the Pieces.Extensions.AI package in the [`./src/Extensions.Example`](./src/Extensions.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Extensions.Example`](./src/Extensions.Example) folder. -await foreach (var token in chat6.AskStreamingQuestionAsync("Describe this C# program")) -{ - Console.Write(token); -} -``` +Details of this example project is provided in the [src/Extensions/README.md file](./src/Extensions/README.md). diff --git a/src/Client/Pieces.OS.Client.csproj b/src/Client/Pieces.OS.Client.csproj index 4656851..496975f 100644 --- a/src/Client/Pieces.OS.Client.csproj +++ b/src/Client/Pieces.OS.Client.csproj @@ -36,7 +36,7 @@ - + diff --git a/src/Client/README.md b/src/Client/README.md new file mode 100644 index 0000000..50d32df --- /dev/null +++ b/src/Client/README.md @@ -0,0 +1,97 @@ +# Pieces OS Client SDK For C# + +The Pieces OS Client SDK is a powerful code engine package designed for writing applications on top of Pieces OS. It facilitates communication with a locally hosted server to enable features such as copilot chats, asset saving, and more. + +## Features + +The Pieces SDK offers the following key features: + +1. Copilot Chats: Communicate seamlessly with copilot chats functionality. +1. Asset Management: Save and manage assets and formats efficiently. +1. Local Server Interaction: Interact with a locally hosted server for various functionality. +1. Multi LLMs support: Use any Pieces supported LLM to power your app. + +## Installation + +To get started with the Pieces OS Client SDK, follow these steps: + +1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. Download the appropriate version for your operating system: + - [macOS](https://docs.pieces.app/installation-getting-started/macos) + - [Windows](https://docs.pieces.app/installation-getting-started/windows) + - [Linux](https://docs.pieces.app/installation-getting-started/linux) + +1. **Install the SDK**: Use nuget to install the Pieces OS Client SDK package: + + ```shell + dotnet add package Pieces.OS.Client --prerelease + ``` + +## Pieces Client Examples + +There is an example project using the Pieces Client in the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example) project. + +## Quickstart + +To set up the SDK, create an instance of the `PiecesClient`. By default this will connect to Pieces OS running locally on the default port, but you can override this by passing in the URL and port as the `baseUrl` parameter. + +The `PiecesClient` constructor also optionally takes an `ILogger` to provide full logging for the SDK. 
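If Pieces OS is listening on a non-default address, a minimal sketch of overriding the endpoint looks like the following; the URL is purely illustrative, so use the host and port your Pieces OS instance actually exposes.

```csharp
// Illustrative only - replace the URL with your actual Pieces OS host and port.
IPiecesClient client = new PiecesClient(baseUrl: "http://localhost:1000");
```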
+ +```csharp +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +using Pieces.OS.Client; + +// Set up logging (optional) +var services = new ServiceCollection(); +services.AddLogging(builder =>builder.AddConsole()); +var serviceProvider = services.BuildServiceProvider(); +var logger = serviceProvider.GetRequiredService>(); + +// Create the Pieces client +IPiecesClient client = new PiecesClient(logger); + +// Write out the version +Console.WriteLine($"Pieces OS version: {await client.GetVersionAsync().ConfigureAwait(false)}"); +``` + +### Use the copilot + +To interact with the Pieces copilot, use the `IPiecesClient.GetCopilotAsync()` method to return an `IPiecesCopilot` instance. + +```csharp +var copilot = await client.GetCopilotAsync().ConfigureAwait(false); +``` + +Once you have the `IPiecesCopilot`, you can ask questions, and get the response either as a single string, or streaming data. + +### Create a chat and ask a question: + +This will create a new copilot chat that you will be able to see in other Pieces applications, such as Pieces Desktop, or Pieces for Visual Studio Code. This chat will be named `"C# question on async tasks"`, and you will see this name in other Pieces applications. The chat will ask a question, then return the full answer to the console once it has the complete answer. + +```csharp +var chat = await copilot.CreateChatAsync("C# question on async tasks").ConfigureAwait(false); +Console.WriteLine(await chat.AskQuestionAsync("What does the async keyword do in C#?")); +``` + +### Create a chat and stream the response + +This will create a new copilot chat, ask a question, then stream the response back token by token. + +```csharp +var chat = await copilot.CreateChatAsync("C# streaming question on primary constructors").ConfigureAwait(false); +await foreach (var token in chat.AskStreamingQuestionAsync("In C#, what is a primary constructor? Give me some example code that declares a class with one.")) +{ + Console.Write(token); +} +``` + +### Interact with assets + +To interact with assets in Pieces, such as saved code snippets, use the `IPiecesClient.GetAssetsAsync()` method to return an `IPiecesAssets` instance. + +```csharp +var assets = await client.GetAssetsAsync().ConfigureAwait(false); +``` + +Once you have the `IPiecesAssets`, you can get and create assets. diff --git a/src/Extensions/Pieces.Extensions.AI.csproj b/src/Extensions/Pieces.Extensions.AI.csproj index cb19ac5..e080fff 100644 --- a/src/Extensions/Pieces.Extensions.AI.csproj +++ b/src/Extensions/Pieces.Extensions.AI.csproj @@ -40,7 +40,7 @@ - + diff --git a/src/Extensions/README.md b/src/Extensions/README.md new file mode 100644 index 0000000..da58d89 --- /dev/null +++ b/src/Extensions/README.md @@ -0,0 +1,73 @@ +# Pieces.Extensions.AI - an implementation of Microsoft.Extensions.AI using Pieces + +The Pieces.Extensions.AI package is an implementation of [Microsoft.Extensions.AI](https://www.nuget.org/packages/Microsoft.Extensions.AI/) using Pieces to provide support for multiple LLMs, as well as adding context such as snippets, files, folders, and live context to your AI conversation. + +## Features + +The Pieces.Extensions.AI package offers the following key features: + +1. `IChatClient` implementation +1. Ability to add files, folders, assets, and local context via the `ChatOptions.AdditionalProperties` +1. 
Ability to select the model via the `ChatOptions.AdditionalProperties` + +## Installation + +To get started with the Pieces.Extensions.AI, follow these steps: + +1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. Download the appropriate version for your operating system: + - [macOS](https://docs.pieces.app/installation-getting-started/macos) + - [Windows](https://docs.pieces.app/installation-getting-started/windows) + - [Linux](https://docs.pieces.app/installation-getting-started/linux) + +1. **Install the SDK**: Use nuget to install the Pieces OS Client SDK package: + + ```shell + dotnet add package Pieces.Extensions.AI --prerelease + ``` + +## Pieces.Extensions.AI Examples + +There is an example project using the Pieces.Extensions.AI package in the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example) project. + +## Quickstart + +The `Microsoft.Extensions.AI.IChatClient` interface is implemented in the `PiecesChatClient` object. Create an instance of this using a `IPiecesClient`: + +```csharp +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Pieces.Extensions.AI; +using Pieces.OS.Client; + +// Set up logging +var services = new ServiceCollection(); +services.AddLogging(builder => builder.AddConsole()); +var serviceProvider = services.BuildServiceProvider(); +var logger = serviceProvider.GetRequiredService>(); + +// Create the Pieces client +IPiecesClient client = new PiecesClient(logger); +var assets = await client.GetAssetsAsync().ConfigureAwait(false); + +IChatClient chatClient = new PiecesChatClient(client, chatName: "A new chat", logger: logger); +``` + +When you use the chat client, a new conversation is create in Pieces, and each time you call `chatClient.CompleteAsync` or `chatClient.CompleteStreamingAsync`, the same conversation is continued - assuming that the original chat messages have not changed. If this happens, a new conversation is created. + +You can configure the chat with the following `ChatOptions` settings: + +| Setting | Type | Default | Description | +| --------- | -------- | -------- | ----------- | +| `modelId` | `string` | `GPT-4o` | The Id of the model. This can be set using the model Ids from the Pieces.OS.Client SDK. If not set, the current model for the Pieces Client used to create this is used. | + +You can also set the following in the `ChatOptions.AdditionalProperties` dictionary: + +| Setting | Type | Default | Description | +| --------------------- | ---------------------- | -------- | ----------- | +| `PersistChat` | `bool` | `true` | Should the conversation be saved in Pieces> If this is false, after each chat message, the conversation is deleted and a new one started. | +| `AssetIds` | `IEnumerable?` | `null` | A list of asset Ids to add as context to this conversation. | +| `Files` | `IEnumerable?` | `null` | A list of file paths to add as context to this conversation. | +| `Folders` | `IEnumerable?` | `null` | A list of folder paths to add as context to this conversation. 
| +| `LiveContext` | `bool` | `false` | Should this conversation include live context. | +| `LiveContextTimeSpan` | `TimeSpan?` | `null` | The time span to use for the live context window. | From 4b55c084ba394100fde7921dbfbc03e955044e31 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 15 Oct 2024 14:31:53 -0700 Subject: [PATCH 12/24] Publish extensions package --- .github/workflows/publish-nuget.yaml | 33 ++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-nuget.yaml b/.github/workflows/publish-nuget.yaml index 5e95649..4881720 100644 --- a/.github/workflows/publish-nuget.yaml +++ b/.github/workflows/publish-nuget.yaml @@ -6,28 +6,53 @@ on: - '*' jobs: - publish-client-to-nuget: + publish-to-nuget: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + + # Set up .NET 8 - name: Setup .NET uses: actions/setup-dotnet@v4 with: dotnet-version: 8.0.* - - name: Build + + # Build the main client nuget package + - name: Build Client working-directory: ./src/Client run: dotnet build -c Release -p:Version=${GITHUB_REF#refs/tags/v} + + # Build the Microsoft.Extensions.AI implementation package + - name: Build AI Extension + working-directory: ./src/Extensions + run: dotnet build -c Release -p:Version=${GITHUB_REF#refs/tags/v} + + # Download the nuget signing certificate to a local file from the Actions secret - name: Get the nuget signing certificate id: cert_file uses: timheuer/base64-to-file@v1.2 with: fileName: 'certfile.pfx' encodedString: ${{ secrets.NUGET_CERTIFICATE }} - - name: Sign the nuget package + + # Sign the Client nuget package with the certificate from the Actions secret + - name: Sign the Client nuget package working-directory: ./src/Client run: dotnet nuget sign ./bin/Release/Pieces.OS.Client.${GITHUB_REF#refs/tags/v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com - - name: Push to NuGet + + # Sign the Microsoft.Extensions.AI implementation nuget package with the certificate from the Actions secret + - name: Sign the Extensions nuget package + working-directory: ./src/Extensions + run: dotnet nuget sign ./bin/Release/Pieces.Extensions.AI.${GITHUB_REF#refs/tags/v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com + + # Push the client nuget package to nuget + - name: Push the Client package to NuGet working-directory: ./src/Client run: dotnet nuget push ./bin/Release/*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://nuget.org + # Push the Microsoft.Extensions.AI implementation nuget package to nuget + - name: Push the Extensions package to NuGet + working-directory: ./src/Extensions + run: dotnet nuget push ./bin/Release/*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://nuget.org + \ No newline at end of file From 53fa61cf7d9e72200eb86ac452e138f6df5b2da7 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 15 Oct 2024 15:12:54 -0700 Subject: [PATCH 13/24] Adding service client creation --- src/Client/IPiecesClient.cs | 10 +++ src/Client/PiecesClient.cs | 5 +- src/Extensions/Pieces.Extensions.AI.csproj | 5 +- .../ServiceCollectionChatClientExtensions.cs | 72 +++++++++++++++++++ 4 files changed, 89 insertions(+), 3 deletions(-) create mode 100644 src/Extensions/ServiceCollectionChatClientExtensions.cs diff --git a/src/Client/IPiecesClient.cs 
b/src/Client/IPiecesClient.cs index a915f05..3ac54c8 100644 --- a/src/Client/IPiecesClient.cs +++ b/src/Client/IPiecesClient.cs @@ -38,6 +38,16 @@ public interface IPiecesClient /// Task DownloadModelAsync(string modelName, CancellationToken cancellationToken = default); + /// + /// Gets the first model that contains the given name. + /// If no model matches, the first is returned, unless + /// is set, then this throws a . + /// + /// The search string for the model name + /// If false and the model is not found, return the first model. Otherwise throw + /// + Model GetModelFromName(string modelName, bool throwIfNotFound = false); + /// /// Get the Pieces OS version /// diff --git a/src/Client/PiecesClient.cs b/src/Client/PiecesClient.cs index 2178272..bece1f7 100644 --- a/src/Client/PiecesClient.cs +++ b/src/Client/PiecesClient.cs @@ -121,12 +121,13 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli /// /// Gets the first model that contains the given name. - /// If no model matches, the first is returned. + /// If no model matches, the first is returned, unless + /// is set, then this throws a . /// /// The search string for the model name /// If false and the model is not found, return the first model. Otherwise throw /// - private Model GetModelFromName(string modelName, bool throwIfNotFound = false) + public Model GetModelFromName(string modelName, bool throwIfNotFound = false) { var models = piecesApis.ModelsApi.ModelsSnapshot().Iterable; var matchModel = models.FirstOrDefault(x => x.Name.Contains(modelName, StringComparison.OrdinalIgnoreCase)); diff --git a/src/Extensions/Pieces.Extensions.AI.csproj b/src/Extensions/Pieces.Extensions.AI.csproj index e080fff..d47dec3 100644 --- a/src/Extensions/Pieces.Extensions.AI.csproj +++ b/src/Extensions/Pieces.Extensions.AI.csproj @@ -8,8 +8,11 @@ - + + + + diff --git a/src/Extensions/ServiceCollectionChatClientExtensions.cs b/src/Extensions/ServiceCollectionChatClientExtensions.cs new file mode 100644 index 0000000..51f728f --- /dev/null +++ b/src/Extensions/ServiceCollectionChatClientExtensions.cs @@ -0,0 +1,72 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Hosting; + +using Pieces.OS.Client; +using Pieces.Os.Core.SdkModel; + +namespace Pieces.Extensions.AI; + +public static class ServiceCollectionChatClientExtensions +{ + /// + /// Add the ability to create a chat client from a host application builder + /// + /// The host builder to call this on + /// A builder function + /// The Id of the model to use + /// The name of the chat + /// + public static IServiceCollection AddPiecesChatClient( + this IHostApplicationBuilder hostBuilder, + Func? builder = null, + string? modelId = null, + string chatName = "") + { + return hostBuilder.Services.AddPiecesChatClient( + modelId, + chatName, + builder); + } + + /// + /// Add the ability to create a chat client from a service collection + /// + /// The service collection to call this on + /// A builder function + /// The Id of the model to use + /// The name of the chat + /// + public static IServiceCollection AddPiecesChatClient( + this IServiceCollection services, + string? modelId = null, + string chatName = "", + Func? builder = null) + { + return services.AddChatClient(pipeline => + { + builder?.Invoke(pipeline); + var logger = pipeline.Services.GetService(); + + // Create the Pieces client + var client = new PiecesClient(logger); + + Model? 
model = default; + + if (!string.IsNullOrWhiteSpace(modelId)) + { + // Load the models. This is a sync function, and loading models is async, so do the bad thing and + // get the result to force this to be synchronous + var models = client.GetModelsAsync().Result; + + // Find the first model that matches the Id + // If there is no match, try based off the name instead + model = models.FirstOrDefault(m => m.Id == modelId) ?? client.GetModelFromName(modelId); + } + + // Create the chat client in the pipeline + return pipeline.Use(new PiecesChatClient(client, chatName, logger, model)); + }); + } +} From 39ee399c9c2c595ac09b71f9f8173bf2c3dc11e9 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 15 Oct 2024 17:51:07 -0700 Subject: [PATCH 14/24] Adding the remind me sample app --- .vscode/launch.json | 6 ++ Pieces.OS.Client.sln | 10 ++++ README.md | 19 +++++- src/Extensions/PiecesChatClient.cs | 37 ++++++++++-- .../ServiceCollectionChatClientExtensions.cs | 35 +++++++---- .../Pieces.SampleApps.RemindMe.csproj | 20 +++++++ src/SampleApps/RemindMe/Program.cs | 58 +++++++++++++++++++ src/SampleApps/RemindMe/README.md | 41 +++++++++++++ 8 files changed, 207 insertions(+), 19 deletions(-) create mode 100644 src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj create mode 100644 src/SampleApps/RemindMe/Program.cs create mode 100644 src/SampleApps/RemindMe/README.md diff --git a/.vscode/launch.json b/.vscode/launch.json index 1255a11..f053f33 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -15,6 +15,12 @@ "type": "dotnet", "request": "launch", "projectPath": "${workspaceFolder}/src/Extensions.Example/Pieces.Extensions.AI.Example.csproj" + }, + { + "name": "Remind Me Sample Debug", + "type": "dotnet", + "request": "launch", + "projectPath": "${workspaceFolder}/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj" } ] } \ No newline at end of file diff --git a/Pieces.OS.Client.sln b/Pieces.OS.Client.sln index 47cdcff..c8d011c 100644 --- a/Pieces.OS.Client.sln +++ b/Pieces.OS.Client.sln @@ -25,6 +25,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.Extensions.AI", "src EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.Extensions.AI.Example", "src\Extensions.Example\Pieces.Extensions.AI.Example.csproj", "{444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SampleApps", "SampleApps", "{CC196DE0-D1B4-4AE4-9E00-51E4BE1912BC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Pieces.SampleApps.RemindMe", "src\SampleApps\RemindMe\Pieces.SampleApps.RemindMe.csproj", "{92F96E38-8661-4AD9-AB95-C59C5E082B62}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -58,6 +62,10 @@ Global {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Debug|Any CPU.Build.0 = Debug|Any CPU {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Release|Any CPU.ActiveCfg = Release|Any CPU {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3}.Release|Any CPU.Build.0 = Release|Any CPU + {92F96E38-8661-4AD9-AB95-C59C5E082B62}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {92F96E38-8661-4AD9-AB95-C59C5E082B62}.Debug|Any CPU.Build.0 = Debug|Any CPU + {92F96E38-8661-4AD9-AB95-C59C5E082B62}.Release|Any CPU.ActiveCfg = Release|Any CPU + {92F96E38-8661-4AD9-AB95-C59C5E082B62}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution {63B9A93A-AD4D-42C6-9A80-68853FAC4913} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} @@ -70,5 +78,7 @@ Global 
{9B392DE8-CC81-4B68-AF1F-64909DA5B9CF} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} {14EEEFCD-42A3-4C85-AED5-9C00F0D53811} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} {444BCCA9-ACBE-47EB-88A8-B3CF48113FA3} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} + {CC196DE0-D1B4-4AE4-9E00-51E4BE1912BC} = {0F400239-4FC7-4A56-A1BE-553ECB4EA55E} + {92F96E38-8661-4AD9-AB95-C59C5E082B62} = {CC196DE0-D1B4-4AE4-9E00-51E4BE1912BC} EndGlobalSection EndGlobal diff --git a/README.md b/README.md index 59bc14b..fa7236f 100644 --- a/README.md +++ b/README.md @@ -34,8 +34,8 @@ The Pieces SDK offers the following key features: To get started with the Pieces OS Client SDK, follow these steps: 1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. Download the appropriate version for your operating system: - - [macOS](https://docs.pieces.app/installation-getting-started/macos) - - [Windows](https://docs.pieces.app/installation-getting-started/windows) + - [macOS](https://docs.pieces.app/installation-getting-started/macos) + - [Windows](https://docs.pieces.app/installation-getting-started/windows) - [Linux](https://docs.pieces.app/installation-getting-started/linux) 1. **Install the SDK**: Use nuget to install the Pieces OS Client SDK package: @@ -50,6 +50,21 @@ To get started with the Pieces OS Client SDK, follow these steps: dotnet add package Pieces.Extensions.AI --prerelease ``` +## Projects + +This repo contains the following projects: + +- [Pieces.OS.Client](./src/Client/) - the OS client SDK +- [Pieces.Extensions.AI](./src/Extensions/) - support for Microsoft.Extensions.AI +- [Pieces.Os.Core](./src/Core/) - an internal library wrapping the Pieces OS API +- [Pieces.OS.Client.Example](./src/Client.Example/) - example code for using the Pieces.OS.Client SDK +- [Pieces.Extensions.AI.Example](./src/Extensions.Example/) - example code for using the Pieces.Extensions.AI SDK +- [SampleApps](./src/SampleApps/) - a selection of sample apps + +### Sample apps + +- [Remind Me](./src/SampleApps/RemindMe/) - an app that reminds you about what you have been working on over the last few hours using live context. + ## Pieces.OS.Client Examples There is an example project using the Pieces.OS.Client package in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. diff --git a/src/Extensions/PiecesChatClient.cs b/src/Extensions/PiecesChatClient.cs index f05141b..92a4237 100644 --- a/src/Extensions/PiecesChatClient.cs +++ b/src/Extensions/PiecesChatClient.cs @@ -9,6 +9,31 @@ namespace Pieces.Extensions.AI; public class PiecesChatClient(IPiecesClient piecesClient, string chatName = "", ILogger? logger = null, Model? 
model = null) : IChatClient { + /// + /// A constant for the name of the AssetsIds property in the AdditionalProperties dictionary + /// + public const string AssetIdsPropertyName = "AssetIds"; + /// + /// A constant for the name of the Files property in the AdditionalProperties dictionary + /// + public const string FilesPropertyName = "Files"; + /// + /// A constant for the name of the Folders property in the AdditionalProperties dictionary + /// + public const string FoldersPropertyName = "Folders"; + /// + /// A constant for the name of the LiveContext property in the AdditionalProperties dictionary + /// + public const string LiveContextPropertyName = "LiveContext"; + /// + /// A constant for the name of the LiveContextTimeSpan property in the AdditionalProperties dictionary + /// + public const string LiveContextTimeSpanPropertyName = "LiveContextTimeSpan"; + /// + /// A constant for the name of the PersistChat property in the AdditionalProperties dictionary + /// + public const string PersistChatPropertyName = "PersistChat"; + private readonly IPiecesClient piecesClient = piecesClient; private readonly string chatName = chatName; private readonly ILogger? logger = logger; @@ -178,7 +203,7 @@ private async Task CacheOrDeleteChatAsync(IList chatMessages, // If we got this chat from the cache, remove the old entry as the messages will be updated to reflect this response chatCache.Remove(chatWithCacheKey.CacheKey); - var persist = GetBoolValueFromOptions(options, "PersistChat", true); + var persist = GetBoolValueFromOptions(options, PersistChatPropertyName, true); if (persist) { @@ -245,11 +270,11 @@ private static ChatContext CreateChatContextFromOptions(ChatOptions? options) { return new ChatContext { - AssetIds = GetValueFromOptions>(options, "AssetIds"), - LiveContext = GetBoolValueFromOptions(options, "LiveContext"), - LiveContextTimeSpan = GetValueFromOptions(options, "LiveContextTimeSpan", null), - Files = GetValueFromOptions>(options, "Files"), - Folders = GetValueFromOptions>(options, "Folders"), + AssetIds = GetValueFromOptions>(options, AssetIdsPropertyName), + LiveContext = GetBoolValueFromOptions(options, LiveContextPropertyName), + LiveContextTimeSpan = GetValueFromOptions(options, LiveContextTimeSpanPropertyName, null), + Files = GetValueFromOptions>(options, FilesPropertyName), + Folders = GetValueFromOptions>(options, FoldersPropertyName), }; } diff --git a/src/Extensions/ServiceCollectionChatClientExtensions.cs b/src/Extensions/ServiceCollectionChatClientExtensions.cs index 51f728f..319d9b7 100644 --- a/src/Extensions/ServiceCollectionChatClientExtensions.cs +++ b/src/Extensions/ServiceCollectionChatClientExtensions.cs @@ -15,18 +15,20 @@ public static class ServiceCollectionChatClientExtensions /// /// The host builder to call this on /// A builder function - /// The Id of the model to use + /// The Id or name of the model to use /// The name of the chat /// public static IServiceCollection AddPiecesChatClient( this IHostApplicationBuilder hostBuilder, Func? builder = null, - string? modelId = null, - string chatName = "") + string? model = null, + string chatName = "", + IPiecesClient? 
piecesClient = null) { return hostBuilder.Services.AddPiecesChatClient( - modelId, + model, chatName, + piecesClient, builder); } @@ -35,26 +37,37 @@ public static IServiceCollection AddPiecesChatClient( /// /// The service collection to call this on /// A builder function - /// The Id of the model to use + /// The Id or name of the model to use /// The name of the chat /// public static IServiceCollection AddPiecesChatClient( this IServiceCollection services, - string? modelId = null, + string? model = null, string chatName = "", + IPiecesClient? piecesClient = null, Func? builder = null) { return services.AddChatClient(pipeline => { + builder?.Invoke(pipeline); + + // Get the logger var logger = pipeline.Services.GetService(); + // If the logger is not created yet, create a Pieces logger using the logger factory if it exists + if (logger is null) + { + var loggerFactory = pipeline.Services.GetService(); + logger = loggerFactory?.CreateLogger("Pieces logger"); + } + // Create the Pieces client - var client = new PiecesClient(logger); + var client = piecesClient ?? new PiecesClient(logger); - Model? model = default; + Model? piecesModel = default; - if (!string.IsNullOrWhiteSpace(modelId)) + if (!string.IsNullOrWhiteSpace(model)) { // Load the models. This is a sync function, and loading models is async, so do the bad thing and // get the result to force this to be synchronous @@ -62,11 +75,11 @@ public static IServiceCollection AddPiecesChatClient( // Find the first model that matches the Id // If there is no match, try based off the name instead - model = models.FirstOrDefault(m => m.Id == modelId) ?? client.GetModelFromName(modelId); + piecesModel = models.FirstOrDefault(m => m.Id == model) ?? client.GetModelFromName(model); } // Create the chat client in the pipeline - return pipeline.Use(new PiecesChatClient(client, chatName, logger, model)); + return pipeline.Use(new PiecesChatClient(client, chatName, logger, piecesModel)); }); } } diff --git a/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj b/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj new file mode 100644 index 0000000..a1df555 --- /dev/null +++ b/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj @@ -0,0 +1,20 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + + + diff --git a/src/SampleApps/RemindMe/Program.cs b/src/SampleApps/RemindMe/Program.cs new file mode 100644 index 0000000..5e36711 --- /dev/null +++ b/src/SampleApps/RemindMe/Program.cs @@ -0,0 +1,58 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Pieces.Extensions.AI; + +// Set up the service collection with logging and the chat client +var chatName = $"Recent work reminder {DateTime.Now.ToShortDateString()} {DateTime.Now.ToShortTimeString()}"; +var services = new ServiceCollection(); +services.AddLogging(builder => builder.AddConsole()) + .AddPiecesChatClient(model: "Claude 3.5 sonnet", chatName: chatName); +var serviceProvider = services.BuildServiceProvider(); + +// Get the chat client +var chatClient = serviceProvider.GetRequiredService(); + +// Set up the options for the chat +// We need live context over the last few hours +var options = new ChatOptions +{ + AdditionalProperties = new AdditionalPropertiesDictionary + { + { PiecesChatClient.LiveContextPropertyName, true}, + { PiecesChatClient.LiveContextTimeSpanPropertyName, TimeSpan.FromHours(6) }, + } +}; + +// Create the chat message +IList chatMessages = []; + +// Add a system prompt 
depending on the day + +// If we are international talk like a pirate day (Sept 19th), ask for responses in the style of a pirate +if (DateTime.Now.Month == 9 && DateTime.Now.Day == 19) +{ + chatMessages.Add(new(ChatRole.System, "Answer all questions in the style of a pirate.")); +} + +// If we are Star Wars day (May 4th), ask for responses in the style of a pirate +if (DateTime.Now.Month == 5 && DateTime.Now.Day == 4) +{ + chatMessages.Add(new(ChatRole.System, "Answer all questions in the style of Yoda from star wars.")); +} + +chatMessages.Add(new(ChatRole.User, "Give me a summary of all the activities I was doing over the last few hours")); + +// Send the message and stream the result +await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options).ConfigureAwait(false)) +{ + // The last message has a copy of the entire text, so only log the non-stop messages + if (r.FinishReason != ChatFinishReason.Stop) + { + Console.Write(r.Text); + } +} + +// Write more instructions for the user +Console.WriteLine(""); +Console.WriteLine($"This conversation is persisted in Pieces, called {chatName}. You can ask for more details in this chat in your favorite Pieces extension, or in the desktop app."); diff --git a/src/SampleApps/RemindMe/README.md b/src/SampleApps/RemindMe/README.md new file mode 100644 index 0000000..4c64a70 --- /dev/null +++ b/src/SampleApps/RemindMe/README.md @@ -0,0 +1,41 @@ +# Remind Me + +This is an example app using the [Pieces.Extensions.AI](https://www.nuget.org/packages/Pieces.Extensions.AI/) package to show how to build a small application that leverages live context. This application reminds you of what you were doing over the past few hours. + +## How this app works + +This application creates the chat client, and sends a message to get the activities over the last few hours using live context. This is controlled by the `ChatOptions`: + +```csharp +var options = new ChatOptions +{ + AdditionalProperties = new AdditionalPropertiesDictionary + { + { PiecesChatClient.LiveContextPropertyName, true}, + { PiecesChatClient.LiveContextTimeSpanPropertyName, TimeSpan.FromHours(6) }, + } +}; +``` + +This uses a single chat message to get what you were doing over the last few hours: + +```csharp +chatMessages.Add(new(ChatRole.User, "Give me a summary of all the activities I was doing over the last few hours")); +``` + +As a fun easter egg, depending on the day of the year, this will also add a system prompt to guide the output, such as giving the response like a pirate on [international talk like a pirate day](https://talklikeapirate.com), or like Yoda on [Star Wars Day](https://www.starwars.com/star-wars-day). + +## Run the code + +To run this code, follow these steps: + +1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. If you don't have it installed and running alredy, download the appropriate version for your operating system: + - [macOS](https://docs.pieces.app/installation-getting-started/macos) + - [Windows](https://docs.pieces.app/installation-getting-started/windows) + - [Linux](https://docs.pieces.app/installation-getting-started/linux) +1. Ensure the Workstream Pattern Engine is running. +1. 
Build and run the app: + + ```shell + dotnet run + ``` From 39c6dad20de32c01531f3cf2cc81f01a27199c22 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Tue, 15 Oct 2024 18:18:42 -0700 Subject: [PATCH 15/24] Using service builder --- .../ServiceCollectionChatClientExtensions.cs | 10 ++++++---- .../RemindMe/Pieces.SampleApps.RemindMe.csproj | 2 ++ src/SampleApps/RemindMe/Program.cs | 7 +++++-- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/Extensions/ServiceCollectionChatClientExtensions.cs b/src/Extensions/ServiceCollectionChatClientExtensions.cs index 319d9b7..0d2cd11 100644 --- a/src/Extensions/ServiceCollectionChatClientExtensions.cs +++ b/src/Extensions/ServiceCollectionChatClientExtensions.cs @@ -17,6 +17,7 @@ public static class ServiceCollectionChatClientExtensions /// A builder function /// The Id or name of the model to use /// The name of the chat + /// The pieces client to use, or create one if this is null /// public static IServiceCollection AddPiecesChatClient( this IHostApplicationBuilder hostBuilder, @@ -39,6 +40,7 @@ public static IServiceCollection AddPiecesChatClient( /// A builder function /// The Id or name of the model to use /// The name of the chat + /// The pieces client to use, or create one if this is null /// public static IServiceCollection AddPiecesChatClient( this IServiceCollection services, @@ -63,7 +65,7 @@ public static IServiceCollection AddPiecesChatClient( } // Create the Pieces client - var client = piecesClient ?? new PiecesClient(logger); + piecesClient ??= new PiecesClient(logger); Model? piecesModel = default; @@ -71,15 +73,15 @@ public static IServiceCollection AddPiecesChatClient( { // Load the models. This is a sync function, and loading models is async, so do the bad thing and // get the result to force this to be synchronous - var models = client.GetModelsAsync().Result; + var models = piecesClient.GetModelsAsync().Result; // Find the first model that matches the Id // If there is no match, try based off the name instead - piecesModel = models.FirstOrDefault(m => m.Id == model) ?? client.GetModelFromName(model); + piecesModel = models.FirstOrDefault(m => m.Id == model) ?? 
piecesClient.GetModelFromName(model); } // Create the chat client in the pipeline - return pipeline.Use(new PiecesChatClient(client, chatName, logger, piecesModel)); + return pipeline.Use(new PiecesChatClient(piecesClient, chatName, logger, piecesModel)); }); } } diff --git a/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj b/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj index a1df555..a7172b1 100644 --- a/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj +++ b/src/SampleApps/RemindMe/Pieces.SampleApps.RemindMe.csproj @@ -15,6 +15,8 @@ + + diff --git a/src/SampleApps/RemindMe/Program.cs b/src/SampleApps/RemindMe/Program.cs index 5e36711..421c6e1 100644 --- a/src/SampleApps/RemindMe/Program.cs +++ b/src/SampleApps/RemindMe/Program.cs @@ -2,14 +2,17 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Pieces.Extensions.AI; +using Pieces.OS.Client; // Set up the service collection with logging and the chat client var chatName = $"Recent work reminder {DateTime.Now.ToShortDateString()} {DateTime.Now.ToShortTimeString()}"; var services = new ServiceCollection(); -services.AddLogging(builder => builder.AddConsole()) - .AddPiecesChatClient(model: "Claude 3.5 sonnet", chatName: chatName); +services.AddLogging(builder => builder.AddConsole()); +services.AddChatClient(builder => builder.UseLogging() + .Use(new PiecesChatClient(new PiecesClient()))); var serviceProvider = services.BuildServiceProvider(); + // Get the chat client var chatClient = serviceProvider.GetRequiredService(); From 3a8ed3bdb4e9411b2508b0425f4a576caad18a3a Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Wed, 16 Oct 2024 09:25:26 -0700 Subject: [PATCH 16/24] Added missing chat name parameter --- src/SampleApps/RemindMe/Program.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/SampleApps/RemindMe/Program.cs b/src/SampleApps/RemindMe/Program.cs index 421c6e1..dc9abf3 100644 --- a/src/SampleApps/RemindMe/Program.cs +++ b/src/SampleApps/RemindMe/Program.cs @@ -9,7 +9,7 @@ var services = new ServiceCollection(); services.AddLogging(builder => builder.AddConsole()); services.AddChatClient(builder => builder.UseLogging() - .Use(new PiecesChatClient(new PiecesClient()))); + .Use(new PiecesChatClient(new PiecesClient(), chatName: chatName))); var serviceProvider = services.BuildServiceProvider(); From 2c218eba157db757420ea49cc11e5bc635274e78 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Wed, 16 Oct 2024 16:47:09 -0700 Subject: [PATCH 17/24] Fixing nuget publishing as a 2-step tag process --- .github/workflows/publish-client-nuget.yaml | 41 +++++++++++++++++++ ...get.yaml => publish-extensions-nuget.yaml} | 29 +++++-------- README.md | 17 ++++++++ 3 files changed, 69 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/publish-client-nuget.yaml rename .github/workflows/{publish-nuget.yaml => publish-extensions-nuget.yaml} (54%) diff --git a/.github/workflows/publish-client-nuget.yaml b/.github/workflows/publish-client-nuget.yaml new file mode 100644 index 0000000..a6df766 --- /dev/null +++ b/.github/workflows/publish-client-nuget.yaml @@ -0,0 +1,41 @@ +name: Release + +on: + push: + tags: + - 'client-*' + +jobs: + publish-client-to-nuget: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # Set up .NET 8 + - name: Setup .NET + uses: actions/setup-dotnet@v4 + with: + dotnet-version: 8.0.* + + # Build the main client nuget package + - name: Build Client + working-directory: ./src/Client + run: dotnet build -c 
Release -p:Version=${GITHUB_REF#refs/tags/client-v} + + # Download the nuget signing certificate to a local file from the Actions secret + - name: Get the nuget signing certificate + id: cert_file + uses: timheuer/base64-to-file@v1.2 + with: + fileName: 'certfile.pfx' + encodedString: ${{ secrets.NUGET_CERTIFICATE }} + + # Sign the Client nuget package with the certificate from the Actions secret + - name: Sign the Client nuget package + working-directory: ./src/Client + run: dotnet nuget sign ./bin/Release/Pieces.OS.Client.${GITHUB_REF#refs/tags/client-v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com + + # Push the client nuget package to nuget + - name: Push the Client package to NuGet + working-directory: ./src/Client + run: dotnet nuget push ./bin/Release/*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://nuget.org diff --git a/.github/workflows/publish-nuget.yaml b/.github/workflows/publish-extensions-nuget.yaml similarity index 54% rename from .github/workflows/publish-nuget.yaml rename to .github/workflows/publish-extensions-nuget.yaml index 4881720..49ad06d 100644 --- a/.github/workflows/publish-nuget.yaml +++ b/.github/workflows/publish-extensions-nuget.yaml @@ -3,10 +3,10 @@ name: Release on: push: tags: - - '*' + - 'extensions-*' jobs: - publish-to-nuget: + publish-extensions-to-nuget: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -17,15 +17,18 @@ jobs: with: dotnet-version: 8.0.* - # Build the main client nuget package - - name: Build Client - working-directory: ./src/Client - run: dotnet build -c Release -p:Version=${GITHUB_REF#refs/tags/v} + # Update the Pieces.OS.Client to use the latest nuget package, not the local packages + - name: Update the Pieces.OS.Client nuget package + working-directory: ./src/Extensions + run: | + dotnet remove reference ../Core/src/Pieces.Os.Core/Pieces.Os.Core.csproj + dotnet remove reference ../Client/Pieces.OS.Client.csproj + dotnet add package Pieces.OS.Client --prerelease # Build the Microsoft.Extensions.AI implementation package - name: Build AI Extension working-directory: ./src/Extensions - run: dotnet build -c Release -p:Version=${GITHUB_REF#refs/tags/v} + run: dotnet build -c Release -p:Version=${GITHUB_REF#refs/tags/extensions-v} # Download the nuget signing certificate to a local file from the Actions secret - name: Get the nuget signing certificate @@ -35,20 +38,10 @@ jobs: fileName: 'certfile.pfx' encodedString: ${{ secrets.NUGET_CERTIFICATE }} - # Sign the Client nuget package with the certificate from the Actions secret - - name: Sign the Client nuget package - working-directory: ./src/Client - run: dotnet nuget sign ./bin/Release/Pieces.OS.Client.${GITHUB_REF#refs/tags/v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com - # Sign the Microsoft.Extensions.AI implementation nuget package with the certificate from the Actions secret - name: Sign the Extensions nuget package working-directory: ./src/Extensions - run: dotnet nuget sign ./bin/Release/Pieces.Extensions.AI.${GITHUB_REF#refs/tags/v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com - - # Push the client nuget package to nuget - - name: Push the Client package to NuGet - working-directory: 
./src/Client - run: dotnet nuget push ./bin/Release/*.nupkg -k ${{ secrets.NUGET_API_KEY }} -s https://nuget.org + run: dotnet nuget sign ./bin/Release/Pieces.Extensions.AI.${GITHUB_REF#refs/tags/extensions-v}.nupkg --certificate-path ${{ steps.cert_file.outputs.filePath }} --certificate-password ${{ secrets.NUGET_CERTIFICATE_PASSWORD }} --timestamper http://timestamp.digicert.com # Push the Microsoft.Extensions.AI implementation nuget package to nuget - name: Push the Extensions package to NuGet diff --git a/README.md b/README.md index fa7236f..cb47095 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,9 @@ [![GitHub contributors](https://img.shields.io/github/contributors/pieces-app/pieces-os-client-sdk-for-csharp.svg)](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/graphs/contributors) [![GitHub issues by-label](https://img.shields.io/github/issues/pieces-app/pieces-os-client-sdk-for-csharp)](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/issues) +![Publish Client Workflow Status](https://img.shields.io/github/actions/workflow/status/pieces-app/pieces-os-client-sdk-for-csharp/publish-client-nuget.yaml) +![Publish Extensions Workflow Status](https://img.shields.io/github/actions/workflow/status/pieces-app/pieces-os-client-sdk-for-csharp/publish-extensions-nuget.yaml) + [![Pieces.OS.Client SDK on nuget](https://img.shields.io/nuget/vpre/Pieces.OS.Client)](https://www.nuget.org/packages/Pieces.OS.Client/) [![Pieces.Extensions.AI SDK on nuget](https://img.shields.io/nuget/vpre/Pieces.Extensions.AI)](https://www.nuget.org/packages/Pieces.Extensions.AI/) [![Discord](https://img.shields.io/badge/Discord-@layer5.svg?color=7389D8&label&logo=discord&logoColor=ffffff)](https://discord.gg/getpieces) @@ -76,3 +79,17 @@ Details of this example project is provided in the [src/Client/README.md file](. There is an example project using the Pieces.Extensions.AI package in the [`./src/Extensions.Example`](./src/Extensions.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Extensions.Example`](./src/Extensions.Example) folder. Details of this example project is provided in the [src/Extensions/README.md file](./src/Extensions/README.md). + +## Publish the packages + +This repo includes GitHub actions to publish both the Pieces.OS.Client and Pieces.Extensions.AI packages. + +The Pieces.Extensions.AI package is dependent on the Pieces.OS.Client package. The way this is implemented is: + +- In this repo, the Pieces.Extensions.AI project has a project dependency on Pieces.OS.Client and Pieces.OS.Core +- In the action to publish, this project dependency is removed, and a nuget package dependency is added to the latest pre-release Pieces.OS.Core. + +To publish these packages, do the following: + +- Tag this repo using a tag of `client-v` where `` is the version string. For example, to release `0.0.10-beta` you would tag with `client-v0.0.10-beta`. This will only build and publish the Pieces.OS.Client package. +- Once the Pieces.OS.Client package has been validated and made public on nuget, tag the repo using a tag of `extensions-v`, for example `extensions-v0.0.10-beta`. This will build the Pieces.Extensions.AI package using the nuget package reference to Pieces.OS.Client, and publish to nuget. 
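For illustration, the two-step release described above can be driven with plain git tags, using the example version from the steps:

```shell
# Step 1: publish Pieces.OS.Client (matches the 'client-*' tag trigger)
git tag client-v0.0.10-beta
git push origin client-v0.0.10-beta

# Step 2: after the client package is validated and public on nuget.org,
# publish Pieces.Extensions.AI (matches the 'extensions-*' tag trigger)
git tag extensions-v0.0.10-beta
git push origin extensions-v0.0.10-beta
```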
From a65694f5e10cf9b2e7f03d66ec2173fa0ccf8153 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Thu, 17 Oct 2024 12:54:27 -0700 Subject: [PATCH 18/24] Adding example for folders --- src/Extensions.Example/Program.cs | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/src/Extensions.Example/Program.cs b/src/Extensions.Example/Program.cs index 613b30e..749525a 100644 --- a/src/Extensions.Example/Program.cs +++ b/src/Extensions.Example/Program.cs @@ -374,4 +374,32 @@ // Console.WriteLine(response.Message.Text); // } -#endregion Chat client with a model \ No newline at end of file +#endregion Chat client with a model + +#region Chat about a folder + +// This example chats about the current folder + +// { +// // Create a Chat completion +// IChatClient chatClient = new PiecesChatClient(client, chatName: $"Chat about a folder - {DateTime.Now.ToShortTimeString()}", logger: logger); + +// // Set the system prompt +// var chatMessages = new List{ +// new(ChatRole.User, "Describe this C# project") +// }; + +// var options = new ChatOptions() +// { +// AdditionalProperties = new AdditionalPropertiesDictionary{ +// { "Folders", new List{Environment.CurrentDirectory} }, +// } +// }; + +// await foreach (var r in chatClient.CompleteStreamingAsync(chatMessages, options).ConfigureAwait(false)) +// { +// Console.Write(r.Text); +// } +// } + +#endregion Chat about a folder \ No newline at end of file From 99de17ee95c0517dda1554da36ed3a076d548f0d Mon Sep 17 00:00:00 2001 From: ellie-at-pieces Date: Fri, 18 Oct 2024 16:25:33 +0000 Subject: [PATCH 19/24] fix typo --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index cb47095..8c2725f 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ The Pieces SDK offers the following key features: 1. Copilot Chats: Communicate seamlessly with copilot chats functionality. 1. Asset Management: Save and manage assets and formats efficiently. 1. Local Server Interaction: Interact with a locally hosted server for various functionality. -1. Multi LLMs support: Use any Pieces supported LLM to power your app. +1. Multi-LLM support: Use any Pieces supported LLM to power your app. 1. File, folder, and live context in copilot chats ## Installation @@ -70,13 +70,13 @@ This repo contains the following projects: ## Pieces.OS.Client Examples -There is an example project using the Pieces.OS.Client package in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. +There is an example project using the Pieces.OS.Client package in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. Details of this example project is provided in the [src/Client/README.md file](./src/Client/README.md). ## Pieces.Extensions.AI Examples -There is an example project using the Pieces.Extensions.AI package in the [`./src/Extensions.Example`](./src/Extensions.Example) folder. This example is a console app containing a range of different examples, each commented out. 
To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`./src/Extensions.Example`](./src/Extensions.Example) folder. +There is an example project using the Pieces.Extensions.AI package in the [`./src/Extensions.Example`](./src/Extensions.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`./src/Extensions.Example`](./src/Extensions.Example) folder. Details of this example project is provided in the [src/Extensions/README.md file](./src/Extensions/README.md). From b5fb1c9f014648a1ba7c5450c1f442ffe41477f8 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Fri, 18 Oct 2024 09:34:52 -0700 Subject: [PATCH 20/24] Making get by name async --- src/Client.Example/Program.cs | 4 ++-- src/Client/IPiecesClient.cs | 2 +- src/Client/PiecesClient.cs | 9 +++++---- src/Extensions/ServiceCollectionChatClientExtensions.cs | 3 +-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/Client.Example/Program.cs b/src/Client.Example/Program.cs index 576ffae..a796725 100644 --- a/src/Client.Example/Program.cs +++ b/src/Client.Example/Program.cs @@ -446,8 +446,8 @@ // { // var models = await client.GetModelsAsync().ConfigureAwait(false); -// var llamaModel = models.First(m => m.Name.Contains("Llama-3", StringComparison.CurrentCultureIgnoreCase)); -// var phi3Model = models.First(m => m.Name.Contains("Phi-3", StringComparison.CurrentCultureIgnoreCase)); +// var llamaModel = await client.GetModelByNameAsync("Llama-3").ConfigureAwait(false); +// var phi3Model = await client.GetModelByNameAsync("Phi-3").ConfigureAwait(false); // var chatModel = copilot.Model.Id == llamaModel.Id ? phi3Model : llamaModel; diff --git a/src/Client/IPiecesClient.cs b/src/Client/IPiecesClient.cs index 3ac54c8..aa1a1b3 100644 --- a/src/Client/IPiecesClient.cs +++ b/src/Client/IPiecesClient.cs @@ -46,7 +46,7 @@ public interface IPiecesClient /// The search string for the model name /// If false and the model is not found, return the first model. Otherwise throw /// - Model GetModelFromName(string modelName, bool throwIfNotFound = false); + Task GetModelByNameAsync(string modelName, bool throwIfNotFound = false); /// /// Get the Pieces OS version diff --git a/src/Client/PiecesClient.cs b/src/Client/PiecesClient.cs index bece1f7..c6597e6 100644 --- a/src/Client/PiecesClient.cs +++ b/src/Client/PiecesClient.cs @@ -111,7 +111,8 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli logger?.LogInformation("Web sockets started"); // Get all the models to pick a default - choose GPT-4o if it is available - var defaultModel = GetModelFromName("GPT-4o"); + var models = piecesApis.ModelsApi.ModelsSnapshot().Iterable; + var defaultModel = models.FirstOrDefault(x => x.Name.Contains("GPT-4o Chat", StringComparison.OrdinalIgnoreCase)); copilot = new PiecesCopilot(logger, defaultModel, application!, qgptWebSocket, piecesApis); assets = new PiecesAssets(logger, application!, new AssetApi(apiClient, apiClient, configuration), new AssetsApi(apiClient, apiClient, configuration)); @@ -127,9 +128,9 @@ public PiecesClient(ILogger? logger = null, string? baseUrl = null, string appli /// The search string for the model name /// If false and the model is not found, return the first model. 
Otherwise throw /// - public Model GetModelFromName(string modelName, bool throwIfNotFound = false) + public async Task GetModelByNameAsync(string modelName, bool throwIfNotFound = false) { - var models = piecesApis.ModelsApi.ModelsSnapshot().Iterable; + var models = await GetModelsAsync().ConfigureAwait(false); var matchModel = models.FirstOrDefault(x => x.Name.Contains(modelName, StringComparison.OrdinalIgnoreCase)); if (matchModel == null) @@ -248,7 +249,7 @@ public async Task DownloadModelAsync(Model model, CancellationToken cance /// public async Task DownloadModelAsync(string modelName, CancellationToken cancellationToken = default) { - var model = GetModelFromName(modelName, true); + var model = await GetModelByNameAsync(modelName, true).ConfigureAwait(false); return await DownloadModelAsync(model, cancellationToken).ConfigureAwait(false); } diff --git a/src/Extensions/ServiceCollectionChatClientExtensions.cs b/src/Extensions/ServiceCollectionChatClientExtensions.cs index 0d2cd11..9685747 100644 --- a/src/Extensions/ServiceCollectionChatClientExtensions.cs +++ b/src/Extensions/ServiceCollectionChatClientExtensions.cs @@ -51,7 +51,6 @@ public static IServiceCollection AddPiecesChatClient( { return services.AddChatClient(pipeline => { - builder?.Invoke(pipeline); // Get the logger @@ -77,7 +76,7 @@ public static IServiceCollection AddPiecesChatClient( // Find the first model that matches the Id // If there is no match, try based off the name instead - piecesModel = models.FirstOrDefault(m => m.Id == model) ?? piecesClient.GetModelFromName(model); + piecesModel = models.FirstOrDefault(m => m.Id == model) ?? piecesClient.GetModelByNameAsync(model).Result; } // Create the chat client in the pipeline From 906c079cf0b706492cdb154f367b6262037f1afb Mon Sep 17 00:00:00 2001 From: ellie-at-pieces Date: Fri, 18 Oct 2024 17:19:44 +0000 Subject: [PATCH 21/24] fix typos --- README.md | 4 ++-- src/Client/README.md | 4 ++-- src/Extensions/README.md | 6 +++--- src/SampleApps/RemindMe/README.md | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 8c2725f..636bb32 100644 --- a/README.md +++ b/README.md @@ -72,13 +72,13 @@ This repo contains the following projects: There is an example project using the Pieces.OS.Client package in the [`./src/Client.Example`](./src/Client.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`./src/Client.Example`](./src/Client.Example) folder. -Details of this example project is provided in the [src/Client/README.md file](./src/Client/README.md). +Details of this example project are provided in the [src/Client/README.md file](./src/Client/README.md). ## Pieces.Extensions.AI Examples There is an example project using the Pieces.Extensions.AI package in the [`./src/Extensions.Example`](./src/Extensions.Example) folder. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`./src/Extensions.Example`](./src/Extensions.Example) folder. -Details of this example project is provided in the [src/Extensions/README.md file](./src/Extensions/README.md). +Details of this example project are provided in the [src/Extensions/README.md file](./src/Extensions/README.md). 
## Publish the packages diff --git a/src/Client/README.md b/src/Client/README.md index 50d32df..3aff073 100644 --- a/src/Client/README.md +++ b/src/Client/README.md @@ -9,7 +9,7 @@ The Pieces SDK offers the following key features: 1. Copilot Chats: Communicate seamlessly with copilot chats functionality. 1. Asset Management: Save and manage assets and formats efficiently. 1. Local Server Interaction: Interact with a locally hosted server for various functionality. -1. Multi LLMs support: Use any Pieces supported LLM to power your app. +1. Multi-LLM support: Use any Pieces supported LLM to power your app. ## Installation @@ -28,7 +28,7 @@ To get started with the Pieces OS Client SDK, follow these steps: ## Pieces Client Examples -There is an example project using the Pieces Client in the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example) project. +There is an example project using the Pieces Client in the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`Client.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Client.Example) project. ## Quickstart diff --git a/src/Extensions/README.md b/src/Extensions/README.md index da58d89..17437e0 100644 --- a/src/Extensions/README.md +++ b/src/Extensions/README.md @@ -27,7 +27,7 @@ To get started with the Pieces.Extensions.AI, follow these steps: ## Pieces.Extensions.AI Examples -There is an example project using the Pieces.Extensions.AI package in the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you wan to run, then run `dotnet run` from the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example) project. +There is an example project using the Pieces.Extensions.AI package in the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example/Program.cs) project. This example is a console app containing a range of different examples, each commented out. To run these examples, uncomment the one you want to run, then run `dotnet run` from the [`Extensions.Example`](https://github.com/pieces-app/pieces-os-client-sdk-for-csharp/tree/main/src/Extensions.Example) project. ## Quickstart @@ -53,7 +53,7 @@ var assets = await client.GetAssetsAsync().ConfigureAwait(false); IChatClient chatClient = new PiecesChatClient(client, chatName: "A new chat", logger: logger); ``` -When you use the chat client, a new conversation is create in Pieces, and each time you call `chatClient.CompleteAsync` or `chatClient.CompleteStreamingAsync`, the same conversation is continued - assuming that the original chat messages have not changed. 
If this happens, a new conversation is created. +When you use the chat client, a new conversation is created in Pieces, and each time you call `chatClient.CompleteAsync` or `chatClient.CompleteStreamingAsync`, the same conversation is continued - assuming that the original chat messages have not changed. If this happens, a new conversation is created. You can configure the chat with the following `ChatOptions` settings: @@ -65,7 +65,7 @@ You can also set the following in the `ChatOptions.AdditionalProperties` diction | Setting | Type | Default | Description | | --------------------- | ---------------------- | -------- | ----------- | -| `PersistChat` | `bool` | `true` | Should the conversation be saved in Pieces> If this is false, after each chat message, the conversation is deleted and a new one started. | +| `PersistChat` | `bool` | `true` | Should the conversation be saved in Pieces? If this is false, after each chat message, the conversation is deleted and a new one started. | | `AssetIds` | `IEnumerable?` | `null` | A list of asset Ids to add as context to this conversation. | | `Files` | `IEnumerable?` | `null` | A list of file paths to add as context to this conversation. | | `Folders` | `IEnumerable?` | `null` | A list of folder paths to add as context to this conversation. | diff --git a/src/SampleApps/RemindMe/README.md b/src/SampleApps/RemindMe/README.md index 4c64a70..23de3e4 100644 --- a/src/SampleApps/RemindMe/README.md +++ b/src/SampleApps/RemindMe/README.md @@ -29,7 +29,7 @@ As a fun easter egg, depending on the day of the year, this will also add a syst To run this code, follow these steps: -1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. If you don't have it installed and running alredy, download the appropriate version for your operating system: +1. **Download Pieces OS**: Pieces OS serves as the primary backend service, providing essential functionality for the SDK. If you don't have it installed and running already, download the appropriate version for your operating system: - [macOS](https://docs.pieces.app/installation-getting-started/macos) - [Windows](https://docs.pieces.app/installation-getting-started/windows) - [Linux](https://docs.pieces.app/installation-getting-started/linux) From a7b097ece480f141c3ac6c90f5b45782f5e98ef3 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Fri, 18 Oct 2024 12:15:38 -0700 Subject: [PATCH 22/24] Fixing enum --- src/Client.Example/Program.cs | 312 +++++++++++++++++----------------- 1 file changed, 156 insertions(+), 156 deletions(-) diff --git a/src/Client.Example/Program.cs b/src/Client.Example/Program.cs index a796725..d4fca80 100644 --- a/src/Client.Example/Program.cs +++ b/src/Client.Example/Program.cs @@ -476,161 +476,161 @@ // This example shows how to seed a conversation with a set of messages // That are used in the conversation -{ - // var seeds = new List{ - // new(QGPTConversationMessageRoleEnum.SYSTEM, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), - // new(QGPTConversationMessageRoleEnum.USER, "How can I make a web request"), - // new(QGPTConversationMessageRoleEnum.ASSISTANT, @"To make a web request in a programming language, you typically use an HTTP client library. Below are examples in a few popular languages: - - // ### Python - // You can use the `requests` library to make web requests in Python. 
- - // ```python - // import requests - - // response = requests.get('https://api.example.com/data') - // if response.status_code == 200: - // print(response.json()) - // else: - // print(f'Error: {response.status_code}') - // ``` - - // ### JavaScript (using Fetch API) - // In JavaScript, you can use the Fetch API to make web requests. - - // ```javascript - // fetch('https://api.example.com/data') - // .then(response => { - // if (!response.ok) { - // throw new Error('Network response was not ok ' + response.statusText); - // } - // return response.json(); - // }) - // .then(data => console.log(data)) - // .catch(error => console.error('There was a problem with the fetch operation:', error)); - // ``` - - // ### Java (using HttpURLConnection) - // In Java, you can use `HttpURLConnection` to make web requests. - - // ```java - // import java.io.BufferedReader; - // import java.io.InputStreamReader; - // import java.net.HttpURLConnection; - // import java.net.URL; - - // public class WebRequestExample { - // public static void main(String[] args) { - // try { - // URL url = new URL(""https://api.example.com/data\""); - // HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - // conn.setRequestMethod(""GET""); - - // int responseCode = conn.getResponseCode(); - // if (responseCode == HttpURLConnection.HTTP_OK) { - // BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); - // String inputLine; - // StringBuilder response = new StringBuilder(); - - // while ((inputLine = in.readLine()) != null) { - // response.append(inputLine); - // } - // in.close(); - // System.out.println(response.toString()); - // } else { - // System.out.println(""GET request failed: "" + responseCode); - // } - // } catch (Exception e) { - // e.printStackTrace(); - // } - // } - // } - // ``` - - // ### C# (using HttpClient) - // In C#, you can use `HttpClient` to make web requests. - - // ```csharp - // using System; - // using System.Net.Http; - // using System.Threading.Tasks; - - // class Program - // { - // static async Task Main() - // { - // using (HttpClient client = new HttpClient()) - // { - // HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); - // if (response.IsSuccessStatusCode) - // { - // string data = await response.Content.ReadAsStringAsync(); - // Console.WriteLine(data); - // } - // else - // { - // Console.WriteLine($""Error: {response.StatusCode}""); - // } - // } - // } - // } - // ``` - - // Choose the example that fits the programming language you are using!"), - // new(QGPTConversationMessageRoleEnum.USER, "I am using C#"), - // new(QGPTConversationMessageRoleEnum.ASSISTANT, @"Great! Since you're using C#, you can use the `HttpClient` class to make web requests. 
Here's a simple example of how to perform a GET request: - - // ```csharp - // using System; - // using System.Net.Http; - // using System.Threading.Tasks; - - // class Program - // { - // static async Task Main() - // { - // using (HttpClient client = new HttpClient()) - // { - // try - // { - // HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); - // if (response.IsSuccessStatusCode) - // { - // string data = await response.Content.ReadAsStringAsync(); - // Console.WriteLine(data); - // } - // else - // { - // Console.WriteLine($""Error: {response.StatusCode}""); - // } - // } - // catch (Exception e) - // { - // Console.WriteLine($""Exception occurred: {e.Message}""); - // } - // } - // } - // } - // ``` - - // ### Explanation: - // - **HttpClient**: This class is used to send HTTP requests and receive HTTP responses from a resource identified by a URI. - // - **GetAsync**: This method sends a GET request to the specified URI. - // - **IsSuccessStatusCode**: This property checks if the response status code indicates success (2xx). - // - **ReadAsStringAsync**: This method reads the response content as a string asynchronously. - - // Make sure to replace `""https://api.example.com/data""` with the actual URL you want to request. If you have any specific requirements or questions, feel free to ask!"), - // }; - - // var chat = await copilot.CreateSeededChatAsync("Question on async tasks", seeds: seeds).ConfigureAwait(false); - - // var question = "Comment this code"; - // var response = await chat.AskQuestionAsync(question); - - // Console.WriteLine(question); - // Console.WriteLine(); - // Console.WriteLine(response); - // Console.WriteLine(); -} +// { +// var seeds = new List{ +// new(Pieces.OS.Client.Copilot.Role.System, "Answer every question from now on in the style of a pirate. Start every response with 'Hey matey!'."), +// new(Pieces.OS.Client.Copilot.Role.User, "How can I make a web request"), +// new(Pieces.OS.Client.Copilot.Role.Assistant, @"To make a web request in a programming language, you typically use an HTTP client library. Below are examples in a few popular languages: + +// ### Python +// You can use the `requests` library to make web requests in Python. + +// ```python +// import requests + +// response = requests.get('https://api.example.com/data') +// if response.status_code == 200: +// print(response.json()) +// else: +// print(f'Error: {response.status_code}') +// ``` + +// ### JavaScript (using Fetch API) +// In JavaScript, you can use the Fetch API to make web requests. + +// ```javascript +// fetch('https://api.example.com/data') +// .then(response => { +// if (!response.ok) { +// throw new Error('Network response was not ok ' + response.statusText); +// } +// return response.json(); +// }) +// .then(data => console.log(data)) +// .catch(error => console.error('There was a problem with the fetch operation:', error)); +// ``` + +// ### Java (using HttpURLConnection) +// In Java, you can use `HttpURLConnection` to make web requests. 
+ +// ```java +// import java.io.BufferedReader; +// import java.io.InputStreamReader; +// import java.net.HttpURLConnection; +// import java.net.URL; + +// public class WebRequestExample { +// public static void main(String[] args) { +// try { +// URL url = new URL(""https://api.example.com/data\""); +// HttpURLConnection conn = (HttpURLConnection) url.openConnection(); +// conn.setRequestMethod(""GET""); + +// int responseCode = conn.getResponseCode(); +// if (responseCode == HttpURLConnection.HTTP_OK) { +// BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream())); +// String inputLine; +// StringBuilder response = new StringBuilder(); + +// while ((inputLine = in.readLine()) != null) { +// response.append(inputLine); +// } +// in.close(); +// System.out.println(response.toString()); +// } else { +// System.out.println(""GET request failed: "" + responseCode); +// } +// } catch (Exception e) { +// e.printStackTrace(); +// } +// } +// } +// ``` + +// ### C# (using HttpClient) +// In C#, you can use `HttpClient` to make web requests. + +// ```csharp +// using System; +// using System.Net.Http; +// using System.Threading.Tasks; + +// class Program +// { +// static async Task Main() +// { +// using (HttpClient client = new HttpClient()) +// { +// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); +// if (response.IsSuccessStatusCode) +// { +// string data = await response.Content.ReadAsStringAsync(); +// Console.WriteLine(data); +// } +// else +// { +// Console.WriteLine($""Error: {response.StatusCode}""); +// } +// } +// } +// } +// ``` + +// Choose the example that fits the programming language you are using!"), +// new(Pieces.OS.Client.Copilot.Role.User, "I am using C#"), +// new(Pieces.OS.Client.Copilot.Role.Assistant, @"Great! Since you're using C#, you can use the `HttpClient` class to make web requests. Here's a simple example of how to perform a GET request: + +// ```csharp +// using System; +// using System.Net.Http; +// using System.Threading.Tasks; + +// class Program +// { +// static async Task Main() +// { +// using (HttpClient client = new HttpClient()) +// { +// try +// { +// HttpResponseMessage response = await client.GetAsync(""https://api.example.com/data""); +// if (response.IsSuccessStatusCode) +// { +// string data = await response.Content.ReadAsStringAsync(); +// Console.WriteLine(data); +// } +// else +// { +// Console.WriteLine($""Error: {response.StatusCode}""); +// } +// } +// catch (Exception e) +// { +// Console.WriteLine($""Exception occurred: {e.Message}""); +// } +// } +// } +// } +// ``` + +// ### Explanation: +// - **HttpClient**: This class is used to send HTTP requests and receive HTTP responses from a resource identified by a URI. +// - **GetAsync**: This method sends a GET request to the specified URI. +// - **IsSuccessStatusCode**: This property checks if the response status code indicates success (2xx). +// - **ReadAsStringAsync**: This method reads the response content as a string asynchronously. + +// Make sure to replace `""https://api.example.com/data""` with the actual URL you want to request. 
If you have any specific requirements or questions, feel free to ask!"), +// }; + +// var chat = await copilot.CreateSeededChatAsync("Question on async tasks", seeds: seeds).ConfigureAwait(false); + +// var question = "Comment this code"; +// var response = await chat.AskQuestionAsync(question); + +// Console.WriteLine(question); +// Console.WriteLine(); +// Console.WriteLine(response); +// Console.WriteLine(); +// } #endregion Seed a conversation From 6fcd5b5a0bef7fbfc71be3862bce91aee2da0685 Mon Sep 17 00:00:00 2001 From: ellie-at-pieces Date: Fri, 18 Oct 2024 19:46:10 +0000 Subject: [PATCH 23/24] fix list numbers --- src/SampleApps/RemindMe/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/SampleApps/RemindMe/README.md b/src/SampleApps/RemindMe/README.md index 23de3e4..a1929a2 100644 --- a/src/SampleApps/RemindMe/README.md +++ b/src/SampleApps/RemindMe/README.md @@ -33,8 +33,8 @@ To run this code, follow these steps: - [macOS](https://docs.pieces.app/installation-getting-started/macos) - [Windows](https://docs.pieces.app/installation-getting-started/windows) - [Linux](https://docs.pieces.app/installation-getting-started/linux) -1. Ensure the Workstream Pattern Engine is running. -1. Build and run the app: +2. Ensure the Workstream Pattern Engine is running. +3. Build and run the app: ```shell dotnet run From 5ebb687520c4580878b749c7886f55b0d10a4705 Mon Sep 17 00:00:00 2001 From: Jim Bennett Date: Fri, 18 Oct 2024 12:53:28 -0700 Subject: [PATCH 24/24] Numbering --- src/SampleApps/RemindMe/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/SampleApps/RemindMe/README.md b/src/SampleApps/RemindMe/README.md index a1929a2..23de3e4 100644 --- a/src/SampleApps/RemindMe/README.md +++ b/src/SampleApps/RemindMe/README.md @@ -33,8 +33,8 @@ To run this code, follow these steps: - [macOS](https://docs.pieces.app/installation-getting-started/macos) - [Windows](https://docs.pieces.app/installation-getting-started/windows) - [Linux](https://docs.pieces.app/installation-getting-started/linux) -2. Ensure the Workstream Pattern Engine is running. -3. Build and run the app: +1. Ensure the Workstream Pattern Engine is running. +1. Build and run the app: ```shell dotnet run