From e8223f34ae015398ef060444286f1fa7dd0cd622 Mon Sep 17 00:00:00 2001 From: "Rossdan Craig rossdan@lastmileai.dev" <> Date: Wed, 27 Dec 2023 10:13:09 -0500 Subject: [PATCH] [typescript] Save output.data with OutputData type instead of pure string This will make it easier for Ryan to parse because he just has to check the `.kind` field and if it's string, he knows that the string is meant to be string only. I also kept the existing form where it will still work if we ONLY pass in string, in which case we'd just assume it's supposed to be in pure string form. I also added function call support to this diff for `openai.ts`. Sorry yea I know it's a bigger diff. ## Test plan Pass automated tests, running these commands from `aiconfig` top-level dir ``` npx ts-node typescript/demo/function-call-stream.ts npx ts-node typescript/demo/demo.ts npx ts-node typescript/demo/test-hf.ts ``` Also run `yarn test` from typescript dir --- extensions/HuggingFace/typescript/hf.ts | 50 ++++-- typescript/__tests__/parsers/hf/hf.test.ts | 5 +- .../__tests__/parsers/palm-text/palm.test.ts | 5 +- typescript/lib/parsers/hf.ts | 36 ++-- typescript/lib/parsers/openai.ts | 155 ++++++++++++++---- typescript/lib/parsers/palm.ts | 27 ++- 6 files changed, 209 insertions(+), 69 deletions(-) diff --git a/extensions/HuggingFace/typescript/hf.ts b/extensions/HuggingFace/typescript/hf.ts index d8a638aaa..6e0e29c29 100644 --- a/extensions/HuggingFace/typescript/hf.ts +++ b/extensions/HuggingFace/typescript/hf.ts @@ -6,15 +6,16 @@ import { } from "@huggingface/inference"; import { - Prompt, - Output, - PromptInput, - ParameterizedModelParser, - ModelMetadata, - ExecuteResult, AIConfigRuntime, - InferenceOptions, CallbackEvent, + ExecuteResult, + InferenceOptions, + ModelMetadata, + Output, + OutputData, + ParameterizedModelParser, + Prompt, + PromptInput, } from "aiconfig"; import _ from "lodash"; import * as aiconfig from "aiconfig"; @@ -248,7 +249,14 @@ export class 
HuggingFaceTextGenerationModelParserExtension extends Parameterized } if (output.output_type === "execute_result") { - if (typeof output.data === "string") { + if (output.data?.hasOwnProperty("value")) { + const outputData = output.data as OutputData; + if (typeof outputData.value === "string") { + return outputData.value; + } + // should never get here for this model parser, just being safe + return JSON.stringify(outputData.value); + } else if (typeof output.data === "string") { return output.data; } @@ -278,30 +286,36 @@ async function constructStreamOutput( let output = {} as ExecuteResult; for await (const iteration of response) { - const data = iteration.token.text; - const metadata = iteration; + const newText = iteration.token.text; - accumulatedMessage += data; - const delta = data; + accumulatedMessage += newText; const index = 0; - options.callbacks!.streamCallback(delta, accumulatedMessage, 0); + options.callbacks!.streamCallback(newText, accumulatedMessage, index); + const outputData: OutputData = { + kind: "string", + // TODO: Investigate if we should use the accumulated message instead + // of newText: https://github.com/lastmile-ai/aiconfig/issues/620 + value: newText, + }; output = { output_type: "execute_result", - // TODO: Investigate if we should use the accumulated message instead - // of delta: https://github.com/lastmile-ai/aiconfig/issues/620 - data: delta, + data: outputData, execution_count: index, - metadata: metadata, + metadata: iteration, } as ExecuteResult; } return output; } function constructOutput(response: TextGenerationOutput): Output { + const data: OutputData = { + kind: "string", + value: response.generated_text, + }; const output = { output_type: "execute_result", - data: response.generated_text, + data, execution_count: 0, metadata: { rawResponse: response }, } as ExecuteResult; diff --git a/typescript/__tests__/parsers/hf/hf.test.ts b/typescript/__tests__/parsers/hf/hf.test.ts index ed3dd1c51..51afb346f 100644 --- 
a/typescript/__tests__/parsers/hf/hf.test.ts +++ b/typescript/__tests__/parsers/hf/hf.test.ts @@ -257,7 +257,10 @@ describe("HuggingFaceTextGeneration ModelParser", () => { const expectedOutput = { output_type: "execute_result", - data: "Test text generation", + data: { + kind: "string", + value: "Test text generation", + }, execution_count: 0, metadata: { rawResponse: { diff --git a/typescript/__tests__/parsers/palm-text/palm.test.ts b/typescript/__tests__/parsers/palm-text/palm.test.ts index cdbffe309..e204e33d1 100644 --- a/typescript/__tests__/parsers/palm-text/palm.test.ts +++ b/typescript/__tests__/parsers/palm-text/palm.test.ts @@ -122,6 +122,9 @@ describe("PaLM Text ModelParser", () => { const aiconfig = AIConfigRuntime.load(PALM_CONFIG_PATH); const [result] = (await aiconfig.run("prompt1")) as Output[]; - expect((result as ExecuteResult).data).toEqual("Ranch"); + expect((result as ExecuteResult).data).toEqual({ + kind: "string", + value: "Ranch", + }); }); }); diff --git a/typescript/lib/parsers/hf.ts b/typescript/lib/parsers/hf.ts index f4048e56d..3d50a6289 100644 --- a/typescript/lib/parsers/hf.ts +++ b/typescript/lib/parsers/hf.ts @@ -12,6 +12,7 @@ import { ExecuteResult, ModelMetadata, Output, + OutputData, Prompt, PromptInput, } from "../../types"; @@ -240,7 +241,15 @@ export class HuggingFaceTextGenerationParser extends ParameterizedModelParser