Skip to content

Commit

Permalink
[typescript] Save output.data with OutputData type instead of pure st…
Browse files Browse the repository at this point in the history
…ring

This will make it easier for Ryan to parse because he just has to check the `.kind` field: if it's `"string"`, he knows the value is meant to be a plain string. I also kept the existing behavior, so it still works if we pass in only a string, in which case we assume it's supposed to be in pure string form.

I also added function-call support for `openai.ts` to this diff. Sorry — yeah, I know it makes the diff bigger.

## Test plan
Pass automated tests, running these commands from `aiconfig` top-level dir
```
npx ts-node typescript/demo/function-call-stream.ts
npx ts-node typescript/demo/demo.ts
npx ts-node typescript/demo/test-hf.ts
```

Also run `yarn test` from typescript dir
  • Loading branch information
Rossdan Craig [email protected] committed Dec 27, 2023
1 parent d35448f commit e8223f3
Show file tree
Hide file tree
Showing 6 changed files with 209 additions and 69 deletions.
50 changes: 32 additions & 18 deletions extensions/HuggingFace/typescript/hf.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,16 @@ import {
} from "@huggingface/inference";

import {
Prompt,
Output,
PromptInput,
ParameterizedModelParser,
ModelMetadata,
ExecuteResult,
AIConfigRuntime,
InferenceOptions,
CallbackEvent,
ExecuteResult,
InferenceOptions,
ModelMetadata,
Output,
OutputData,
ParameterizedModelParser,
Prompt,
PromptInput,
} from "aiconfig";
import _ from "lodash";
import * as aiconfig from "aiconfig";
Expand Down Expand Up @@ -248,7 +249,14 @@ export class HuggingFaceTextGenerationModelParserExtension extends Parameterized
}

if (output.output_type === "execute_result") {
if (typeof output.data === "string") {
if (output.data?.hasOwnProperty("value")) {
const outputData = output.data as OutputData;
if (typeof outputData.value === "string") {
return outputData.value;
}
// should never get here for this model parser, just being safe
return JSON.stringify(outputData.value);
} else if (typeof output.data === "string") {
return output.data;
}

Expand Down Expand Up @@ -278,30 +286,36 @@ async function constructStreamOutput(
let output = {} as ExecuteResult;

for await (const iteration of response) {
const data = iteration.token.text;
const metadata = iteration;
const newText = iteration.token.text;

accumulatedMessage += data;
const delta = data;
accumulatedMessage += newText;
const index = 0;
options.callbacks!.streamCallback(delta, accumulatedMessage, 0);
options.callbacks!.streamCallback(newText, accumulatedMessage, index);

const outputData: OutputData = {
kind: "string",
// TODO: Investigate if we should use the accumulated message instead
// of newText: https://github.com/lastmile-ai/aiconfig/issues/620
value: newText,
};
output = {
output_type: "execute_result",
// TODO: Investigate if we should use the accumulated message instead
// of delta: https://github.com/lastmile-ai/aiconfig/issues/620
data: delta,
data: outputData,
execution_count: index,
metadata: metadata,
metadata: iteration,
} as ExecuteResult;
}
return output;
}

function constructOutput(response: TextGenerationOutput): Output {
const data: OutputData = {
kind: "string",
value: response.generated_text,
};
const output = {
output_type: "execute_result",
data: response.generated_text,
data,
execution_count: 0,
metadata: { rawResponse: response },
} as ExecuteResult;
Expand Down
5 changes: 4 additions & 1 deletion typescript/__tests__/parsers/hf/hf.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,10 @@ describe("HuggingFaceTextGeneration ModelParser", () => {

const expectedOutput = {
output_type: "execute_result",
data: "Test text generation",
data: {
kind: "string",
value: "Test text generation",
},
execution_count: 0,
metadata: {
rawResponse: {
Expand Down
5 changes: 4 additions & 1 deletion typescript/__tests__/parsers/palm-text/palm.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,9 @@ describe("PaLM Text ModelParser", () => {
const aiconfig = AIConfigRuntime.load(PALM_CONFIG_PATH);

const [result] = (await aiconfig.run("prompt1")) as Output[];
expect((result as ExecuteResult).data).toEqual("Ranch");
expect((result as ExecuteResult).data).toEqual({
kind: "string",
value: "Ranch",
});
});
});
36 changes: 26 additions & 10 deletions typescript/lib/parsers/hf.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import {
ExecuteResult,
ModelMetadata,
Output,
OutputData,
Prompt,
PromptInput,
} from "../../types";
Expand Down Expand Up @@ -240,7 +241,15 @@ export class HuggingFaceTextGenerationParser extends ParameterizedModelParser<Te
}

if (output.output_type === "execute_result") {
if (typeof output.data === "string") {
if (output.data?.hasOwnProperty("value")) {
const outputData = output.data as OutputData;
if (typeof outputData.value === "string") {
return outputData.value;
}
// Sarmad + Ryan, let me know if you prefer I do something else
// Was basing this off existing code for function calls in openai.ts
return JSON.stringify(outputData.value);
} else if (typeof output.data === "string") {
return output.data;
}

Expand Down Expand Up @@ -270,30 +279,37 @@ async function constructStreamOutput(
let output = {} as ExecuteResult;

for await (const iteration of response) {
const data = iteration.token.text;
const newText = iteration.token.text;
const metadata = iteration;

accumulatedMessage += data;
const delta = data;
accumulatedMessage += newText;
const index = 0;
options.callbacks!.streamCallback(delta, accumulatedMessage, 0);
options.callbacks!.streamCallback(newText, accumulatedMessage, 0);

const data: OutputData = {
kind: "string",
// TODO: Investigate if we should use the accumulated message instead
// of newText: https://github.com/lastmile-ai/aiconfig/issues/620
value: newText,
};
output = {
output_type: "execute_result",
// TODO: Investigate if we should use the accumulated message instead
// of delta: https://github.com/lastmile-ai/aiconfig/issues/620
data: delta,
data,
execution_count: index,
metadata: { metadata },
metadata,
} as ExecuteResult;
}
return output;
}

function constructOutput(response: TextGenerationOutput): Output {
const data: OutputData = {
kind: "string",
value: response.generated_text,
};
const output = {
output_type: "execute_result",
data: response.generated_text,
data,
execution_count: 0,
metadata: { rawResponse: response },
} as ExecuteResult;
Expand Down
Loading

0 comments on commit e8223f3

Please sign in to comment.