[easy][ts] 2/n Set run output type to be Output[] instead of Union
## What

Updating the types in the TS SDK so that `run()` returns `Output[]` instead of `Output | Output[]`.

## Why

The `run` method should always return an array of outputs, even if there is just one output.

The Prompt schema already declares `outputs` as `outputs?: Output[];` anyway.
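
For callers, the practical effect is that the result of `run()` no longer needs an `Array.isArray` normalization step. A minimal sketch of the new contract (the import path and prompt name are illustrative assumptions, not part of this change):

```typescript
import type { AIConfigRuntime, Output } from "aiconfig";

// "get_activities" is a placeholder prompt name used only for illustration.
async function runPrompt(config: AIConfigRuntime): Promise<Output[]> {
  // Before: run() resolved to Output | Output[], so callers normalized manually:
  //   const result = await config.run("get_activities");
  //   const outputs = Array.isArray(result) ? result : [result];

  // After: run() always resolves to Output[], even when there is a single output.
  return await config.run("get_activities");
}
```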


## Testplan

Ran tests without errors. No type errors on compilation either.
Ankush Pala [email protected] committed Dec 26, 2023
1 parent 534ee8e commit 83d63eb
Showing 7 changed files with 16 additions and 10 deletions.
2 changes: 1 addition & 1 deletion extensions/HuggingFace/typescript/hf.ts
@@ -184,7 +184,7 @@ export class HuggingFaceTextGenerationModelParserExtension extends Parameterized
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
2 changes: 1 addition & 1 deletion extensions/llama/typescript/llama.ts
@@ -351,7 +351,7 @@ export class LlamaModelParser extends ParameterizedModelParser<LlamaCompletionPa
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
4 changes: 2 additions & 2 deletions typescript/lib/config.ts
@@ -398,7 +398,7 @@ export class AIConfigRuntime implements AIConfig {
promptName: string,
params: JSONObject = {},
options?: InferenceOptions
-) {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
@@ -452,7 +452,7 @@ export class AIConfigRuntime implements AIConfig {
promptName: string,
params: JSONObject = {},
options?: InferenceOptions
-) {
+): Promise<Output[]> {
const prompt = this.getPrompt(promptName);
if (!prompt) {
throw new Error(
2 changes: 1 addition & 1 deletion typescript/lib/modelParser.ts
@@ -89,7 +89,7 @@ export abstract class ModelParser<T = JSONObject, R = T> {
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject
-): Promise<Output | Output[]>;
+): Promise<Output[]>;

/**
* Get the string representing the output from a prompt.
10 changes: 8 additions & 2 deletions typescript/lib/parameterizedModelParser.ts
@@ -63,17 +63,23 @@ export abstract class ParameterizedModelParser<
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params: JSONObject = {}
-) {
+): Promise<Output[]> {
const dependencyGraph = getDependencyGraph(aiConfig);

-return await this.runWithDependenciesInternal(
+const result = await this.runWithDependenciesInternal(
promptName,
aiConfig,
params,
dependencyGraph,
/*alreadyExecutedPrompts*/ new Set<string>(),
options
);
+if (result === undefined) {
+// Should never happen. If it does, it's a bug. This check is to help define the return type of this function.
+throw new Error(`runWithDependencies() for Prompt ${promptName} returned undefined, something went wrong`);
+}
+
+return result;
}

private async runWithDependenciesInternal(
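
Note on the `runWithDependencies()` change above: `runWithDependenciesInternal()` can resolve to `undefined` (hence the guard), so without the check TypeScript would infer `Promise<Output[] | undefined>` for the public method; the explicit check narrows the result to `Output[]`. A standalone sketch of the same narrowing pattern (names here are placeholders, not the SDK's API):

```typescript
type Output = { data: string };

// Hypothetical stand-in for runWithDependenciesInternal(), which may resolve to undefined.
async function fetchOutputs(name: string): Promise<Output[] | undefined> {
  return name ? [{ data: `result for ${name}` }] : undefined;
}

async function run(name: string): Promise<Output[]> {
  const result = await fetchOutputs(name);
  if (result === undefined) {
    // Without this guard, the inferred return type would be Promise<Output[] | undefined>.
    throw new Error(`fetchOutputs() returned undefined for ${name}`);
  }
  return result; // narrowed to Output[]
}
```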
2 changes: 1 addition & 1 deletion typescript/lib/parsers/hf.ts
@@ -176,7 +176,7 @@ export class HuggingFaceTextGenerationParser extends ParameterizedModelParser<Te
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
4 changes: 2 additions & 2 deletions typescript/lib/parsers/openai.ts
@@ -150,7 +150,7 @@ export class OpenAIModelParser extends ParameterizedModelParser<CompletionCreate
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
if (!this.openai) {
const apiKey = getAPIKeyFromEnv("OPENAI_API_KEY");
this.openai = new OpenAI({ apiKey, ...(this.openaiOptions || {}) });
@@ -505,7 +505,7 @@ export class OpenAIChatModelParser extends ParameterizedModelParser<Chat.ChatCom
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
