From 83d63ebe41df5e5e4810e11e159cd8f4cbd24c6d Mon Sep 17 00:00:00 2001 From: "Ankush Pala ankush@lastmileai.dev" <> Date: Tue, 26 Dec 2023 13:12:18 -0500 Subject: [PATCH] [easy][ts] 2/n Set run output type to be Output[] instead of Union ## What Updating the types in the ts sdk to return `Output[]` instead of `Output | Output[]` ## Why Run method should always return an array of outputs, even if it's just one output. The Prompt schema already refers to `outputs` as `outputs?: Output[];` anyway ## Testplan Ran tests without errors. No type errors on compilation either --- extensions/HuggingFace/typescript/hf.ts | 2 +- extensions/llama/typescript/llama.ts | 2 +- typescript/lib/config.ts | 4 ++-- typescript/lib/modelParser.ts | 2 +- typescript/lib/parameterizedModelParser.ts | 10 ++++++++-- typescript/lib/parsers/hf.ts | 2 +- typescript/lib/parsers/openai.ts | 4 ++-- 7 files changed, 16 insertions(+), 10 deletions(-) diff --git a/extensions/HuggingFace/typescript/hf.ts b/extensions/HuggingFace/typescript/hf.ts index 393867098..6569a0d60 100644 --- a/extensions/HuggingFace/typescript/hf.ts +++ b/extensions/HuggingFace/typescript/hf.ts @@ -184,7 +184,7 @@ export class HuggingFaceTextGenerationModelParserExtension extends Parameterized aiConfig: AIConfigRuntime, options?: InferenceOptions | undefined, params?: JSONObject | undefined - ): Promise { + ): Promise { const startEvent = { name: "on_run_start", file: __filename, diff --git a/extensions/llama/typescript/llama.ts b/extensions/llama/typescript/llama.ts index 39a69982d..3df085e95 100644 --- a/extensions/llama/typescript/llama.ts +++ b/extensions/llama/typescript/llama.ts @@ -351,7 +351,7 @@ export class LlamaModelParser extends ParameterizedModelParser { + ): Promise { const startEvent = { name: "on_run_start", file: __filename, diff --git a/typescript/lib/config.ts b/typescript/lib/config.ts index da04fc814..0a8c13dca 100644 --- a/typescript/lib/config.ts +++ b/typescript/lib/config.ts @@ -398,7 +398,7 @@ export
class AIConfigRuntime implements AIConfig { promptName: string, params: JSONObject = {}, options?: InferenceOptions - ) { + ): Promise { const startEvent = { name: "on_run_start", file: __filename, @@ -452,7 +452,7 @@ export class AIConfigRuntime implements AIConfig { promptName: string, params: JSONObject = {}, options?: InferenceOptions - ) { + ): Promise { const prompt = this.getPrompt(promptName); if (!prompt) { throw new Error( diff --git a/typescript/lib/modelParser.ts b/typescript/lib/modelParser.ts index 2db735d2e..25e320319 100644 --- a/typescript/lib/modelParser.ts +++ b/typescript/lib/modelParser.ts @@ -89,7 +89,7 @@ export abstract class ModelParser { aiConfig: AIConfigRuntime, options?: InferenceOptions, params?: JSONObject - ): Promise; + ): Promise; /** * Get the string representing the output from a prompt. diff --git a/typescript/lib/parameterizedModelParser.ts b/typescript/lib/parameterizedModelParser.ts index a8b55b0d3..8d9cd0ffa 100644 --- a/typescript/lib/parameterizedModelParser.ts +++ b/typescript/lib/parameterizedModelParser.ts @@ -63,10 +63,10 @@ export abstract class ParameterizedModelParser< aiConfig: AIConfigRuntime, options?: InferenceOptions, params: JSONObject = {} - ) { + ): Promise { const dependencyGraph = getDependencyGraph(aiConfig); - return await this.runWithDependenciesInternal( + const result = await this.runWithDependenciesInternal( promptName, aiConfig, params, @@ -74,6 +74,12 @@ export abstract class ParameterizedModelParser< /*alreadyExecutedPrompts*/ new Set(), options ); + if (result === undefined) { + // Should never happen. If it does, it's a bug. This check is to help define the return type of this function. 
+ throw new Error(`runWithDependencies() for Prompt ${promptName} returned undefined, something went wrong`); + } + + return result; } private async runWithDependenciesInternal( diff --git a/typescript/lib/parsers/hf.ts b/typescript/lib/parsers/hf.ts index 9c0939ca8..387bae14e 100644 --- a/typescript/lib/parsers/hf.ts +++ b/typescript/lib/parsers/hf.ts @@ -176,7 +176,7 @@ export class HuggingFaceTextGenerationParser extends ParameterizedModelParser { + ): Promise { const startEvent = { name: "on_run_start", file: __filename, diff --git a/typescript/lib/parsers/openai.ts b/typescript/lib/parsers/openai.ts index dcb8eae66..950f328e4 100644 --- a/typescript/lib/parsers/openai.ts +++ b/typescript/lib/parsers/openai.ts @@ -150,7 +150,7 @@ export class OpenAIModelParser extends ParameterizedModelParser { + ): Promise { if (!this.openai) { const apiKey = getAPIKeyFromEnv("OPENAI_API_KEY"); this.openai = new OpenAI({ apiKey, ...(this.openaiOptions || {}) }); @@ -505,7 +505,7 @@ export class OpenAIChatModelParser extends ParameterizedModelParser { + ): Promise { const startEvent = { name: "on_run_start", file: __filename,