[easy][ts] 2/n Set run output type to be Output[] instead of Union
## What

Updating the types in the TS SDK so that `run` returns `Output[]` instead of `Output | Output[]`.

## Why

The run method should always return an array of outputs, even if there is only one output.

The Prompt schema already declares `outputs` as `outputs?: Output[];` anyway.

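With this change, callers no longer need to narrow a union before iterating. A minimal consumer-side sketch of the new calling convention (the config path, prompt name, and output fields below are illustrative assumptions, not part of this change):

```typescript
// Sketch only: assumes the `aiconfig` package exports `AIConfigRuntime` and the
// `Output` type, and that a config file with a "get_activities" prompt exists.
import { AIConfigRuntime, Output } from "aiconfig";

async function main() {
  const config = AIConfigRuntime.load("travel.aiconfig.json"); // placeholder path

  // run() now always resolves to Output[], so no Output | Output[] narrowing is needed.
  const outputs: Output[] = await config.run("get_activities");

  for (const output of outputs) {
    if (output.output_type === "execute_result") {
      // The shape of `data` depends on the model parser that produced the output.
      console.log(output.data);
    }
  }
}

main().catch(console.error);
```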

## Test plan

Ran tests without errors. No type errors on compilation either.

![Screenshot 2023-12-26 at 1 16 46 PM](https://github.com/lastmile-ai/aiconfig/assets/141073967/a5e3f16f-39e4-4325-95e1-6ac837daeab0)
Ankush Pala [email protected] committed Dec 27, 2023
1 parent fae4196 commit c91d8bf
Showing 7 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion extensions/HuggingFace/typescript/hf.ts
@@ -184,7 +184,7 @@ export class HuggingFaceTextGenerationModelParserExtension extends Parameterized
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
2 changes: 1 addition & 1 deletion extensions/llama/typescript/llama.ts
@@ -351,7 +351,7 @@ export class LlamaModelParser extends ParameterizedModelParser<LlamaCompletionPa
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
4 changes: 2 additions & 2 deletions typescript/lib/config.ts
@@ -398,7 +398,7 @@ export class AIConfigRuntime implements AIConfig {
promptName: string,
params: JSONObject = {},
options?: InferenceOptions
-) {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
@@ -452,7 +452,7 @@ export class AIConfigRuntime implements AIConfig {
promptName: string,
params: JSONObject = {},
options?: InferenceOptions
-) {
+): Promise<Output[] | undefined> {
const prompt = this.getPrompt(promptName);
if (!prompt) {
throw new Error(
2 changes: 1 addition & 1 deletion typescript/lib/modelParser.ts
@@ -89,7 +89,7 @@ export abstract class ModelParser<T = JSONObject, R = T> {
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject
-): Promise<Output | Output[]>;
+): Promise<Output[]>;

/**
* Get the string representing the output from a prompt.
2 changes: 1 addition & 1 deletion typescript/lib/parameterizedModelParser.ts
@@ -63,7 +63,7 @@ export abstract class ParameterizedModelParser<
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params: JSONObject = {}
-) {
+): Promise<Output[] | undefined> {
const dependencyGraph = getDependencyGraph(aiConfig);

return await this.runWithDependenciesInternal(
2 changes: 1 addition & 1 deletion typescript/lib/parsers/hf.ts
@@ -176,7 +176,7 @@ export class HuggingFaceTextGenerationParser extends ParameterizedModelParser<Te
aiConfig: AIConfigRuntime,
options?: InferenceOptions | undefined,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
4 changes: 2 additions & 2 deletions typescript/lib/parsers/openai.ts
@@ -150,7 +150,7 @@ export class OpenAIModelParser extends ParameterizedModelParser<CompletionCreate
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
if (!this.openai) {
const apiKey = getAPIKeyFromEnv("OPENAI_API_KEY");
this.openai = new OpenAI({ apiKey, ...(this.openaiOptions || {}) });
@@ -505,7 +505,7 @@ export class OpenAIChatModelParser extends ParameterizedModelParser<Chat.ChatCom
aiConfig: AIConfigRuntime,
options?: InferenceOptions,
params?: JSONObject | undefined
-): Promise<Output | Output[]> {
+): Promise<Output[]> {
const startEvent = {
name: "on_run_start",
file: __filename,
