From 90e3f3db3eb12b8d02f5bd1d007fd7a364a6b940 Mon Sep 17 00:00:00 2001
From: "Rossdan Craig rossdan@lastmileai.dev" <>
Date: Sun, 14 Jan 2024 03:11:20 -0500
Subject: [PATCH] Replace RUN_PROMPT with STREAM_AICONFIG_CHUNK inside of
 streaming implementation

This is the exact same logic, but I'm just going to do this as an
intermediate step to keep things clear and easy to review

## Test Plan
No functional changes, streaming and non-streaming models still work
---
 .../client/src/components/AIConfigEditor.tsx |  7 +--
 .../client/src/components/aiconfigReducer.ts | 63 +++++++++++++++++++
 2 files changed, 66 insertions(+), 4 deletions(-)

diff --git a/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx b/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
index 93db37ec5..1ce48c5b0 100644
--- a/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
+++ b/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
@@ -637,13 +637,12 @@ export default function EditorContainer({
             output: event.data,
           });
         } else if (event.type === "aiconfig_chunk") {
-          // Next PR: Change this to aiconfig_stream to make it more obvious
-          // and make STREAM_AICONFIG it's own event so we don't need to pass
-          // the `isRunning` state to set. See Ryan's comments about this in
           dispatch({
             type: "CONSOLIDATE_AICONFIG",
             action: {
-              ...action,
+              type: "STREAM_AICONFIG_CHUNK",
+              id: promptId,
+              cancellationToken,
               // Keep the prompt running state until the end of streaming
               isRunning: true,
             },
diff --git a/python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts b/python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts
index cc9c42e60..218b6a114 100644
--- a/python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts
+++ b/python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts
@@ -15,6 +15,7 @@ export type MutateAIConfigAction =
   | RunPromptAction
   | SetDescriptionAction
   | SetNameAction
+  | StreamAIConfigChunkAction
   | StreamOutputChunkAction
   | StopStreamingAction
   | UpdatePromptInputAction
@@ -28,6 +29,7 @@ export type MutateAIConfigAction =
 export type ConsolidateAIConfigSubAction =
   | AddPromptAction
   | RunPromptAction
+  | StreamAIConfigChunkAction
   | UpdatePromptInputAction;
 
 export type ConsolidateAIConfigAction = {
@@ -78,6 +80,13 @@ export type SetNameAction = {
   name: string;
 };
 
+export type StreamAIConfigChunkAction = {
+  type: "STREAM_AICONFIG_CHUNK";
+  id: string;
+  cancellationToken?: string;
+  isRunning?: boolean;
+};
+
 export type StreamOutputChunkAction = {
   type: "STREAM_OUTPUT_CHUNK";
   id: string;
@@ -229,6 +238,43 @@ function reduceConsolidateAIConfig(
         }
       );
     }
+    case "STREAM_AICONFIG_CHUNK": {
+      // Note: If we are calling "RUN_PROMPT" directly as a dispatched event
+      // type, we automatically set the state there to `isRunning` for that
+      // prompt. That logic does not happen here, it happens in
+      // `aiconfigReducer`.
+      // If we are calling "RUN_PROMPT" indirectly via the action of a
+      // "CONSOLIDATE_AICONFIG" dispatch, we end up here. We need to check
+      // if we actually want to set the prompt state to `isRunning`
+      const isRunning = action.isRunning ?? false;
+      const stateWithUpdatedRunningPromptId = {
+        ...state,
+        _ui: {
+          ...state._ui,
+          runningPromptId: isRunning ? action.id : undefined,
+        },
+      };
+      return reduceReplacePrompt(
+        stateWithUpdatedRunningPromptId,
+        action.id,
+        (prompt) => {
+          const responsePrompt = responseConfig.prompts.find(
+            (resPrompt) => resPrompt.name === prompt.name
+          );
+
+          const outputs = responsePrompt?.outputs ?? prompt.outputs;
+
+          return {
+            ...prompt,
+            _ui: {
+              ...prompt._ui,
+              isRunning,
+            },
+            outputs,
+          };
+        }
+      );
+    }
     case "UPDATE_PROMPT_INPUT": {
       return reduceReplacePrompt(state, action.id, consolidatePrompt);
     }
@@ -346,6 +392,23 @@ export default function aiconfigReducer(
         name: action.name,
       };
     }
+    case "STREAM_AICONFIG_CHUNK": {
+      const runningState = {
+        ...dirtyState,
+        _ui: {
+          ...dirtyState._ui,
+          runningPromptId: action.id,
+        },
+      };
+      return reduceReplacePrompt(runningState, action.id, (prompt) => ({
+        ...prompt,
+        _ui: {
+          ...prompt._ui,
+          cancellationToken: action.cancellationToken,
+          isRunning: true,
+        },
+      }));
+    }
     case "STREAM_OUTPUT_CHUNK": {
       return reduceReplacePrompt(dirtyState, action.id, (prompt) => ({
         ...prompt,