Replace RUN_PROMPT with STREAM_AICONFIG_CHUNK inside of streaming implementation (#919)

This is the exact same logic; I'm just doing this as an intermediate step to keep things clear and easy to review.
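
For context, here is a minimal before/after sketch of the rename, inferred from the commit title and the first diff hunk below; the identifiers (`dispatch`, `action`, `promptId`, `cancellationToken`) come from the surrounding `EditorContainer` streaming callback.

```ts
// Before: the streaming callback reused the RUN_PROMPT sub-action type.
dispatch({
  type: "CONSOLIDATE_AICONFIG",
  action: {
    ...action,
    type: "RUN_PROMPT",
    id: promptId,
    cancellationToken,
    isRunning: true,
  },
});

// After: the chunk gets its own STREAM_AICONFIG_CHUNK sub-action type,
// consolidated by the same reducer logic as before.
dispatch({
  type: "CONSOLIDATE_AICONFIG",
  action: {
    ...action,
    type: "STREAM_AICONFIG_CHUNK",
    id: promptId,
    cancellationToken,
    isRunning: true,
  },
});
```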

## Test Plan
No functional changes; streaming and non-streaming models still work.

---
Stack created with [Sapling](https://sapling-scm.com). Best reviewed with [ReviewStack](https://reviewstack.dev/lastmile-ai/aiconfig/pull/919).
* #928
* #926
* #925
* #924
* #922
* #921
* #920
* __->__ #919
* #918
* #917
rossdanlm authored Jan 15, 2024
2 parents 157b277 + 90e3f3d commit 7ffccee
Showing 2 changed files with 66 additions and 4 deletions.
@@ -645,13 +645,12 @@ export default function EditorContainer({
output: event.data,
});
} else if (event.type === "aiconfig_chunk") {
// Next PR: Change this to aiconfig_stream to make it more obvious
// and make STREAM_AICONFIG it's own event so we don't need to pass
// the `isRunning` state to set. See Ryan's comments about this in
dispatch({
type: "CONSOLIDATE_AICONFIG",
action: {
...action,
type: "STREAM_AICONFIG_CHUNK",
id: promptId,
cancellationToken,
// Keep the prompt running state until the end of streaming
isRunning: true,
},
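
The dispatch above boils down to the following sub-action once the spread and overrides are applied; it matches the `StreamAIConfigChunkAction` type added in the reducer file below. The concrete values here are illustrative assumptions, and `...action` may carry additional fields.

```ts
// Illustrative only: the values are made up for the sake of the example.
const subAction: StreamAIConfigChunkAction = {
  type: "STREAM_AICONFIG_CHUNK",
  id: "prompt_1",               // promptId of the prompt being streamed
  cancellationToken: "abc-123", // token used to cancel the in-flight run
  isRunning: true,              // keep the prompt's running state while chunks stream in
};
```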
@@ -15,6 +15,7 @@ export type MutateAIConfigAction =
| RunPromptAction
| SetDescriptionAction
| SetNameAction
| StreamAIConfigChunkAction
| StreamOutputChunkAction
| StopStreamingAction
| UpdatePromptInputAction
@@ -28,6 +29,7 @@ export type ConsolidateAIConfigSubAction =
export type ConsolidateAIConfigSubAction =
| AddPromptAction
| RunPromptAction
| StreamAIConfigChunkAction
| UpdatePromptInputAction;

export type ConsolidateAIConfigAction = {
@@ -78,6 +80,13 @@ export type SetNameAction = {
name: string;
};

export type StreamAIConfigChunkAction = {
type: "STREAM_AICONFIG_CHUNK";
id: string;
cancellationToken?: string;
isRunning?: boolean;
};

export type StreamOutputChunkAction = {
type: "STREAM_OUTPUT_CHUNK";
id: string;
@@ -229,6 +238,43 @@ function reduceConsolidateAIConfig(
}
);
}
case "STREAM_AICONFIG_CHUNK": {
// Note: If we are calling "RUN_PROMPT" directly as a dispatched event
// type, we automatically set the state there to `isRunning` for that
// prompt. That logic does not happen here, it happens in
// `aiconfigReducer`.
// If we are calling "RUN_PROMPT" indirectly via the action of a
// "CONSOLIDATE_AICONFIG" dispatch, we end up here. We need to check
// if we actually want to set the prompt state to `isRunning`
const isRunning = action.isRunning ?? false;
const stateWithUpdatedRunningPromptId = {
...state,
_ui: {
...state._ui,
runningPromptId: isRunning ? action.id : undefined,
},
};
return reduceReplacePrompt(
stateWithUpdatedRunningPromptId,
action.id,
(prompt) => {
const responsePrompt = responseConfig.prompts.find(
(resPrompt) => resPrompt.name === prompt.name
);

const outputs = responsePrompt?.outputs ?? prompt.outputs;

return {
...prompt,
_ui: {
...prompt._ui,
isRunning,
},
outputs,
};
}
);
}
case "UPDATE_PROMPT_INPUT": {
return reduceReplacePrompt(state, action.id, consolidatePrompt);
}
@@ -346,6 +392,23 @@ export default function aiconfigReducer(
name: action.name,
};
}
case "STREAM_AICONFIG_CHUNK": {
const runningState = {
...dirtyState,
_ui: {
...dirtyState._ui,
runningPromptId: action.id,
},
};
return reduceReplacePrompt(runningState, action.id, (prompt) => ({
...prompt,
_ui: {
...prompt._ui,
cancellationToken: action.cancellationToken,
isRunning: true,
},
}));
}
case "STREAM_OUTPUT_CHUNK": {
return reduceReplacePrompt(dirtyState, action.id, (prompt) => ({
...prompt,
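
To spell out the comment in `reduceConsolidateAIConfig` above: the two dispatch paths treat the running state differently. Below is a rough sketch of that difference; the action values are hypothetical, and any extra fields on the `CONSOLIDATE_AICONFIG` action (such as the consolidated config the reducer reads outputs from) are omitted.

```ts
// Direct dispatch: aiconfigReducer handles STREAM_AICONFIG_CHUNK itself and
// unconditionally marks the prompt as running.
dispatch({
  type: "STREAM_AICONFIG_CHUNK",
  id: "prompt_1",               // hypothetical prompt id
  cancellationToken: "abc-123", // hypothetical cancellation token
});

// Consolidated dispatch: the sub-action is routed to reduceConsolidateAIConfig,
// which only keeps the running state when `isRunning` is explicitly true
// (it falls back to false via `action.isRunning ?? false`).
dispatch({
  type: "CONSOLIDATE_AICONFIG",
  action: {
    type: "STREAM_AICONFIG_CHUNK",
    id: "prompt_1",
    // omitting isRunning here would clear the running UI state for this prompt
    isRunning: true,
  },
  // ...plus whatever else CONSOLIDATE_AICONFIG carries (e.g. the server's AIConfig),
  // which reduceConsolidateAIConfig uses to pick up the prompt outputs.
});
```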
