# [editor] Use Prompt ID Instead of Index for Client-side State Actions

We can uniquely identify prompts in client-side state by their client-side id, so let's use that instead of the array index when referencing them in actions. This is more robust than using the index, since an id stays stable even when prompts are added, removed, or reordered, while an index can drift.
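For reference, the updated callbacks below resolve prompts through a `getPrompt(state, id)` helper rather than indexing into `state.prompts`. A minimal sketch of what such a helper could look like is shown here; the `ClientPrompt`/`ClientAIConfig` shapes are assumptions inferred from this diff, and the real helper and types live elsewhere in the client code:

```ts
// Sketch only — assumed shapes inferred from this diff, not the project's real types.
type ClientPrompt = {
  _ui: { id: string }; // stable client-side id assigned when the prompt enters state
  name: string;
  input: unknown;
  metadata?: Record<string, unknown>;
};

type ClientAIConfig = {
  prompts: ClientPrompt[];
};

// Resolve a prompt by its stable client-side id instead of its array position,
// so reordering or deleting other prompts cannot point an action at the wrong prompt.
function getPrompt(
  state: ClientAIConfig,
  id: string
): ClientPrompt | undefined {
  return state.prompts.find((prompt) => prompt._ui.id === id);
}
```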

## Testing:
Went through all relevant actions and made sure they work as expected.
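Beyond clicking through the editor, a reducer-level check along these lines (a hypothetical sketch, not part of this commit) would also exercise the id-based matching:

```ts
// Hypothetical sanity check (not part of this commit): an update addressed by id
// should touch only the matching prompt, regardless of its position in the array.
import aiconfigReducer from "./aiconfigReducer";

const state: any = {
  // Minimal state; the real ClientAIConfig carries more fields.
  prompts: [
    { _ui: { id: "p1" }, name: "first", input: "hello" },
    { _ui: { id: "p2" }, name: "second", input: "world" },  ],
};

const next = aiconfigReducer(state, {
  type: "UPDATE_PROMPT_NAME",
  id: "p2",
  name: "renamed",
});

console.assert(next.prompts[1].name === "renamed");
console.assert(next.prompts[0].name === "first");
```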
Ryan Holinshead committed Dec 29, 2023
1 parent 9b7bee7 commit 6367402
Showing 3 changed files with 77 additions and 69 deletions.
@@ -96,7 +96,7 @@ export default function EditorContainer({
const onSave = useCallback(async () => {
setIsSaving(true);
try {
await callbacks.save(clientConfigToAIConfig(aiconfigState));
await callbacks.save(clientConfigToAIConfig(stateRef.current));
} catch (err: any) {
showNotification({
title: "Error saving",
@@ -106,7 +106,7 @@ export default function EditorContainer({
} finally {
setIsSaving(false);
}
}, [aiconfigState, callbacks.save]);
}, [callbacks.save]);

const debouncedUpdatePrompt = useMemo(
() =>
@@ -119,18 +119,18 @@
);

const onChangePromptInput = useCallback(
async (promptIndex: number, newPromptInput: PromptInput) => {
async (promptId: string, newPromptInput: PromptInput) => {
const action: AIConfigReducerAction = {
type: "UPDATE_PROMPT_INPUT",
index: promptIndex,
id: promptId,
input: newPromptInput,
};

dispatch(action);

try {
const prompt = clientPromptToAIConfigPrompt(
aiconfigState.prompts[promptIndex]
getPrompt(stateRef.current, promptId)!
);
const serverConfigRes = await debouncedUpdatePrompt(prompt.name, {
...prompt,
@@ -154,10 +154,10 @@ export default function EditorContainer({
);

const onChangePromptName = useCallback(
async (promptIndex: number, newName: string) => {
async (promptId: string, newName: string) => {
const action: AIConfigReducerAction = {
type: "UPDATE_PROMPT_NAME",
index: promptIndex,
id: promptId,
name: newName,
};

@@ -177,10 +177,10 @@ export default function EditorContainer({
);

const onUpdatePromptModelSettings = useCallback(
async (promptIndex: number, newModelSettings: any) => {
async (promptId: string, newModelSettings: any) => {
dispatch({
type: "UPDATE_PROMPT_MODEL_SETTINGS",
index: promptIndex,
id: promptId,
modelSettings: newModelSettings,
});
// TODO: Call server-side endpoint to update model
@@ -189,16 +189,16 @@ export default function EditorContainer({
);

const onUpdatePromptModel = useCallback(
async (promptIndex: number, newModel?: string) => {
async (promptId: string, newModel?: string) => {
dispatch({
type: "UPDATE_PROMPT_MODEL",
index: promptIndex,
id: promptId,
modelName: newModel,
});

try {
const prompt = clientPromptToAIConfigPrompt(
aiconfigState.prompts[promptIndex]
getPrompt(stateRef.current, promptId)!
);
const currentModel = prompt.metadata?.model;
let modelData: string | ModelMetadata | undefined = newModel;
@@ -224,10 +224,10 @@ export default function EditorContainer({
);

const onUpdatePromptParameters = useCallback(
async (promptIndex: number, newParameters: any) => {
async (promptId: string, newParameters: any) => {
dispatch({
type: "UPDATE_PROMPT_PARAMETERS",
index: promptIndex,
id: promptId,
parameters: newParameters,
});
// TODO: Call server-side endpoint to update prompt parameters
@@ -306,8 +306,8 @@ export default function EditorContainer({
);

const onRunPrompt = useCallback(
async (promptIndex: number) => {
const promptName = aiconfigState.prompts[promptIndex].name;
async (promptId: string) => {
const promptName = getPrompt(stateRef.current, promptId)!.name;
try {
await callbacks.runPrompt(promptName);
} catch (err: any) {
@@ -345,7 +345,6 @@ export default function EditorContainer({
onDeletePrompt={() => onDeletePrompt(prompt._ui.id)}
/>
<PromptContainer
index={i}
prompt={prompt}
getModels={callbacks.getModels}
onChangePromptInput={onChangePromptInput}
python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts (39 additions, 30 deletions)
@@ -32,57 +32,56 @@ export type DeletePromptAction = {
id: string;
};

// TODO: Update index to prompt id for all existing-prompt actions
export type UpdatePromptInputAction = {
type: "UPDATE_PROMPT_INPUT";
index: number;
id: string;
input: PromptInput;
};

export type UpdatePromptNameAction = {
type: "UPDATE_PROMPT_NAME";
index: number;
id: string;
name: string;
};

export type UpdatePromptModelAction = {
type: "UPDATE_PROMPT_MODEL";
index: number;
id: string;
modelName?: string;
};

export type UpdatePromptModelSettingsAction = {
type: "UPDATE_PROMPT_MODEL_SETTINGS";
index: number;
id: string;
modelSettings: JSONObject;
};

// TODO: saqadri - can likely use this same action for global parameters update
export type UpdatePromptParametersAction = {
type: "UPDATE_PROMPT_PARAMETERS";
index: number;
id: string;
parameters: JSONObject;
};

function reduceReplacePrompt(
state: ClientAIConfig,
index: number,
id: string,
replacerFn: (prompt: ClientPrompt) => ClientPrompt
): ClientAIConfig {
return {
...state,
prompts: state.prompts.map((prompt, i) =>
i === index ? replacerFn(prompt) : prompt
prompts: state.prompts.map((prompt) =>
prompt._ui.id === id ? replacerFn(prompt) : prompt
),
};
}

function reduceReplaceInput(
state: ClientAIConfig,
index: number,
id: string,
replacerFn: (input: PromptInput) => PromptInput
): ClientAIConfig {
return reduceReplacePrompt(state, index, (prompt) => ({
return reduceReplacePrompt(state, id, (prompt) => ({
...prompt,
input: replacerFn(prompt.input),
}));
@@ -108,22 +107,32 @@ function reduceConsolidateAIConfig(
action: MutateAIConfigAction,
responseConfig: AIConfig
): ClientAIConfig {
// Make sure prompt structure is properly updated. Client input and metadata takes precedence
// since it may have been updated by the user while the request was in flight
const consolidatePrompt = (statePrompt: ClientPrompt) => {
const responsePrompt = responseConfig.prompts.find(
(resPrompt) => resPrompt.name === statePrompt.name
);
return {
...responsePrompt,
...statePrompt,
metadata: {
...responsePrompt!.metadata,
...statePrompt.metadata,
},
} as ClientPrompt;
};

switch (action.type) {
case "ADD_PROMPT_AT_INDEX":
case "ADD_PROMPT_AT_INDEX": {
return reduceReplacePrompt(
state,
action.prompt._ui.id,
consolidatePrompt
);
}
case "UPDATE_PROMPT_INPUT": {
// Make sure prompt structure is properly updated. Client input and metadata takes precedence
// since it may have been updated by the user while the request was in flight
return reduceReplacePrompt(state, action.index, (prompt) => {
const responsePrompt = responseConfig.prompts[action.index];
return {
...responsePrompt,
...prompt,
metadata: {
...responsePrompt.metadata,
...prompt.metadata,
},
} as ClientPrompt;
});
return reduceReplacePrompt(state, action.id, consolidatePrompt);
}
default: {
return state;
@@ -146,16 +155,16 @@ export default function aiconfigReducer(
};
}
case "UPDATE_PROMPT_INPUT": {
return reduceReplaceInput(state, action.index, () => action.input);
return reduceReplaceInput(state, action.id, () => action.input);
}
case "UPDATE_PROMPT_NAME": {
return reduceReplacePrompt(state, action.index, (prompt) => ({
return reduceReplacePrompt(state, action.id, (prompt) => ({
...prompt,
name: action.name,
}));
}
case "UPDATE_PROMPT_MODEL": {
return reduceReplacePrompt(state, action.index, (prompt) => ({
return reduceReplacePrompt(state, action.id, (prompt) => ({
...prompt,
metadata: {
...prompt.metadata,
Expand All @@ -169,7 +178,7 @@ export default function aiconfigReducer(
}));
}
case "UPDATE_PROMPT_MODEL_SETTINGS": {
return reduceReplacePrompt(state, action.index, (prompt) => ({
return reduceReplacePrompt(state, action.id, (prompt) => ({
...prompt,
metadata: {
...prompt.metadata,
Expand All @@ -186,7 +195,7 @@ export default function aiconfigReducer(
}));
}
case "UPDATE_PROMPT_PARAMETERS": {
return reduceReplacePrompt(state, action.index, (prompt) => ({
return reduceReplacePrompt(state, action.id, (prompt) => ({
...prompt,
metadata: {
...prompt.metadata,
@@ -12,18 +12,17 @@ import PromptName from "./PromptName";
import ModelSelector from "./ModelSelector";

type Props = {
index: number;
prompt: ClientPrompt;
getModels: (search: string) => Promise<string[]>;
onChangePromptInput: (
promptIndex: number,
promptId: string,
newPromptInput: AIConfigPromptInput
) => void;
onChangePromptName: (promptIndex: number, newName: string) => void;
onRunPrompt(promptIndex: number): Promise<void>;
onUpdateModel: (promptIndex: number, newModel?: string) => void;
onUpdateModelSettings: (promptIndex: number, newModelSettings: any) => void;
onUpdateParameters: (promptIndex: number, newParameters: any) => void;
onChangePromptName: (promptId: string, newName: string) => void;
onRunPrompt(promptId: string): Promise<void>;
onUpdateModel: (promptId: string, newModel?: string) => void;
onUpdateModelSettings: (promptId: string, newModelSettings: any) => void;
onUpdateParameters: (promptId: string, newParameters: any) => void;
defaultConfigModelName?: string;
};

@@ -44,7 +43,6 @@ const useStyles = createStyles((theme) => ({

export default memo(function PromptContainer({
prompt,
index,
getModels,
onChangePromptInput,
onChangePromptName,
@@ -54,19 +52,21 @@ export default memo(function PromptContainer({
onUpdateModelSettings,
onUpdateParameters,
}: Props) {
const promptId = prompt._ui.id;
const onChangeInput = useCallback(
(newInput: AIConfigPromptInput) => onChangePromptInput(index, newInput),
[index, onChangePromptInput]
(newInput: AIConfigPromptInput) => onChangePromptInput(promptId, newInput),
[promptId, onChangePromptInput]
);

const onChangeName = useCallback(
(newName: string) => onChangePromptName(index, newName),
[index, onChangePromptName]
(newName: string) => onChangePromptName(promptId, newName),
[promptId, onChangePromptName]
);

const updateModelSettings = useCallback(
(newModelSettings: any) => onUpdateModelSettings(index, newModelSettings),
[index, onUpdateModelSettings]
(newModelSettings: any) =>
onUpdateModelSettings(promptId, newModelSettings),
[promptId, onUpdateModelSettings]
);

const updateParameters = useCallback(
@@ -82,24 +82,24 @@
newParameters[key] = val;
}

onUpdateParameters(index, newParameters);
onUpdateParameters(promptId, newParameters);
},
[index, onUpdateParameters]
[promptId, onUpdateParameters]
);

const runPrompt = useCallback(
async () => await onRunPrompt(index),
[index, onRunPrompt]
async () => await onRunPrompt(promptId),
[promptId, onRunPrompt]
);

const updateModel = useCallback(
(model?: string) => onUpdateModel(index, model),
[index, onUpdateModel]
(model?: string) => onUpdateModel(promptId, model),
[promptId, onUpdateModel]
);

// TODO: When adding support for custom PromptContainers, implement a PromptContainerRenderer which
// will take in the index and callback and render the appropriate PromptContainer with new memoized
// callback and not having to pass index down to PromptContainer
// will take in the promptId and callback and render the appropriate PromptContainer with new memoized
// callback and not having to pass promptId down to PromptContainer

const promptSchema = getPromptSchema(prompt, defaultConfigModelName);
const inputSchema = promptSchema?.input;
