Skip to content

Commit

Permalink
[editor] Add Prompt Button
Browse files Browse the repository at this point in the history
  • Loading branch information
Ryan Holinshead committed Dec 26, 2023
1 parent 5e336f0 commit 276af8d
Show file tree
Hide file tree
Showing 9 changed files with 315 additions and 20 deletions.
2 changes: 1 addition & 1 deletion python/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ flask[async]
google-generativeai
huggingface_hub
hypothesis==6.91.0
lastmile-utils==0.0.13
lastmile-utils==0.0.14
mock
nest_asyncio
nltk
Expand Down
32 changes: 29 additions & 3 deletions python/src/aiconfig/editor/client/src/Editor.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import EditorContainer from "./components/EditorContainer";
import { ClientAIConfig } from "./shared/types";
import { Flex, Loader } from "@mantine/core";
import { AIConfig } from "aiconfig";
import { AIConfig, Prompt } from "aiconfig";
import { useCallback, useEffect, useState } from "react";
import { ufetch } from "ufetch";
import { ROUTE_TABLE } from "./utils/api";
Expand All @@ -20,21 +20,47 @@ export default function Editor() {
}, [loadConfig]);

const onSave = useCallback(async (aiconfig: AIConfig) => {
const res = await ufetch.post(`/api/aiconfig/save`, {
const res = await ufetch.post(ROUTE_TABLE.SAVE, {
// path: file path,
aiconfig,
});
return res;
}, []);

const getModels = useCallback(async (search: string) => {
// For now, rely on caching and handle client-side search filtering
// We will use server-side search filtering for Gradio
const res = await ufetch.get(ROUTE_TABLE.LIST_MODELS);
const models = res.data;
if (search && search.length > 0) {
return models.filter((model: string) => model.indexOf(search) >= 0);
}
return models;
}, []);

const addPrompt = useCallback(
async (promptName: string, promptData: Prompt) => {
return await ufetch.post(ROUTE_TABLE.ADD_PROMPT, {
prompt_name: promptName,
prompt_data: promptData,
});
},
[]
);

return (
<div>
{!aiconfig ? (
<Flex justify="center" mt="xl">
<Loader size="xl" />
</Flex>
) : (
<EditorContainer aiconfig={aiconfig} onSave={onSave} />
<EditorContainer
aiconfig={aiconfig}
onSave={onSave}
getModels={getModels}
addPrompt={addPrompt}
/>
)}
</div>
);
Expand Down
112 changes: 99 additions & 13 deletions python/src/aiconfig/editor/client/src/components/EditorContainer.tsx
Original file line number Diff line number Diff line change
@@ -1,17 +1,45 @@
import PromptContainer from "./prompt/PromptContainer";
import { Container, Group, Button, createStyles } from "@mantine/core";
import { Container, Group, Button, createStyles, Stack } from "@mantine/core";
import { showNotification } from "@mantine/notifications";
import { AIConfig, PromptInput } from "aiconfig";
import { useCallback, useReducer, useState } from "react";
import aiconfigReducer from "./aiconfigReducer";
import { AIConfig, Prompt, PromptInput } from "aiconfig";
import { useCallback, useReducer, useRef, useState } from "react";
import aiconfigReducer, { AIConfigReducerAction } from "./aiconfigReducer";
import { ClientAIConfig, clientConfigToAIConfig } from "../shared/types";
import AddPromptButton from "./prompt/AddPromptButton";
import { getDefaultNewPromptName } from "../utils/aiconfigStateUtils";

type Props = {
  /** The config being edited (client-side representation). */
  aiconfig: ClientAIConfig;
  /**
   * Persist a new prompt on the server; resolves with the server's updated
   * config so the client can consolidate its optimistic state.
   */
  addPrompt: (
    promptName: string,
    prompt: Prompt
  ) => Promise<{ aiconfig: AIConfig }>;
  /** Save the full config to the backend. */
  onSave: (aiconfig: AIConfig) => Promise<void>;
  /** List available model names, optionally filtered by a search string. */
  getModels: (search: string) => Promise<string[]>;
};

const useStyles = createStyles((theme) => ({
addPromptRow: {
borderRadius: "4px",
display: "inline-block",
bottom: -24,
left: -40,
"&:hover": {
backgroundColor:
theme.colorScheme === "light"
? theme.colors.gray[1]
: "rgba(255, 255, 255, 0.1)",
},
[theme.fn.smallerThan("sm")]: {
marginLeft: "0",
display: "block",
position: "static",
bottom: -10,
left: 0,
height: 28,
margin: "10px 0",
},
},
promptsContainer: {
[theme.fn.smallerThan("sm")]: {
padding: "0 0 200px 0",
Expand All @@ -22,14 +50,19 @@ const useStyles = createStyles((theme) => ({

export default function EditorContainer({
aiconfig: initialAIConfig,
addPrompt,
onSave,
getModels,
}: Props) {
const [isSaving, setIsSaving] = useState(false);
const [aiconfigState, dispatch] = useReducer(
aiconfigReducer,
initialAIConfig
);

const stateRef = useRef(aiconfigState);
stateRef.current = aiconfigState;

const save = useCallback(async () => {
setIsSaving(true);
try {
Expand Down Expand Up @@ -81,6 +114,43 @@ export default function EditorContainer({
[dispatch]
);

const onAddPrompt = useCallback(
async (promptIndex: number, model: string) => {
const promptName = getDefaultNewPromptName(stateRef.current as AIConfig);
const newPrompt: Prompt = {
name: promptName,
input: "", // TODO: Can we use schema to get input structure, string vs object?
metadata: {
model,
},
};

const action: AIConfigReducerAction = {
type: "ADD_PROMPT_AT_INDEX",
index: promptIndex,
prompt: newPrompt,
};

dispatch(action);

try {
const serverConfigRes = await addPrompt(promptName, newPrompt);
dispatch({
type: "CONSOLIDATE_AICONFIG",
action,
config: serverConfigRes.aiconfig,
});
} catch (err: any) {
showNotification({
title: "Error adding prompt to config",
message: err.message,
color: "red",
});
}
},
[addPrompt, dispatch]
);

const { classes } = useStyles();

// TODO: Implement editor context for callbacks, readonly state, etc.
Expand All @@ -100,15 +170,31 @@ export default function EditorContainer({
<Container maw="80rem" className={classes.promptsContainer}>
{aiconfigState.prompts.map((prompt: any, i: number) => {
return (
<PromptContainer
index={i}
prompt={prompt}
key={prompt.name}
onChangePromptInput={onChangePromptInput}
onUpdateModelSettings={onUpdatePromptModelSettings}
onUpdateParameters={onUpdatePromptParameters}
defaultConfigModelName={aiconfigState.metadata.default_model}
/>
<Stack key={prompt.name}>
<PromptContainer
index={i}
prompt={prompt}
onChangePromptInput={onChangePromptInput}
onUpdateModelSettings={onUpdatePromptModelSettings}
onUpdateParameters={onUpdatePromptParameters}
defaultConfigModelName={aiconfigState.metadata.default_model}
/>
{i ===
aiconfigState.prompts.length -
1 /* TODO: Remove this check once we can add prompts at any index */ && (
<div className={classes.addPromptRow}>
<AddPromptButton
getModels={getModels}
addPrompt={(model: string) =>
onAddPrompt(
i + 1 /* insert below current prompt index */,
model
)
}
/>
</div>
)}
</Stack>
);
})}
</Container>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,29 @@
import { ClientAIConfig, ClientPrompt } from "../shared/types";
import { getPromptModelName } from "../utils/promptUtils";
import { AIConfig, JSONObject, PromptInput } from "aiconfig";
import { AIConfig, JSONObject, Prompt, PromptInput } from "aiconfig";

type AIConfigReducerAction =
/** Any action the aiconfig editor reducer can handle. */
export type AIConfigReducerAction =
  | MutateAIConfigAction
  | ConsolidateAIConfigAction;

/** Actions that optimistically mutate the client-side config state. */
export type MutateAIConfigAction =
  | AddPromptAction
  | UpdatePromptInputAction
  | UpdatePromptModelSettingsAction
  | UpdatePromptParametersAction;

/**
 * Reconcile a previously-dispatched optimistic mutation with the config
 * returned by the server.
 */
export type ConsolidateAIConfigAction = {
  type: "CONSOLIDATE_AICONFIG";
  /** The optimistic mutation being consolidated. */
  action: MutateAIConfigAction;
  /** The authoritative config returned by the server. */
  config: AIConfig;
};

/** Insert a new prompt at the given index in the prompts list. */
export type AddPromptAction = {
  type: "ADD_PROMPT_AT_INDEX";
  index: number;
  prompt: Prompt;
};

export type UpdatePromptInputAction = {
type: "UPDATE_PROMPT_INPUT";
index: number;
Expand Down Expand Up @@ -50,11 +67,60 @@ function reduceReplaceInput(
}));
}

/**
 * Return a copy of `state` with `prompt` inserted at `index` in the prompts
 * list. Neither `state` nor its prompts array is mutated.
 */
function reduceInsertPromptAtIndex(
  state: ClientAIConfig,
  index: number,
  prompt: ClientPrompt
): ClientAIConfig {
  // Copy the array first so splice never touches the original state.
  const updatedPrompts = [...state.prompts];
  updatedPrompts.splice(index, 0, prompt);
  return { ...state, prompts: updatedPrompts };
}

/**
 * Merge the server's response config into client state for a completed
 * mutation. Currently only ADD_PROMPT_AT_INDEX needs consolidation; every
 * other action returns state unchanged.
 */
function reduceConsolidateAIConfig(
  state: ClientAIConfig,
  action: MutateAIConfigAction,
  responseConfig: AIConfig
): ClientAIConfig {
  if (action.type !== "ADD_PROMPT_AT_INDEX") {
    return state;
  }
  // Make sure prompt structure is properly updated. Client input and metadata
  // take precedence since they may have been updated by the user while the
  // request was in flight.
  return reduceReplacePrompt(state, action.index, (clientPrompt) => {
    const serverPrompt = responseConfig.prompts[action.index];
    const merged = {
      ...serverPrompt,
      ...clientPrompt,
      metadata: {
        ...serverPrompt.metadata,
        ...clientPrompt.metadata,
      },
    };
    return merged as ClientPrompt;
  });
}

export default function aiconfigReducer(
state: ClientAIConfig,
action: AIConfigReducerAction
): ClientAIConfig {
switch (action.type) {
case "ADD_PROMPT_AT_INDEX": {
return reduceInsertPromptAtIndex(
state,
action.index,
action.prompt as ClientPrompt
);
}
case "UPDATE_PROMPT_INPUT": {
return reduceReplaceInput(state, action.index, () => action.input);
}
Expand Down Expand Up @@ -84,5 +150,8 @@ export default function aiconfigReducer(
},
}));
}
case "CONSOLIDATE_AICONFIG": {
return reduceConsolidateAIConfig(state, action.action, action.config);
}
}
}
Loading

0 comments on commit 276af8d

Please sign in to comment.