From c20a42b867d8f7106782637d206c0c62869b0d86 Mon Sep 17 00:00:00 2001
From: Ryan Holinshead <>
Date: Tue, 12 Mar 2024 15:04:33 -0400
Subject: [PATCH] Update getModels to Make Search String Optional
# Update getModels to Make Search String Optional
In a few cases we don't have a search string to use for the getModels call, and it's awkward to pass an empty string just to represent that case. This updates the implementation to make the search string optional, and reorders the useLoadModels parameters so the now-optional modelSearch comes last.
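For illustration, a minimal sketch of the new call shape (the hard-coded model list and filtering below are stand-ins for whatever the LIST_MODELS route returns, not the editor's actual implementation):

```typescript
// Sketch only: getModels now takes an optional search string, so callers with no
// filter simply omit the argument instead of passing "".
const MODELS = ["gpt-4", "gpt-3.5-turbo", "dall-e-3"]; // stand-in data

async function getModels(search?: string): Promise<string[]> {
  // Mirrors the client-side filtering the editor callbacks already do.
  if (!search) {
    return MODELS;
  }
  return MODELS.filter((m) => m.toLowerCase().includes(search.toLowerCase()));
}

void (async () => {
  console.log(await getModels());        // no search string, e.g. "show all" in ModelSelector
  console.log(await getModels("gpt-4")); // filtered by the search string
})();
```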
## Testing:
- Make sure all getModels call sites still load models (add prompt button, prompt model selector)
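Those two call sites also pass arguments to useLoadModels in its new order (getModels first, then the optional search). A sketch of the shape, with the hook body elided and the string[] return type assumed:

```typescript
// Signature after this change: getModels first, the now-optional modelSearch second.
declare function useLoadModels(
  getModels?: (search?: string) => Promise<string[]>,
  modelSearch?: string
): string[];

// AddPromptButton: the modelSearch state may now be undefined.
//   const models = useLoadModels(getModels, modelSearch);

// ModelSelector: "show all" passes undefined instead of "".
//   const models = useLoadModels(getModels, showAll ? undefined : autocompleteSearch);
```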
---
python/src/aiconfig/editor/client/src/LocalEditor.tsx | 11 +++--------
.../editor/client/src/components/AIConfigEditor.tsx | 2 +-
.../client/src/components/prompt/AddPromptButton.tsx | 6 +++---
.../client/src/components/prompt/ModelSelector.tsx | 6 +++---
.../client/src/components/prompt/PromptContainer.tsx | 2 +-
.../client/src/components/prompt/PromptsContainer.tsx | 2 +-
.../aiconfig/editor/client/src/hooks/useLoadModels.ts | 6 +++---
vscode-extension/editor/src/VSCodeEditor.tsx | 2 +-
8 files changed, 16 insertions(+), 21 deletions(-)
diff --git a/python/src/aiconfig/editor/client/src/LocalEditor.tsx b/python/src/aiconfig/editor/client/src/LocalEditor.tsx
index 3e664856f..4b80ac158 100644
--- a/python/src/aiconfig/editor/client/src/LocalEditor.tsx
+++ b/python/src/aiconfig/editor/client/src/LocalEditor.tsx
@@ -37,7 +37,6 @@ const useStyles = createStyles(() => ({
},
}));
-
const MODE = "local";
export default function LocalEditor() {
@@ -75,7 +74,7 @@ export default function LocalEditor() {
sessionSampleRate: 100,
});
- datadogLogs.setGlobalContextProperty('mode', MODE);
+ datadogLogs.setGlobalContextProperty("mode", MODE);
}
}, []);
@@ -91,7 +90,7 @@ export default function LocalEditor() {
return res;
}, []);
- const getModels = useCallback(async (search: string) => {
+ const getModels = useCallback(async (search?: string) => {
// For now, rely on caching and handle client-side search filtering
// We will use server-side search filtering for Gradio
const res = await ufetch.get(ROUTE_TABLE.LIST_MODELS);
@@ -289,11 +288,7 @@ export default function LocalEditor() {
) : (
-
+
)}
);
diff --git a/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx b/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
index 06bf09dd7..94301ebf0 100644
--- a/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
+++ b/python/src/aiconfig/editor/client/src/components/AIConfigEditor.tsx
@@ -125,7 +125,7 @@ export type AIConfigCallbacks = {
deletePrompt: (promptName: string) => Promise;
download?: () => Promise;
openInTextEditor?: () => Promise;
- getModels: (search: string) => Promise<string[]>;
+ getModels: (search?: string) => Promise<string[]>;
getServerStatus?: () => Promise<{ status: "OK" | "ERROR" }>;
logEventHandler?: (event: LogEvent, data?: LogEventData) => void;
runPrompt: (
diff --git a/python/src/aiconfig/editor/client/src/components/prompt/AddPromptButton.tsx b/python/src/aiconfig/editor/client/src/components/prompt/AddPromptButton.tsx
index 256c828d1..3b20e6ae8 100644
--- a/python/src/aiconfig/editor/client/src/components/prompt/AddPromptButton.tsx
+++ b/python/src/aiconfig/editor/client/src/components/prompt/AddPromptButton.tsx
@@ -14,7 +14,7 @@ import { PROMPT_CELL_LEFT_MARGIN_PX } from "../../utils/constants";
type Props = {
addPrompt: (prompt: string) => void;
- getModels?: (search: string) => Promise<string[]>;
+ getModels?: (search?: string) => Promise<string[]>;
};
const useStyles = createStyles((theme) => ({
@@ -74,7 +74,7 @@ function ModelMenuItems({
}
export default memo(function AddPromptButton({ addPrompt, getModels }: Props) {
- const [modelSearch, setModelSearch] = useState("");
+ const [modelSearch, setModelSearch] = useState<string | undefined>();
const [isOpen, setIsOpen] = useState(false);
const onAddPrompt = useCallback(
@@ -85,7 +85,7 @@ export default memo(function AddPromptButton({ addPrompt, getModels }: Props) {
[addPrompt]
);
- const models = useLoadModels(modelSearch, getModels);
+ const models = useLoadModels(getModels, modelSearch);
const { classes } = useStyles();
return (
diff --git a/python/src/aiconfig/editor/client/src/components/prompt/ModelSelector.tsx b/python/src/aiconfig/editor/client/src/components/prompt/ModelSelector.tsx
index 187205aab..e21e64e30 100644
--- a/python/src/aiconfig/editor/client/src/components/prompt/ModelSelector.tsx
+++ b/python/src/aiconfig/editor/client/src/components/prompt/ModelSelector.tsx
@@ -8,7 +8,7 @@ import AIConfigContext from "../../contexts/AIConfigContext";
type Props = {
prompt: Prompt;
- getModels?: (search: string) => Promise<string[]>;
+ getModels?: (search?: string) => Promise<string[]>;
onSetModel: (model?: string) => void;
defaultConfigModelName?: string;
};
@@ -30,8 +30,8 @@ export default memo(function ModelSelector({
);
const models = useLoadModels(
- showAll ? "" : autocompleteSearch ?? "",
- getModels
+ getModels,
+ showAll ? undefined : autocompleteSearch
);
const onSelectModel = (model?: string) => {
diff --git a/python/src/aiconfig/editor/client/src/components/prompt/PromptContainer.tsx b/python/src/aiconfig/editor/client/src/components/prompt/PromptContainer.tsx
index 4ea054860..86985f18a 100644
--- a/python/src/aiconfig/editor/client/src/components/prompt/PromptContainer.tsx
+++ b/python/src/aiconfig/editor/client/src/components/prompt/PromptContainer.tsx
@@ -15,7 +15,7 @@ import { debounce } from "lodash";
type Props = {
prompt: ClientPrompt;
cancel?: (cancellationToken: string) => Promise;
- getModels?: (search: string) => Promise<string[]>;
+ getModels?: (search?: string) => Promise<string[]>;
onChangePromptInput: (
promptId: string,
newPromptInput: AIConfigPromptInput
diff --git a/python/src/aiconfig/editor/client/src/components/prompt/PromptsContainer.tsx b/python/src/aiconfig/editor/client/src/components/prompt/PromptsContainer.tsx
index 228e88d18..4ddc3b49a 100644
--- a/python/src/aiconfig/editor/client/src/components/prompt/PromptsContainer.tsx
+++ b/python/src/aiconfig/editor/client/src/components/prompt/PromptsContainer.tsx
@@ -10,7 +10,7 @@ import { JSONObject, PromptInput } from "aiconfig";
type Props = {
cancelRunPrompt?: (cancellationToken: string) => Promise;
defaultModel?: string;
- getModels?: (search: string) => Promise<string[]>;
+ getModels?: (search?: string) => Promise<string[]>;
onAddPrompt: (promptIndex: number, model: string) => Promise;
onChangePromptInput: (
promptId: string,
diff --git a/python/src/aiconfig/editor/client/src/hooks/useLoadModels.ts b/python/src/aiconfig/editor/client/src/hooks/useLoadModels.ts
index 62595bd51..54111b12d 100644
--- a/python/src/aiconfig/editor/client/src/hooks/useLoadModels.ts
+++ b/python/src/aiconfig/editor/client/src/hooks/useLoadModels.ts
@@ -3,15 +3,15 @@ import NotificationContext from "../components/notifications/NotificationContext
import AIConfigContext from "../contexts/AIConfigContext";
export default function useLoadModels(
- modelSearch: string,
- getModels?: (search: string) => Promise<string[]>
+ getModels?: (search?: string) => Promise<string[]>,
+ modelSearch?: string
) {
const [models, setModels] = useState<string[]>([]);
const { showNotification } = useContext(NotificationContext);
const { readOnly } = useContext(AIConfigContext);
const loadModels = useCallback(
- async (modelSearch: string) => {
+ async (modelSearch?: string) => {
if (!getModels || readOnly) {
return;
}
diff --git a/vscode-extension/editor/src/VSCodeEditor.tsx b/vscode-extension/editor/src/VSCodeEditor.tsx
index bce5ec9c7..16c2ef3c8 100644
--- a/vscode-extension/editor/src/VSCodeEditor.tsx
+++ b/vscode-extension/editor/src/VSCodeEditor.tsx
@@ -199,7 +199,7 @@ export default function VSCodeEditor() {
}, [setupTelemetryIfAllowed]);
const getModels = useCallback(
- async (search: string) => {
+ async (search?: string) => {
// For now, rely on caching and handle client-side search filtering
// We will use server-side search filtering for Gradio
const res = await ufetch.get(ROUTE_TABLE.LIST_MODELS(aiConfigServerUrl));