# Update getModels to Make Search String Optional

In a few cases we don't have a search string to use for the getModels call, and it's awkward to have to pass an empty string to represent that case. This updates the implementation to make the search string optional.
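
For illustration, here is a minimal sketch of what the relaxed signature means for callers. The `(search?: string) => Promise<string[]>` shape matches the updated `AIConfigCallbacks` type in the diffs below; the `GetModels` alias and the helper functions are made up for this example.

```typescript
// Callback shape after this change, per the updated AIConfigCallbacks type.
type GetModels = (search?: string) => Promise<string[]>;

// Callers with no search term can now omit the argument entirely
// instead of passing "" as a placeholder.
async function listAllModels(getModels: GetModels): Promise<string[]> {
  return getModels();
}

// Callers that do have a query pass it exactly as before.
async function searchModels(getModels: GetModels, query: string): Promise<string[]> {
  return getModels(query);
}
```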

## Testing:
- Make sure all getModels calls work (add prompt, prompt model selector)
Ryan Holinshead committed Mar 12, 2024
1 parent 89ad24c commit c20a42b
Showing 8 changed files with 16 additions and 21 deletions.
11 changes: 3 additions & 8 deletions python/src/aiconfig/editor/client/src/LocalEditor.tsx
@@ -37,7 +37,6 @@ const useStyles = createStyles(() => ({
},
}));


const MODE = "local";

export default function LocalEditor() {
@@ -75,7 +74,7 @@ export default function LocalEditor() {
sessionSampleRate: 100,
});

-datadogLogs.setGlobalContextProperty('mode', MODE);
+datadogLogs.setGlobalContextProperty("mode", MODE);
}
}, []);

@@ -91,7 +90,7 @@ export default function LocalEditor() {
return res;
}, []);

-const getModels = useCallback(async (search: string) => {
+const getModels = useCallback(async (search?: string) => {
// For now, rely on caching and handle client-side search filtering
// We will use server-side search filtering for Gradio
const res = await ufetch.get(ROUTE_TABLE.LIST_MODELS);
@@ -289,11 +288,7 @@ export default function LocalEditor() {
<Loader size="xl" />
</Flex>
) : (
-<AIConfigEditor
-aiconfig={aiconfig}
-callbacks={callbacks}
-mode={MODE}
-/>
+<AIConfigEditor aiconfig={aiconfig} callbacks={callbacks} mode={MODE} />
)}
</div>
);
@@ -125,7 +125,7 @@ export type AIConfigCallbacks = {
deletePrompt: (promptName: string) => Promise<void>;
download?: () => Promise<void>;
openInTextEditor?: () => Promise<void>;
-getModels: (search: string) => Promise<string[]>;
+getModels: (search?: string) => Promise<string[]>;
getServerStatus?: () => Promise<{ status: "OK" | "ERROR" }>;
logEventHandler?: (event: LogEvent, data?: LogEventData) => void;
runPrompt: (
@@ -14,7 +14,7 @@ import { PROMPT_CELL_LEFT_MARGIN_PX } from "../../utils/constants";

type Props = {
addPrompt: (prompt: string) => void;
-getModels?: (search: string) => Promise<string[]>;
+getModels?: (search?: string) => Promise<string[]>;
};

const useStyles = createStyles((theme) => ({
@@ -74,7 +74,7 @@ function ModelMenuItems({
}

export default memo(function AddPromptButton({ addPrompt, getModels }: Props) {
-const [modelSearch, setModelSearch] = useState("");
+const [modelSearch, setModelSearch] = useState<string | undefined>();
const [isOpen, setIsOpen] = useState(false);

const onAddPrompt = useCallback(
@@ -85,7 +85,7 @@ export default memo(function AddPromptButton({ addPrompt, getModels }: Props) {
[addPrompt]
);

-const models = useLoadModels(modelSearch, getModels);
+const models = useLoadModels(getModels, modelSearch);
const { classes } = useStyles();

return (
@@ -8,7 +8,7 @@ import AIConfigContext from "../../contexts/AIConfigContext";

type Props = {
prompt: Prompt;
-getModels?: (search: string) => Promise<string[]>;
+getModels?: (search?: string) => Promise<string[]>;
onSetModel: (model?: string) => void;
defaultConfigModelName?: string;
};
@@ -30,8 +30,8 @@ export default memo(function ModelSelector({
);

const models = useLoadModels(
-showAll ? "" : autocompleteSearch ?? "",
-getModels
+getModels,
+showAll ? undefined : autocompleteSearch
);

const onSelectModel = (model?: string) => {
@@ -15,7 +15,7 @@ import { debounce } from "lodash";
type Props = {
prompt: ClientPrompt;
cancel?: (cancellationToken: string) => Promise<void>;
-getModels?: (search: string) => Promise<string[]>;
+getModels?: (search?: string) => Promise<string[]>;
onChangePromptInput: (
promptId: string,
newPromptInput: AIConfigPromptInput
@@ -10,7 +10,7 @@ import { JSONObject, PromptInput } from "aiconfig";
type Props = {
cancelRunPrompt?: (cancellationToken: string) => Promise<void>;
defaultModel?: string;
-getModels?: (search: string) => Promise<string[]>;
+getModels?: (search?: string) => Promise<string[]>;
onAddPrompt: (promptIndex: number, model: string) => Promise<void>;
onChangePromptInput: (
promptId: string,
6 changes: 3 additions & 3 deletions python/src/aiconfig/editor/client/src/hooks/useLoadModels.ts
@@ -3,15 +3,15 @@ import NotificationContext from "../components/notifications/NotificationContext
import AIConfigContext from "../contexts/AIConfigContext";

export default function useLoadModels(
-modelSearch: string,
-getModels?: (search: string) => Promise<string[]>
+getModels?: (search?: string) => Promise<string[]>,
+modelSearch?: string
) {
const [models, setModels] = useState<string[]>([]);
const { showNotification } = useContext(NotificationContext);
const { readOnly } = useContext(AIConfigContext);

const loadModels = useCallback(
-async (modelSearch: string) => {
+async (modelSearch?: string) => {
if (!getModels || readOnly) {
return;
}
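
With the reordered parameters, useLoadModels now takes the getModels callback first and an optional modelSearch second, mirroring the two call sites updated above. A rough usage sketch follows; the component, placeholder model list, and import path are hypothetical, and only the hook call itself reflects the new signature:

```tsx
import useLoadModels from "../hooks/useLoadModels"; // path assumed from the repo layout

// Hypothetical stand-in for the real getModels callback, which fetches from the server.
const getModels = async (search?: string): Promise<string[]> => {
  const all = ["model-a", "model-b", "model-c"]; // placeholder data
  if (search === undefined) {
    return all; // an omitted search now means "load everything"
  }
  const query = search; // const capture keeps the narrowed string type in the callback
  return all.filter((model) => model.includes(query));
};

export function ModelList({ search }: { search?: string }) {
  // New argument order: getModels first, then the (now optional) search string.
  const models = useLoadModels(getModels, search);
  return (
    <ul>
      {models.map((model) => (
        <li key={model}>{model}</li>
      ))}
    </ul>
  );
}
```
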
2 changes: 1 addition & 1 deletion vscode-extension/editor/src/VSCodeEditor.tsx
@@ -199,7 +199,7 @@ export default function VSCodeEditor() {
}, [setupTelemetryIfAllowed]);

const getModels = useCallback(
-async (search: string) => {
+async (search?: string) => {
// For now, rely on caching and handle client-side search filtering
// We will use server-side search filtering for Gradio
const res = await ufetch.get(ROUTE_TABLE.LIST_MODELS(aiConfigServerUrl));
