diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
index dfd398ca3..f293cf4fe 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
@@ -38,7 +38,7 @@ export const OpenAIChatModelParserPromptSchema: PromptSchema = {
         items: {
           type: "object",
           required: ["name", "parameters"],
-          parameters: {
+          properties: {
             name: {
               type: "string",
             },
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
new file mode 100644
index 000000000..1986e99dc
--- /dev/null
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
@@ -0,0 +1,61 @@
+import { PromptSchema } from "../../utils/promptUtils";
+
+export const PaLMChatParserPromptSchema: PromptSchema = {
+  // See https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat for settings
+  // and defaults. The settings below are supported settings specified in the PaLMChatParser
+  // refine_chat_completion_params implementation.
+  input: {
+    type: "string",
+  },
+  model_settings: {
+    type: "object",
+    properties: {
+      context: {
+        type: "string",
+      },
+      candidate_count: {
+        type: "integer",
+        minimum: 1,
+        maximum: 4,
+      },
+      temperature: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_p: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_k: {
+        type: "integer",
+        minimum: 1,
+        maximum: 40,
+      },
+      examples: {
+        type: "array",
+        items: {
+          type: "object",
+          required: ["input", "output"],
+          properties: {
+            input: {
+              type: "string",
+            },
+            output: {
+              type: "string",
+            },
+          },
+        },
+      },
+    },
+  },
+  prompt_metadata: {
+    type: "object",
+    properties: {
+      remember_chat_context: {
+        type: "boolean",
+      },
+    },
+  },
+};
diff --git a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
index 4f6886067..d1b22cbc4 100644
--- a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
+++ b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
@@ -3,6 +3,7 @@ import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/Open
 import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatVisionModelParserPromptSchema";
 import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
 import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
+import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";
 
 /**
  * Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -73,7 +74,7 @@ export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
   "models/text-bison-001": PaLMTextParserPromptSchema,
 
   // PaLMChatParser
-  // "models/chat-bison-001":
+  "models/chat-bison-001": PaLMChatParserPromptSchema,
 };
 
 export type PromptInputSchema =
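
A minimal usage sketch, not part of the diff: with "models/chat-bison-001" now registered in PROMPT_SCHEMAS, the editor client can resolve the PaLM chat schema by model name. PROMPT_SCHEMAS and PromptSchema are the real exports from promptUtils.ts shown above; the schemaForModel helper and the import path are assumptions added only for illustration.

```ts
// Assumed to live somewhere under client/src, hence the relative path.
import { PROMPT_SCHEMAS, PromptSchema } from "./utils/promptUtils";

// Hypothetical helper (illustration only): look up the schema registered
// for a given model name, if any.
function schemaForModel(modelName: string): PromptSchema | undefined {
  return PROMPT_SCHEMAS[modelName];
}

const palmChat = schemaForModel("models/chat-bison-001");
// With this diff applied, palmChat is PaLMChatParserPromptSchema, so the
// editor can render bounded settings controls, e.g. temperature in [0, 1].
console.log(palmChat?.model_settings?.properties?.temperature);
```

This lookup is also why the `parameters:` → `properties:` fix in OpenAIChatModelParserPromptSchema matters: these schemas follow the JSON Schema convention of declaring an object's keys under `properties`, and consumers walking `model_settings.properties` would not have found the function `name` field under the misnamed key.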