From 4f96967f79c6eab62ae4466999b3663185428067 Mon Sep 17 00:00:00 2001
From: Ryan Holinshead <>
Date: Sat, 6 Jan 2024 16:36:31 -0500
Subject: [PATCH] [editor][easy] Add 'model' to relevant Prompt Schemas

# [editor][easy] Add 'model' to relevant Prompt Schemas

There is still some investigation needed into how to handle model vs. parser
selection nicely in the editor UX. For now, let's just expose the 'model'
option in the settings for the parsers that support it so that usage of those
parsers isn't blocked.
---
 .../prompt_schemas/DalleImageGenerationParserPromptSchema.ts   | 3 +++
 .../shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts | 3 +++
 .../src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts    | 3 +++
 .../src/shared/prompt_schemas/PaLMTextParserPromptSchema.ts    | 3 +++
 4 files changed, 12 insertions(+)

diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/DalleImageGenerationParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/DalleImageGenerationParserPromptSchema.ts
index f2b8ef211..4037cbaaa 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/DalleImageGenerationParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/DalleImageGenerationParserPromptSchema.ts
@@ -10,6 +10,9 @@ export const DalleImageGenerationParserPromptSchema: PromptSchema = {
   model_settings: {
     type: "object",
     properties: {
+      model: {
+        type: "string",
+      },
       n: {
         type: "integer",
         minimum: 1,
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
index 22c888350..f8dc0886a 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
@@ -8,6 +8,9 @@ export const OpenAIChatModelParserPromptSchema: PromptSchema = {
   model_settings: {
     type: "object",
     properties: {
+      model: {
+        type: "string",
+      },
       system_prompt: {
         type: "string",
       },
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
index 1986e99dc..9f2ac29a0 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
@@ -10,6 +10,9 @@ export const PaLMChatParserPromptSchema: PromptSchema = {
   model_settings: {
     type: "object",
     properties: {
+      model: {
+        type: "string",
+      },
       context: {
         type: "string",
       },
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMTextParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMTextParserPromptSchema.ts
index 64c0cfacc..0a3d33f24 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMTextParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMTextParserPromptSchema.ts
@@ -10,6 +10,9 @@ export const PaLMTextParserPromptSchema: PromptSchema = {
   model_settings: {
     type: "object",
     properties: {
+      model: {
+        type: "string",
+      },
       candidate_count: {
         type: "integer",
         minimum: 1,
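
For reference, all four hunks apply the same pattern: a string-typed `model` property is added at the top of `model_settings.properties`. Below is a minimal, self-contained sketch of the resulting shape; the local type aliases are simplified stand-ins for the editor client's actual `PromptSchema` type (which is not reproduced here), and only the `model` property itself mirrors the change in this patch.

```typescript
// Sketch of the schema shape after this patch. The type aliases below are
// assumptions for illustration, not the editor client's real types.
type SettingSchema = {
  type: string;
  minimum?: number;
  properties?: Record<string, SettingSchema>;
};

type PromptSchemaSketch = {
  model_settings: SettingSchema;
};

const ExampleParserPromptSchema: PromptSchemaSketch = {
  model_settings: {
    type: "object",
    properties: {
      // Newly exposed: lets the editor settings UI set the model directly
      // for parsers that support it.
      model: {
        type: "string",
      },
      // Existing parser-specific settings (n, system_prompt, context,
      // candidate_count, ...) remain unchanged.
    },
  },
};
```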