From 86890cdb9c20a7ab26adb606106aa003aa80c0e5 Mon Sep 17 00:00:00 2001
From: Ryan Holinshead <>
Date: Thu, 4 Jan 2024 14:05:53 -0500
Subject: [PATCH] [editor] PaLMChatParserPromptSchema
# [editor] PaLMChatParserPromptSchema
Implement the Prompt Schema for PaLM Chat, using the supported params and defaults from https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat, limited to those defined in `refine_chat_completion_params` in palm.py:
```
def refine_chat_completion_params(model_settings):
    # completion parameters to be used for Palm's chat completion api
    # messages handled separately
    supported_keys = {
        "candidate_count",
        "examples",
        "model",
        "temperature",
        "top_k",
        "top_p",
        "context",
    }
```
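For illustration, model settings that conform to this schema might look like the following. This is a hypothetical example: the values are made up, and only the keys and their allowed ranges come from the schema / supported params above.
```
// Hypothetical settings object; values are illustrative only.
// Keys and ranges follow the schema added below: candidate_count 1-4,
// temperature and top_p 0-1, top_k 1-40.
const examplePaLMChatSettings = {
  context: "You are a helpful assistant.",
  candidate_count: 1,
  temperature: 0.7,
  top_p: 0.95,
  top_k: 40,
  examples: [
    { input: "Hi", output: "Hello! How can I help?" },
  ],
};
```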
---
 .../OpenAIChatModelParserPromptSchema.ts   |  2 +-
 .../PaLMChatParserPromptSchema.ts          | 61 +++++++++++++++++++
 .../editor/client/src/utils/promptUtils.ts |  3 +-
 3 files changed, 64 insertions(+), 2 deletions(-)
create mode 100644 python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
index dfd398ca3..f293cf4fe 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
@@ -38,7 +38,7 @@ export const OpenAIChatModelParserPromptSchema: PromptSchema = {
         items: {
           type: "object",
           required: ["name", "parameters"],
-          parameters: {
+          properties: {
             name: {
               type: "string",
             },
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
new file mode 100644
index 000000000..1986e99dc
--- /dev/null
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
@@ -0,0 +1,61 @@
+import { PromptSchema } from "../../utils/promptUtils";
+
+export const PaLMChatParserPromptSchema: PromptSchema = {
+  // See https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat for settings
+  // and defaults. The settings below are supported settings specified in the PaLMChatParser
+  // refine_chat_completion_params implementation.
+  input: {
+    type: "string",
+  },
+  model_settings: {
+    type: "object",
+    properties: {
+      context: {
+        type: "string",
+      },
+      candidate_count: {
+        type: "integer",
+        minimum: 1,
+        maximum: 4,
+      },
+      temperature: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_p: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_k: {
+        type: "integer",
+        minimum: 1,
+        maximum: 40,
+      },
+      examples: {
+        type: "array",
+        items: {
+          type: "object",
+          required: ["input", "output"],
+          properties: {
+            input: {
+              type: "string",
+            },
+            output: {
+              type: "string",
+            },
+          },
+        },
+      },
+    },
+  },
+  prompt_metadata: {
+    type: "object",
+    properties: {
+      remember_chat_context: {
+        type: "boolean",
+      },
+    },
+  },
+};
diff --git a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
index 4f6886067..d1b22cbc4 100644
--- a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
+++ b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
@@ -3,6 +3,7 @@ import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/Open
import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatVisionModelParserPromptSchema";
import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
+import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";
/**
* Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -73,7 +74,7 @@ export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
"models/text-bison-001": PaLMTextParserPromptSchema,
// PaLMChatParser
- // "models/chat-bison-001":
+ "models/chat-bison-001": PaLMChatParserPromptSchema,
};
export type PromptInputSchema =