Skip to content

Commit

Permalink
[editor] PaLMChatParserPromptSchema
Browse files Browse the repository at this point in the history
# [editor] PaLMChatParserPromptSchema

Implement the Prompt Schema for PaLM Chat, using the supported params with defaults from https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat, selecting those that are defined in the `refine_chat_completion_params` of palm.py:
```
def refine_chat_completion_params(model_settings):
    # completion parameters to be used for Palm's chat completion api
    # messages handled separately
    supported_keys = {
        "candidate_count",
        "examples",
        "model",
        "temperature",
        "top_k",
        "top_p",
        "context",
    }
 ```

<img width="1853" alt="Screenshot 2024-01-04 at 1 59 24 PM" src="https://github.com/lastmile-ai/aiconfig/assets/5060851/50e08b43-2956-4fa0-98e4-4ab165efb6a9">
  • Loading branch information
Ryan Holinshead committed Jan 4, 2024
1 parent e926e4e commit 86890cd
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ export const OpenAIChatModelParserPromptSchema: PromptSchema = {
items: {
type: "object",
required: ["name", "parameters"],
parameters: {
properties: {
name: {
type: "string",
},
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import { PromptSchema } from "../../utils/promptUtils";

/**
 * Prompt schema for the PaLM Chat model parser.
 *
 * Parameter ranges/defaults follow the Vertex AI text-chat model reference:
 * https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat
 * Only the settings accepted by the parser's refine_chat_completion_params
 * implementation are exposed here.
 */
export const PaLMChatParserPromptSchema: PromptSchema = {
  // The prompt input is a plain chat message string.
  input: { type: "string" },
  model_settings: {
    type: "object",
    properties: {
      // NOTE(review): presumably the system/grounding context string for the
      // conversation — confirm against the PaLM chat API docs.
      context: { type: "string" },
      candidate_count: {
        type: "integer",
        minimum: 1,
        maximum: 4,
      },
      temperature: {
        type: "number",
        minimum: 0,
        maximum: 1,
      },
      top_p: {
        type: "number",
        minimum: 0,
        maximum: 1,
      },
      top_k: {
        type: "integer",
        minimum: 1,
        maximum: 40,
      },
      // Few-shot examples: each entry pairs an example input with the
      // desired model output; both fields are required strings.
      examples: {
        type: "array",
        items: {
          type: "object",
          required: ["input", "output"],
          properties: {
            input: { type: "string" },
            output: { type: "string" },
          },
        },
      },
    },
  },
  prompt_metadata: {
    type: "object",
    properties: {
      // Whether prior conversation turns are carried into this prompt.
      remember_chat_context: { type: "boolean" },
    },
  },
};
3 changes: 2 additions & 1 deletion python/src/aiconfig/editor/client/src/utils/promptUtils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/Open
import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatVisionModelParserPromptSchema";
import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";

/**
* Get the name of the model for the specified prompt. The name will either be specified in the prompt's
Expand Down Expand Up @@ -73,7 +74,7 @@ export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
"models/text-bison-001": PaLMTextParserPromptSchema,

// PaLMChatParser
// "models/chat-bison-001":
"models/chat-bison-001": PaLMChatParserPromptSchema,
};

export type PromptInputSchema =
Expand Down

0 comments on commit 86890cd

Please sign in to comment.