From 86890cdb9c20a7ab26adb606106aa003aa80c0e5 Mon Sep 17 00:00:00 2001
From: Ryan Holinshead <>
Date: Thu, 4 Jan 2024 14:05:53 -0500
Subject: [PATCH 1/2] [editor] PaLMChatParserPromptSchema

# [editor] PaLMChatParserPromptSchema

Implement the prompt schema for PaLM Chat, using the supported parameters and
defaults from
https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat,
selecting those that are defined in `refine_chat_completion_params` in palm.py:

```
def refine_chat_completion_params(model_settings):
    # completion parameters to be used for PaLM's chat completion api
    # messages handled separately
    supported_keys = {
        "candidate_count",
        "examples",
        "model",
        "temperature",
        "top_k",
        "top_p",
        "context",
    }
```

[Screenshot: rendered PaLM Chat prompt settings editor]
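
As a rough sketch (not part of this diff), the schema is meant to mirror that
server-side filtering on the client: the supported-key set can be derived from
the schema's `model_settings.properties`. Assuming the `PromptSchema` type
exposes `model_settings.properties` as a plain keyed object, a hypothetical
helper would look like:

```
import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";

// Hypothetical helper (not in this diff): drop any settings the schema does
// not declare, mirroring refine_chat_completion_params in palm.py.
function refineSettings(
  settings: Record<string, unknown>
): Record<string, unknown> {
  const supported = new Set(
    Object.keys(PaLMChatParserPromptSchema.model_settings?.properties ?? {})
  );
  return Object.fromEntries(
    Object.entries(settings).filter(([key]) => supported.has(key))
  );
}

// Example: refineSettings({ temperature: 0.7, unsupported_key: 1 })
// -> { temperature: 0.7 }
```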
---
 .../OpenAIChatModelParserPromptSchema.ts      |  2 +-
 .../PaLMChatParserPromptSchema.ts             | 61 +++++++++++++++++++
 .../editor/client/src/utils/promptUtils.ts    |  3 +-
 3 files changed, 64 insertions(+), 2 deletions(-)
 create mode 100644 python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts

diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
index dfd398ca3..f293cf4fe 100644
--- a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/OpenAIChatModelParserPromptSchema.ts
@@ -38,7 +38,7 @@ export const OpenAIChatModelParserPromptSchema: PromptSchema = {
       items: {
         type: "object",
         required: ["name", "parameters"],
-        parameters: {
+        properties: {
           name: {
             type: "string",
           },
diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
new file mode 100644
index 000000000..1986e99dc
--- /dev/null
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/PaLMChatParserPromptSchema.ts
@@ -0,0 +1,61 @@
+import { PromptSchema } from "../../utils/promptUtils";
+
+export const PaLMChatParserPromptSchema: PromptSchema = {
+  // See https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-chat for settings
+  // and defaults. The settings below are supported settings specified in the PaLMChatParser
+  // refine_chat_completion_params implementation.
+  input: {
+    type: "string",
+  },
+  model_settings: {
+    type: "object",
+    properties: {
+      context: {
+        type: "string",
+      },
+      candidate_count: {
+        type: "integer",
+        minimum: 1,
+        maximum: 4,
+      },
+      temperature: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_p: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_k: {
+        type: "integer",
+        minimum: 1,
+        maximum: 40,
+      },
+      examples: {
+        type: "array",
+        items: {
+          type: "object",
+          required: ["input", "output"],
+          properties: {
+            input: {
+              type: "string",
+            },
+            output: {
+              type: "string",
+            },
+          },
+        },
+      },
+    },
+  },
+  prompt_metadata: {
+    type: "object",
+    properties: {
+      remember_chat_context: {
+        type: "boolean",
+      },
+    },
+  },
+};
diff --git a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
index 4f6886067..d1b22cbc4 100644
--- a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
+++ b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
@@ -3,6 +3,7 @@ import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/Open
 import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatVisionModelParserPromptSchema";
 import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
 import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
+import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";
 
 /**
  * Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -73,7 +74,7 @@
   "models/text-bison-001": PaLMTextParserPromptSchema,
 
   // PaLMChatParser
-  // "models/chat-bison-001":
+  "models/chat-bison-001": PaLMChatParserPromptSchema,
 };
 
 export type PromptInputSchema =

From 32be9b4709e363f52aa460a50d801339b806ef91 Mon Sep 17 00:00:00 2001
From: Ryan Holinshead <>
Date: Thu, 4 Jan 2024 15:36:46 -0500
Subject: [PATCH 2/2] [editor] HuggingFaceTextGenerationParserPromptSchema

# [editor] HuggingFaceTextGenerationParserPromptSchema

Add the PromptSchema for HuggingFaceTextGenerationParser, with the supported
properties from `refine_chat_completion_params` in the parser implementation.
Types are taken from
https://github.com/huggingface/huggingface_hub/blob/a331e82aad1bc63038194611236db28fa013814c/src/huggingface_hub/inference/_client.py#L1206
and defaults from https://huggingface.co/docs/api-inference/detailed_parameters,
where they are listed.

[Screenshot: rendered HuggingFace text generation prompt settings editor]

Note: this is for the default/core parser, which uses the Inference API. We'll
need to add the other prompt schemas for the gradio extension models. Will
update the HF prompt UX to support the actual `model` setting in a subsequent
diff.
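
For reference, a minimal sketch (not part of this diff) of how the editor
would resolve this schema, assuming the snippet lives alongside
promptUtils.ts. Unlike the PaLM entries, which are keyed by model name, this
entry is keyed by the parser ID itself:

```
import { PROMPT_SCHEMAS } from "./promptUtils";

// The HuggingFace text generation entry is keyed by parser ID rather than by
// a model name like "models/chat-bison-001".
const schema = PROMPT_SCHEMAS["HuggingFaceTextGenerationParser"];

// The declared bounds can drive the settings UI, e.g. a 0-1 slider for
// temperature.
console.log(schema?.model_settings?.properties?.temperature);
```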
---
 ...ingFaceTextGenerationParserPromptSchema.ts | 73 +++++++++++++++++++
 .../editor/client/src/utils/promptUtils.ts    |  3 +-
 2 files changed, 75 insertions(+), 1 deletion(-)
 create mode 100644 python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts

diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts
new file mode 100644
index 000000000..cc605cf9c
--- /dev/null
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts
@@ -0,0 +1,73 @@
+import { PromptSchema } from "../../utils/promptUtils";
+
+export const HuggingFaceTextGenerationParserPromptSchema: PromptSchema = {
+  // See https://github.com/huggingface/huggingface_hub/blob/a331e82aad1bc63038194611236db28fa013814c/src/huggingface_hub/inference/_client.py#L1206
+  // for settings and https://huggingface.co/docs/api-inference/detailed_parameters for defaults.
+  // The settings below are supported settings specified in the HuggingFaceTextGenerationParser
+  // refine_chat_completion_params implementation.
+  input: {
+    type: "string",
+  },
+  model_settings: {
+    type: "object",
+    properties: {
+      model: {
+        type: "string",
+      },
+      temperature: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_k: {
+        type: "integer",
+      },
+      top_p: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      details: {
+        type: "boolean",
+      },
+      stream: {
+        type: "boolean",
+      },
+      do_sample: {
+        type: "boolean",
+      },
+      max_new_tokens: {
+        type: "integer",
+      },
+      best_of: {
+        type: "integer",
+      },
+      repetition_penalty: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      return_full_text: {
+        type: "boolean",
+      },
+      seed: {
+        type: "integer",
+      },
+      stop_sequences: {
+        type: "array",
+        items: {
+          type: "string",
+        },
+      },
+      truncate: {
+        type: "integer",
+      },
+      typical_p: {
+        type: "number",
+      },
+      watermark: {
+        type: "boolean",
+      },
+    },
+  },
+};
diff --git a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
index d1b22cbc4..557bf3210 100644
--- a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
+++ b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
@@ -4,6 +4,7 @@ import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schema
 import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
 import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
 import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";
+import { HuggingFaceTextGenerationParserPromptSchema } from "../shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema";
 
 /**
  * Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -68,7 +69,7 @@ export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
   "dall-e-3": DalleImageGenerationParserPromptSchema,
 
   // HuggingFaceTextGenerationParser
-  // "HuggingFaceTextGenerationParser":
+  HuggingFaceTextGenerationParser: HuggingFaceTextGenerationParserPromptSchema,
 
   // PaLMTextParser
   "models/text-bison-001": PaLMTextParserPromptSchema,
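
A possible follow-up sanity check (sketch only, not included in these
patches): compare the schema's declared settings against the supported keys.
The hard-coded list below is copied from the schema in this diff and assumed
to match the parser's `refine_chat_completion_params`:

```
import { HuggingFaceTextGenerationParserPromptSchema } from "../shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema";

// Supported settings, copied from the schema added in this diff; assumed to
// match the parser's refine_chat_completion_params.
const supportedKeys = [
  "model", "temperature", "top_k", "top_p", "details", "stream",
  "do_sample", "max_new_tokens", "best_of", "repetition_penalty",
  "return_full_text", "seed", "stop_sequences", "truncate",
  "typical_p", "watermark",
];

const declared = Object.keys(
  HuggingFaceTextGenerationParserPromptSchema.model_settings?.properties ?? {}
);

// Every supported setting should be declared in the schema, and vice versa.
console.assert(
  supportedKeys.every((k) => declared.includes(k)) &&
    declared.every((k) => supportedKeys.includes(k)),
  "schema settings out of sync with refine_chat_completion_params"
);
```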