diff --git a/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts
new file mode 100644
index 000000000..cc605cf9c
--- /dev/null
+++ b/python/src/aiconfig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts
@@ -0,0 +1,73 @@
+import { PromptSchema } from "../../utils/promptUtils";
+
+export const HuggingFaceTextGenerationParserPromptSchema: PromptSchema = {
+  // See https://github.com/huggingface/huggingface_hub/blob/a331e82aad1bc63038194611236db28fa013814c/src/huggingface_hub/inference/_client.py#L1206
+  // for settings and https://huggingface.co/docs/api-inference/detailed_parameters for defaults.
+  // The settings below are supported settings specified in the HuggingFaceTextGenerationParser
+  // refine_chat_completion_params implementation.
+  input: {
+    type: "string",
+  },
+  model_settings: {
+    type: "object",
+    properties: {
+      model: {
+        type: "string",
+      },
+      temperature: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      top_k: {
+        type: "integer",
+      },
+      top_p: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      details: {
+        type: "boolean",
+      },
+      stream: {
+        type: "boolean",
+      },
+      do_sample: {
+        type: "boolean",
+      },
+      max_new_tokens: {
+        type: "integer",
+      },
+      best_of: {
+        type: "integer",
+      },
+      repetition_penalty: {
+        type: "number",
+        minimum: 0,
+        maximum: 1,
+      },
+      return_full_text: {
+        type: "boolean",
+      },
+      seed: {
+        type: "integer",
+      },
+      stop_sequences: {
+        type: "array",
+        items: {
+          type: "string",
+        },
+      },
+      truncate: {
+        type: "integer",
+      },
+      typical_p: {
+        type: "number",
+      },
+      watermark: {
+        type: "boolean",
+      },
+    },
+  },
+};
diff --git a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
index d1b22cbc4..557bf3210 100644
--- a/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
+++ b/python/src/aiconfig/editor/client/src/utils/promptUtils.ts
@@ -4,6 +4,7 @@ import { OpenAIChatVisionModelParserPromptSchema } from "../shared/prompt_schema
 import { DalleImageGenerationParserPromptSchema } from "../shared/prompt_schemas/DalleImageGenerationParserPromptSchema";
 import { PaLMTextParserPromptSchema } from "../shared/prompt_schemas/PaLMTextParserPromptSchema";
 import { PaLMChatParserPromptSchema } from "../shared/prompt_schemas/PaLMChatParserPromptSchema";
+import { HuggingFaceTextGenerationParserPromptSchema } from "../shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema";
 
 /**
  * Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -68,7 +69,7 @@ export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
   "dall-e-3": DalleImageGenerationParserPromptSchema,
 
   // HuggingFaceTextGenerationParser
-  // "HuggingFaceTextGenerationParser":
+  HuggingFaceTextGenerationParser: HuggingFaceTextGenerationParserPromptSchema,
 
   // PaLMTextParser
   "models/text-bison-001": PaLMTextParserPromptSchema,
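
For context (not part of the diff itself): a minimal, self-contained sketch of how a registry like `PROMPT_SCHEMAS` is consumed. The key `HuggingFaceTextGenerationParser` and the schema fields come from the diff above; the local `JSONSchema`/`PromptSchema` shapes and the `getPromptSchema` helper are simplified assumptions for illustration, not the repo's exact types.

```typescript
// Assumed, simplified stand-ins for the types in promptUtils.ts.
type JSONSchema = {
  type: string;
  properties?: Record<string, JSONSchema>;
  items?: JSONSchema;
  minimum?: number;
  maximum?: number;
};

type PromptSchema = {
  input: JSONSchema;
  model_settings?: JSONSchema;
};

// Mirrors the registry entry this diff adds: the key is the model parser
// id, the value is the schema the editor uses to render settings controls.
// Only a couple of properties are repeated here for brevity.
const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
  HuggingFaceTextGenerationParser: {
    input: { type: "string" },
    model_settings: {
      type: "object",
      properties: {
        temperature: { type: "number", minimum: 0, maximum: 1 },
        max_new_tokens: { type: "integer" },
      },
    },
  },
};

// Look up the schema for a prompt's resolved model name. Returning
// undefined would leave the editor without a structured settings form
// for that model (assumption about the fallback behavior).
function getPromptSchema(modelName: string): PromptSchema | undefined {
  return PROMPT_SCHEMAS[modelName];
}

const schema = getPromptSchema("HuggingFaceTextGenerationParser");
console.log(schema?.model_settings?.properties?.temperature);
// -> { type: "number", minimum: 0, maximum: 1 }
```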