-
Notifications
You must be signed in to change notification settings - Fork 80
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[editor] HuggingFaceTextGenerationParserPromptSchema
# [editor] HuggingFaceTextGenerationParserPromptSchema Adds the PromptSchema for HuggingFaceTextGenerationParser, covering the properties supported by `refine_chat_completion_params` in the parser implementation. Types were obtained from https://github.com/huggingface/huggingface_hub/blob/a331e82aad1bc63038194611236db28fa013814c/src/huggingface_hub/inference/_client.py#L1206, and defaults from https://huggingface.co/docs/api-inference/detailed_parameters where listed. <img width="1080" alt="Screenshot 2024-01-04 at 3 19 05 PM" src="https://github.com/lastmile-ai/aiconfig/assets/5060851/8cb40459-1f7c-42b2-8b35-0333c380be01"> Note: this is for the default/core parser, which uses the Inference API. We'll need to add the other prompt schemas for the extension models for Gradio. A subsequent diff will update the HF prompt UX to support the actual 'model' field.
- Loading branch information
Ryan Holinshead
committed
Jan 4, 2024
1 parent
86890cd
commit 32be9b4
Showing
2 changed files
with
75 additions
and
1 deletion.
There are no files selected for viewing
73 changes: 73 additions & 0 deletions
73
...ig/editor/client/src/shared/prompt_schemas/HuggingFaceTextGenerationParserPromptSchema.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
import { PromptSchema } from "../../utils/promptUtils"; | ||
|
||
export const HuggingFaceTextGenerationParserPromptSchema: PromptSchema = { | ||
// See https://github.com/huggingface/huggingface_hub/blob/a331e82aad1bc63038194611236db28fa013814c/src/huggingface_hub/inference/_client.py#L1206 | ||
// for settings and https://huggingface.co/docs/api-inference/detailed_parameters for defaults. | ||
// The settings below are supported settings specified in the HuggingFaceTextGenerationParser | ||
// refine_chat_completion_params implementation. | ||
input: { | ||
type: "string", | ||
}, | ||
model_settings: { | ||
type: "object", | ||
properties: { | ||
model: { | ||
type: "string", | ||
}, | ||
temperature: { | ||
type: "number", | ||
minimum: 0, | ||
maximum: 1, | ||
}, | ||
top_k: { | ||
type: "integer", | ||
}, | ||
top_p: { | ||
type: "number", | ||
minimum: 0, | ||
maximum: 1, | ||
}, | ||
details: { | ||
type: "boolean", | ||
}, | ||
stream: { | ||
type: "boolean", | ||
}, | ||
do_sample: { | ||
type: "boolean", | ||
}, | ||
max_new_tokens: { | ||
type: "integer", | ||
}, | ||
best_of: { | ||
type: "integer", | ||
}, | ||
repetition_penalty: { | ||
type: "number", | ||
minimum: 0, | ||
maximum: 1, | ||
}, | ||
return_full_text: { | ||
type: "boolean", | ||
}, | ||
seed: { | ||
type: "integer", | ||
}, | ||
stop_sequences: { | ||
type: "array", | ||
items: { | ||
type: "string", | ||
}, | ||
}, | ||
truncate: { | ||
type: "integer", | ||
}, | ||
typical_p: { | ||
type: "number", | ||
}, | ||
watermark: { | ||
type: "boolean", | ||
}, | ||
}, | ||
}, | ||
}; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters