From 5a75379bac50eac20c91899f8f6f785026f8ee2e Mon Sep 17 00:00:00 2001 From: "Rossdan Craig rossdan@lastmileai.dev" <> Date: Mon, 1 Jan 2024 21:59:35 -0500 Subject: [PATCH] [python] Created `get_parameters` API **Disclaimer:** I know this is 400+ lines changed, but ~300 of those are just adding/refactoring automated tests!!! We already had `get_prompt_parameters`, but this isn't good enough if parameters aren't defined locally and we want to bubble up to the aiconfig defaults. See comment in https://github.com/lastmile-ai/aiconfig/pull/661#discussion_r1437941729. This is important so that we can ensure we have valid params if not defined in the correct section of our AIConfig schema. This is similar in theme to https://github.com/lastmile-ai/aiconfig/pull/600 The only callsite we have today for this is in python in the `params.py` function, so I updated that callsite too. I don't know why it doesn't exist in Typescript (cc @rholinshead) but I also added this functionality in typescript in https://github.com/lastmile-ai/aiconfig/pull/669 (I'm not as familiar with automated testing there so will ask Ryan for a code pointer to help get me started) ## Test plan Added a bunch of automated tests which you can run by doing going to the `aiconfig/python` dir and running `pytest` --- python/src/aiconfig/schema.py | 87 ++++- python/src/aiconfig/util/params.py | 2 +- python/tests/test_parameter_api.py | 326 ++++++++++++++++++ ...est_programmatically_create_an_AIConfig.py | 91 +---- 4 files changed, 407 insertions(+), 99 deletions(-) create mode 100644 python/tests/test_parameter_api.py diff --git a/python/src/aiconfig/schema.py b/python/src/aiconfig/schema.py index df23faa43..617c08b5d 100644 --- a/python/src/aiconfig/schema.py +++ b/python/src/aiconfig/schema.py @@ -335,6 +335,85 @@ def get_metadata(self, prompt_name: Optional[str] = None): else: return self.metadata + + def get_parameters( + self, + prompt_or_prompt_name: Optional[str | Prompt] = None, + ) -> JSONObject: + """ + Get the parameters for a prompt, using the global parameters if + needed. + + Args: + prompt_or_prompt_name Optional[str | Prompt]: The name of the + prompt or the prompt object. If not specified, use the + global parameters. + """ + prompt = prompt_or_prompt_name + if isinstance(prompt_or_prompt_name, str): + if prompt_or_prompt_name not in self.prompt_index: + raise IndexError(f"Prompt '{prompt_or_prompt_name}' not found in config, available prompts are:\n {list(self.prompt_index.keys())}") + prompt = self.prompt_index[prompt_or_prompt_name] + + assert prompt is None or isinstance(prompt, Prompt) + if prompt is None or not prompt.metadata or not prompt.metadata.parameters: + return self.get_global_parameters() + + return self.get_prompt_parameters(prompt) + + # pylint: disable=W0102 + def get_global_parameters( + self, + default_return_value: JSONObject = {}, + ) -> JSONObject: + """ + Get the global parameters for the AIConfig. If they're not defined, + return a default value ({} unless overridden) + + Args: + default_return_value JSONObject - Default value to return if + global parameters are not defined. + """ + return self._get_global_parameters_exact() or default_return_value + # pylint: enable=W0102 + + def _get_global_parameters_exact(self) -> JSONObject | None: + """ + Get the global parameters for the AIConfig. 
This should be
+        the explicit value (ie: if parameters is None, return None, not {})
+        """
+        return self.metadata.parameters
+
+    # pylint: disable=W0102
+    def get_prompt_parameters(
+        self,
+        prompt: Prompt,
+        default_return_value: JSONObject = {},
+    ) -> JSONObject:
+        """
+        Get the prompt's local parameters. If they're not defined,
+        return a default value ({} unless overridden)
+
+        Args:
+            default_return_value JSONObject - Default value to return if
+                prompt parameters are not defined.
+        """
+        return self._get_prompt_parameters_exact(prompt) \
+            or default_return_value
+    # pylint: enable=W0102
+
+    def _get_prompt_parameters_exact(
+        self,
+        prompt: Prompt,
+    ) -> JSONObject | None:
+        """
+        Get the prompt's local parameters. This should be
+        the explicit value (ie: if parameters is None, return None, not {})
+        """
+        if not prompt.metadata:
+            return prompt.metadata
+        return prompt.metadata.parameters
+
     def set_parameter(self, parameter_name: str, parameter_value, prompt_name: Optional[str] = None):
         """
         Sets a parameter in the AI configuration metadata. If a prompt_name is specified, it adds the parameter to
@@ -733,14 +812,6 @@ def get_output_text(self, prompt: str | Prompt):
             prompt (str|Prompt): The name of the prompt or the prompt object.
         """
 
-    def get_prompt_parameters(self, prompt: Prompt):
-        """
-        Gets the prompt's local parameters for a prompt.
-        """
-        if not prompt.metadata:
-            return {}
-        return prompt.metadata.parameters
-
     """
    Library Helpers
    """
diff --git a/python/src/aiconfig/util/params.py b/python/src/aiconfig/util/params.py
index d81ba4bfe..8fcc72c1e 100644
--- a/python/src/aiconfig/util/params.py
+++ b/python/src/aiconfig/util/params.py
@@ -310,7 +310,7 @@ def resolve_prompt_string(
         augmented_params = collect_prompt_references(current_prompt, ai_config)
 
         # augment params with config-level params
-        augmented_params.update(ai_config.metadata.parameters)
+        augmented_params.update(ai_config.get_global_parameters())
 
         # augment params with prompt level params
         augmented_params.update(ai_config.get_prompt_parameters(current_prompt))
diff --git a/python/tests/test_parameter_api.py b/python/tests/test_parameter_api.py
new file mode 100644
index 000000000..e33371bb5
--- /dev/null
+++ b/python/tests/test_parameter_api.py
@@ -0,0 +1,326 @@
+import pytest
+from aiconfig.Config import AIConfigRuntime
+from aiconfig.util.config_utils import extract_override_settings
+
+from aiconfig.schema import (
+    AIConfig,
+    ConfigMetadata,
+    ExecuteResult,
+    ModelMetadata,
+    Prompt,
+    PromptMetadata,
+)
+
+
+@pytest.fixture
+def ai_config_runtime():
+    runtime = AIConfigRuntime.create("Untitled AIConfig")
+    return runtime
+
+@pytest.fixture
+def ai_config():
+    config = AIConfig(
+        name="Untitled AIConfig",
+        schema_version="latest",
+        metadata=ConfigMetadata(),
+        prompts=[],
+    )
+    return config
+
+def test_delete_nonexistent_parameter(ai_config_runtime: AIConfigRuntime):
+    """
+    Test deleting a nonexistent parameter.
+    """
+    config = ai_config_runtime
+    parameter_name_to_delete = "param1"
+    config.add_prompt(
+        "prompt1",
+        Prompt(
+            name="prompt_name",
+            input="This is a prompt",
+            metadata=PromptMetadata(model="fakemodel"),
+        ),
+    )
+
+    # Ensure deleting a nonexistent parameter raises a KeyError
+    with pytest.raises(
+        KeyError, match=f"Parameter '{parameter_name_to_delete}' does not exist."
+    ):
+        config.delete_parameter(parameter_name_to_delete)
+
+
+def test_set_global_parameter(ai_config: AIConfig):
+    """
+    Test setting a global parameter.
+ """ + parameter_name = "global_param" + parameter_value = "global_value" + + ai_config.set_parameter(parameter_name, parameter_value, prompt_name=None) + + # Ensure the global parameter is set correctly + assert ai_config.metadata.parameters[parameter_name] == parameter_value + + +def test_set_parameter_for_prompt(ai_config: AIConfig): + """ + Test setting a parameter for a specific prompt. + """ + prompt_name = "prompt1" + parameter_name = "prompt_param" + parameter_value = "prompt_value" + + # Create a sample prompt for testing + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + ai_config.set_parameter(parameter_name, parameter_value, prompt_name=prompt_name) + + # Ensure the parameter is set for the specific prompt + assert ( + ai_config.prompt_index[prompt_name].metadata.parameters[parameter_name] + == parameter_value + ) + assert ai_config.prompts[0].metadata.parameters[parameter_name] == parameter_value + + +def test_update_existing_parameter(ai_config: AIConfig): + """ + Test updating an existing parameter. + """ + parameter_name = "existing_param" + initial_value = "initial_value" + updated_value = "updated_value" + + ai_config.set_parameter(parameter_name, initial_value, prompt_name=None) + ai_config.update_parameter(parameter_name, updated_value, prompt_name=None) + + assert ai_config.metadata.parameters is not None + assert ai_config.metadata.parameters[parameter_name] == updated_value + + +def test_delete_existing_parameter(ai_config: AIConfig): + """ + Test deleting an existing parameter. + """ + parameter_name_to_delete = "param_to_delete" + parameter_value = "param_value" + + ai_config.set_parameter(parameter_name_to_delete, parameter_value, prompt_name=None) + ai_config.delete_parameter(parameter_name_to_delete, prompt_name=None) + + assert ai_config.metadata.parameters is not None + assert parameter_name_to_delete not in ai_config.metadata.parameters + + +# | With both local and global (should use local override) +# | Without AIConfig but local is `{}` | +def test_get_parameter_prompt_has_parameters(ai_config: AIConfig): + """ + Test getting a parameter for a prompt + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=prompt_name, + ) + + ai_config.set_parameter( + "this value", + "does not matter", + prompt_name=None, + ) + + parameters = ai_config.get_parameters(prompt_name) + assert ai_config.prompt_index["prompt1"].metadata is not None + assert parameters == ai_config.prompt_index["prompt1"].metadata.parameters + + +def test_get_parameter_prompt_has_no_metadata( + ai_config: AIConfig, +): + """ + Test getting a parameter when only aiconfig param is set. + Prompt does not have metadata. 
+ """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=None, + ) + + parameters = ai_config.get_parameters(prompt_name) + assert parameters == ai_config.metadata.parameters + + +def test_get_parameter_prompt_has_metadata_no_parameters( + ai_config: AIConfig +): + """ + Test getting a parameter when only aiconfig param is set. + Prompt has metadata but no parameters. + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=None, + ) + + parameters = ai_config.get_parameters(prompt_name) + assert parameters == ai_config.metadata.parameters + + +def test_get_parameter_prompt_has_empty_parameters( + ai_config: AIConfig +): + """ + Test getting a parameter when only aiconfig param is set. + Prompt has empty parameters. + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel", parameters={}), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=None, + ) + + parameters = ai_config.get_parameters(prompt_name) + assert parameters == ai_config.metadata.parameters + + +def test_get_parameter_prompt_has_empty_parameters( + ai_config: AIConfig +): + """ + Test getting a parameter when only aiconfig param is set. + Prompt has empty parameters. 
+ """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel", parameters={}), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=None, + ) + + parameters = ai_config.get_parameters(prompt_name) + assert parameters == ai_config.metadata.parameters + +def test_get_parameter_aiconfig_has_parameters(ai_config: AIConfig): + """ + Test getting a parameter for an aiconfig + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + ai_config.set_parameter( + "this does", + "this matter", + prompt_name=prompt_name, + ) + + parameter_name = "param1" + parameter_value = "param_value" + ai_config.set_parameter( + parameter_name, + parameter_value, + prompt_name=None, + ) + + parameters = ai_config.get_parameters() + assert parameters == ai_config.metadata.parameters + + +def test_get_parameter_aiconfig_no_parameters(ai_config: AIConfig): + """ + Test getting a parameter for an aiconfig when no parameters are set + on the aiconfig + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + ai_config.set_parameter( + "this does", + "this matter", + prompt_name=prompt_name, + ) + + parameters = ai_config.get_parameters() + assert parameters == {} + + +def test_get_parameter_prompt_no_parameters(ai_config: AIConfig): + """ + Test getting a parameter for a prompt when no parameters are set + on either the prompt of the aiconfig + """ + prompt_name = "prompt1" + prompt_data = Prompt( + name=prompt_name, + input="This is a prompt", + metadata=PromptMetadata(model="fakemodel"), + ) + ai_config.add_prompt(prompt_name, prompt_data) + + parameters = ai_config.get_parameters() + assert parameters == {} diff --git a/python/tests/test_programmatically_create_an_AIConfig.py b/python/tests/test_programmatically_create_an_AIConfig.py index 43bb8624b..09bd8729d 100644 --- a/python/tests/test_programmatically_create_an_AIConfig.py +++ b/python/tests/test_programmatically_create_an_AIConfig.py @@ -238,28 +238,6 @@ def test_get_metadata_with_nonexistent_prompt(ai_config_runtime: AIConfigRuntime config.get_metadata(prompt_name) -def test_delete_nonexistent_parameter(ai_config_runtime: AIConfigRuntime): - """ - Test deleting a nonexistent parameter. - """ - config = ai_config_runtime - parameter_name_to_delete = "param1" - config.add_prompt( - "prompt1", - Prompt( - name="prompt_name", - input="This is a prompt", - metadata=PromptMetadata(model="fakemodel"), - ), - ) - - # Ensure deleting a nonexistent parameter raises a KeyError - with pytest.raises( - KeyError, match=f"Parameter '{parameter_name_to_delete}' does not exist." - ): - config.delete_parameter(parameter_name_to_delete) - - @pytest.fixture def ai_config(): config = AIConfig( @@ -271,74 +249,6 @@ def ai_config(): return config -def test_set_global_parameter(ai_config: AIConfig): - """ - Test setting a global parameter. 
- """ - parameter_name = "global_param" - parameter_value = "global_value" - - ai_config.set_parameter(parameter_name, parameter_value, prompt_name=None) - - # Ensure the global parameter is set correctly - assert ai_config.metadata.parameters[parameter_name] == parameter_value - - -def test_set_parameter_for_prompt(ai_config: AIConfig): - """ - Test setting a parameter for a specific prompt. - """ - prompt_name = "prompt1" - parameter_name = "prompt_param" - parameter_value = "prompt_value" - - # Create a sample prompt for testing - prompt_data = Prompt( - name=prompt_name, - input="This is a prompt", - metadata=PromptMetadata(model="fakemodel"), - ) - ai_config.add_prompt(prompt_name, prompt_data) - - ai_config.set_parameter(parameter_name, parameter_value, prompt_name=prompt_name) - - # Ensure the parameter is set for the specific prompt - assert ( - ai_config.prompt_index[prompt_name].metadata.parameters[parameter_name] - == parameter_value - ) - assert ai_config.prompts[0].metadata.parameters[parameter_name] == parameter_value - - -def test_update_existing_parameter(ai_config: AIConfig): - """ - Test updating an existing parameter. - """ - parameter_name = "existing_param" - initial_value = "initial_value" - updated_value = "updated_value" - - ai_config.set_parameter(parameter_name, initial_value, prompt_name=None) - ai_config.update_parameter(parameter_name, updated_value, prompt_name=None) - - # Ensure the existing parameter is updated correctly - assert ai_config.metadata.parameters[parameter_name] == updated_value - - -def test_delete_existing_parameter(ai_config: AIConfig): - """ - Test deleting an existing parameter. - """ - parameter_name_to_delete = "param_to_delete" - parameter_value = "param_value" - - ai_config.set_parameter(parameter_name_to_delete, parameter_value, prompt_name=None) - ai_config.delete_parameter(parameter_name_to_delete, prompt_name=None) - - # Ensure the existing parameter is deleted correctly - assert parameter_name_to_delete not in ai_config.metadata.parameters - - def test_load_saved_config(tmp_path): """ Test loading a saved AIConfig from a JSON file. @@ -368,6 +278,7 @@ def test_load_saved_config(tmp_path): assert loaded_config.name == "My AIConfig" assert loaded_config.metadata.parameters == {"config_param": "config_value"} assert "prompt1" in loaded_config.prompt_index + assert loaded_config.prompt_index["prompt1"].metadata is not None assert loaded_config.prompt_index["prompt1"].metadata.parameters == { "prompt_param": "prompt_value" }
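
## Usage sketch (not part of the diff)

A minimal sketch of the resolution order `get_parameters` is meant to follow, mirroring the fixtures in `test_parameter_api.py` above. The prompt name and the `city` parameter values here are made up for illustration only; the tests in the diff are the source of truth.

```python
from aiconfig.schema import AIConfig, ConfigMetadata, Prompt, PromptMetadata

config = AIConfig(
    name="Untitled AIConfig",
    schema_version="latest",
    metadata=ConfigMetadata(),
    prompts=[],
)
config.add_prompt(
    "prompt1",
    Prompt(
        name="prompt1",
        input="Tell me about {{city}}",
        metadata=PromptMetadata(model="fakemodel"),
    ),
)

# Only an aiconfig-level (global) parameter is set, so the prompt
# bubbles up to it.
config.set_parameter("city", "Toronto", prompt_name=None)
assert config.get_parameters("prompt1") == {"city": "Toronto"}

# Once the prompt defines its own parameters, those take precedence.
config.set_parameter("city", "New York", prompt_name="prompt1")
assert config.get_parameters("prompt1") == {"city": "New York"}

# With no argument, get_parameters returns the aiconfig-level parameters.
assert config.get_parameters() == {"city": "Toronto"}
```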