Skip to content

Commit

Permalink
Create update_model endpoint (#688)
Browse files Browse the repository at this point in the history
Create update_model endpoint




TSIA, backend implementation of `update_model()`. The other changes were
from running the auto-formatter:
```
fd --glob '*.py'  python/src/aiconfig/editor/server | xargs python -m 'scripts.lint' --mode=fix --files
```

## Test Plan
Follow dev README to setup the local editor:
https://github.com/lastmile-ai/aiconfig/tree/main/python/src/aiconfig/editor#dev,
then run this command
```
curl http://localhost:8080/api/update_model -X POST -H 'Content-Type: application/json' -d '{"prompt_name":"get_activities", "model_name": "gpt-4", "settings": {"top_p": 0.9}}'
```
Notice that the model name is updated to `gpt-4`, and `top_p` is now
set to 0.9.


https://github.com/lastmile-ai/aiconfig/assets/151060367/3031fa59-8925-495c-a5eb-e56ec65b7fba

---
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with
[ReviewStack](https://reviewstack.dev/lastmile-ai/aiconfig/pull/688).
* #693
* #692
* #691
* #690
* __->__ #688
* #670
* #668
  • Loading branch information
rossdanlm authored Jan 2, 2024
2 parents 6bb4f7c + 09ba906 commit 3fc6435
Show file tree
Hide file tree
Showing 4 changed files with 235 additions and 35 deletions.
17 changes: 16 additions & 1 deletion python/src/aiconfig/editor/server/server.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import logging
from typing import Any, Type
from typing import Any, Dict, Type

import lastmile_utils.lib.core.api as core_utils
import result
Expand Down Expand Up @@ -226,3 +226,18 @@ def delete_prompt() -> FlaskResponse:

operation = make_op_run_method(method_name)
return run_aiconfig_operation_with_request_json(aiconfig, request_json, f"method_{method_name}", operation, signature)


@app.route("/api/update_model", methods=["POST"])
def update_model() -> FlaskResponse:
    """Handle POST /api/update_model: run the AIConfig `update_model` method.

    Expects a JSON body with optional "model_name", "settings", and
    "prompt_name" keys; any key missing from the body is passed through
    as None to the underlying operation.
    """
    server_state = get_server_state(app)
    payload = request.get_json()

    # Pull the three optional arguments straight out of the request body;
    # dict.get yields None for absent keys, matching the method's defaults.
    op_args = OpArgs(
        {
            "model_name": payload.get("model_name"),
            "settings": payload.get("settings"),
            "prompt_name": payload.get("prompt_name"),
        }
    )
    operation = make_op_run_method(MethodName("update_model"))
    return run_aiconfig_operation_with_op_args(
        server_state.aiconfig,
        "update_model",
        operation,
        result.Ok(op_args),
    )
5 changes: 3 additions & 2 deletions python/src/aiconfig/editor/server/server_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,17 @@
from enum import Enum
from types import ModuleType
from typing import Any, Callable, NewType, Optional, Type, TypeVar, cast
from aiconfig.registry import ModelParserRegistry
from aiconfig.schema import Prompt, PromptMetadata

import lastmile_utils.lib.core.api as core_utils
import result
from aiconfig.Config import AIConfigRuntime
from aiconfig.registry import ModelParserRegistry
from flask import Flask
from pydantic import field_validator
from result import Err, Ok, Result

from aiconfig.schema import Prompt, PromptMetadata

MethodName = NewType("MethodName", str)

logging.getLogger("werkzeug").disabled = True
Expand Down
58 changes: 45 additions & 13 deletions python/src/aiconfig/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,25 +414,57 @@ def _get_prompt_parameters_exact(
return prompt.metadata
return prompt.metadata.parameters

def set_parameter(self, parameter_name: str, parameter_value, prompt_name: Optional[str] = None):
def set_parameter(
self,
parameter_name: str,
parameter_value : Union[str, JSONObject],
prompt_name: Optional[str] = None):
"""
Sets a parameter in the AI configuration metadata. If a prompt_name is specified, it adds the parameter to
a specific prompt's metadata in the AI configuration. Otherwise, it adds the parameter to the global metadata.
Sets a parameter in the AI configuration metadata. If a prompt_name
is specified, it adds the parameter to a specific prompt's metadata
in the AI configuration. Otherwise, it adds the parameter to the
global metadata.
Args:
parameter_name (str): The name of the parameter.
parameter_value: The value of the parameter. It can be more than just a string. It can be a string or a JSON object. For example:
{
person: {
firstname: "john",
lastname: "smith",
},
}
Using the parameter in a prompt with handlebars syntax would look like this:
"{{person.firstname}} {{person.lastname}}"
prompt_name (str, optional): The name of the prompt to add the parameter to. Defaults to None.
parameter_value: The value of the parameter. It can be more than
just a string. It can be a string or a JSON object. For
example:
{
person: {
firstname: "john",
lastname: "smith",
},
}
Using the parameter in a prompt with handlebars syntax would
look like this:
"{{person.firstname}} {{person.lastname}}"
prompt_name (str, optional): The name of the prompt to add the
parameter to. Defaults to None.
"""
target_metadata = self.get_metadata(prompt_name)
if not target_metadata:
# Technically this check is not needed since the metadata is a
# required field in Config while it is not required in Prompt.
# Therefore, if it's not defined, we can infer that it should
# be a PromptMetadata type, but this is just good robustness
# in case we ever change our schema in the future
if prompt_name:
prompt = self.get_prompt(prompt_name)
# check next line not needed since it's already assumed
# we got here because target_metadata is None, just being
# extra safe
if not prompt.metadata:
target_metadata = PromptMetadata(parameters={})
prompt.metadata = target_metadata
else:
if not self.metadata:
target_metadata = ConfigMetadata()
self.metadata = target_metadata

assert target_metadata is not None
if target_metadata.parameters is None:
target_metadata.parameters = {}
target_metadata.parameters[parameter_name] = parameter_value

def update_parameter(
Expand Down
190 changes: 171 additions & 19 deletions python/tests/test_parameter_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,43 +49,195 @@ def test_delete_nonexistent_parameter(ai_config_runtime: AIConfigRuntime):
config.delete_parameter(parameter_name_to_delete)


def test_set_global_parameter(ai_config: AIConfig):
def test_set_parameter_for_aiconfig_empty_params(ai_config: AIConfig):
"""
Test setting a global parameter.
Test setting a global parameter when there are no global params set before.
We should create a new set of params.
"""
parameter_name = "global_param"
parameter_value = "global_value"
prompt_name = "prompt1"
prompt_parameter_name = "prompt_param"
prompt_parameter_value = "prompt_value"
prompt = Prompt(
name=prompt_name,
input="This is a prompt",
metadata=PromptMetadata(
model="fakemodel",
parameters= {
prompt_parameter_name: prompt_parameter_value,
}
),
)
ai_config.add_prompt(prompt_name, prompt)

ai_config.set_parameter(parameter_name, parameter_value, prompt_name=None)
assert ai_config.metadata.parameters == {}
aiconfig_parameter_name = "aiconfig_param"
aiconfig_parameter_value = "aiconfig_value"
ai_config.set_parameter(aiconfig_parameter_name, aiconfig_parameter_value)

# Ensure the global parameter is set correctly
assert ai_config.metadata.parameters[parameter_name] == parameter_value
assert prompt.metadata is not None
assert prompt.metadata.parameters == {
prompt_parameter_name: prompt_parameter_value,
}
assert ai_config.metadata.parameters is not None
assert ai_config.metadata.parameters == {
aiconfig_parameter_name: aiconfig_parameter_value,
}

def test_set_parameter_for_aiconfig_has_parameters(ai_config: AIConfig):
"""
Test setting a global parameter when it already has parameters.
It should overwrite the value for key that is the same and keep
the others unchanged.
"""
prompt_name = "prompt1"
prompt_parameter_name = "prompt_param"
prompt_parameter_value = "prompt_value"
prompt = Prompt(
name=prompt_name,
input="This is a prompt",
metadata=PromptMetadata(
model="fakemodel",
parameters= {
prompt_parameter_name: prompt_parameter_value,
}
),
)
ai_config.add_prompt(prompt_name, prompt)

aiconfig_parameter_name = "aiconfig_param"
ai_config.metadata = ConfigMetadata(
parameters= {
"random_key": "keep this parameter",
aiconfig_parameter_name: "should update this value",
}
)
aiconfig_parameter_value = "aiconfig_value"
ai_config.set_parameter(aiconfig_parameter_name, aiconfig_parameter_value)

# Ensure the global parameter is set correctly
assert prompt.metadata is not None
assert prompt.metadata.parameters == {
prompt_parameter_name: prompt_parameter_value,
}
assert ai_config.metadata.parameters is not None
assert ai_config.metadata.parameters == {
"random_key": "keep this parameter",
aiconfig_parameter_name: aiconfig_parameter_value,
}

def test_set_parameter_for_prompt(ai_config: AIConfig):
def test_set_parameter_for_prompt_no_metadata(ai_config: AIConfig):
"""
Test setting a parameter for a specific prompt.
Test setting a prompt parameter when there is no prompt metadata.
"""
prompt_name = "prompt1"
parameter_name = "prompt_param"
parameter_value = "prompt_value"
prompt = Prompt(
name=prompt_name,
input="This is a prompt",
)
ai_config.add_prompt(prompt_name, prompt)

aiconfig_parameter_name = "aiconfig_param"
aiconfig_parameter_value = "aiconfig_value"
ai_config.metadata = ConfigMetadata(
parameters= {
aiconfig_parameter_name: aiconfig_parameter_value,
}
)

# Create a sample prompt for testing
prompt_data = Prompt(
assert prompt.metadata is None
prompt_parameter_name = "prompt_param"
prompt_parameter_value = "prompt_value"
ai_config.set_parameter(prompt_parameter_name, prompt_parameter_value, prompt_name)

# Ensure the prompt parameter is set correctly
assert prompt.metadata is not None
assert prompt.metadata.parameters == {
prompt_parameter_name: prompt_parameter_value,
}
assert ai_config.metadata.parameters is not None
assert ai_config.metadata.parameters == {
aiconfig_parameter_name: aiconfig_parameter_value,
}

def test_set_parameter_for_prompt_no_parameters(ai_config: AIConfig):
"""
Test setting a prompt parameter when there are no prompt parameters.
"""
prompt_name = "prompt1"
prompt = Prompt(
name=prompt_name,
input="This is a prompt",
metadata=PromptMetadata(model="fakemodel"),
)
ai_config.add_prompt(prompt_name, prompt_data)
ai_config.add_prompt(prompt_name, prompt)

aiconfig_parameter_name = "aiconfig_param"
aiconfig_parameter_value = "aiconfig_value"
ai_config.metadata = ConfigMetadata(
parameters= {
aiconfig_parameter_name: aiconfig_parameter_value,
}
)

ai_config.set_parameter(parameter_name, parameter_value, prompt_name=prompt_name)
assert prompt.metadata is not None
assert prompt.metadata.parameters == {}
prompt_parameter_name = "prompt_param"
prompt_parameter_value = "prompt_value"
ai_config.set_parameter(prompt_parameter_name, prompt_parameter_value, prompt_name)

# Ensure the prompt parameter is set correctly
assert prompt.metadata is not None
assert prompt.metadata.parameters == {
prompt_parameter_name: prompt_parameter_value,
}
assert ai_config.metadata.parameters is not None
assert ai_config.metadata.parameters == {
aiconfig_parameter_name: aiconfig_parameter_value,
}

# Ensure the parameter is set for the specific prompt
assert (
ai_config.prompt_index[prompt_name].metadata.parameters[parameter_name]
== parameter_value
def test_set_parameter_for_prompt_has_parameters(ai_config: AIConfig):
"""
Test setting a prompt parameter when it already has parameters.
It should overwrite the value for key that is the same and keep
the others unchanged.
"""
prompt_name = "prompt1"
prompt_parameter_name = "prompt_param"
prompt = Prompt(
name=prompt_name,
input="This is a prompt",
metadata=PromptMetadata(
model="fakemodel",
parameters= {
"random_key": "keep this parameter",
prompt_parameter_name: "should update this value",
}
),
)
ai_config.add_prompt(prompt_name, prompt)

aiconfig_parameter_name = "aiconfig_param"
aiconfig_parameter_value = "aiconfig_value"
ai_config.metadata = ConfigMetadata(
parameters= {
aiconfig_parameter_name: aiconfig_parameter_value,
}
)
assert ai_config.prompts[0].metadata.parameters[parameter_name] == parameter_value

prompt_parameter_value = "prompt_value"
ai_config.set_parameter(prompt_parameter_name, prompt_parameter_value, prompt_name)

# Ensure the prompt parameter is set correctly
assert prompt.metadata is not None
assert prompt.metadata.parameters == {
"random_key": "keep this parameter",
prompt_parameter_name: prompt_parameter_value,
}
assert ai_config.metadata.parameters is not None
assert ai_config.metadata.parameters == {
aiconfig_parameter_name: aiconfig_parameter_value,
}


def test_update_existing_parameter(ai_config: AIConfig):
Expand Down

0 comments on commit 3fc6435

Please sign in to comment.