Skip to content

Commit

Permalink
global replace for enable_prompt_extensions
Browse files Browse the repository at this point in the history
  • Loading branch information
xingyaoww committed Jan 16, 2025
1 parent 4c4d454 commit 9ee18fd
Show file tree
Hide file tree
Showing 22 changed files with 25 additions and 25 deletions.
2 changes: 1 addition & 1 deletion config.template.toml
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ codeact_enable_jupyter = true
#llm_config = 'your-llm-config-group'

# Whether to use prompt extensions (e.g., microagents, repo/runtime info) at all
#use_prompt_extension = true
#enable_prompt_extensions = true

# List of microagents to disable
#disabled_microagents = []
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,7 @@ Agent 配置选项在 `config.toml` 文件的 `[agent]` 和 `[agent.<agent_name>
- 描述: 是否在 action space 中启用 Jupyter

**Microagent 使用**
- `use_prompt_extension`
- `enable_prompt_extensions`
- 类型: `bool`
- 默认值: `true`
- 描述: 是否使用 microagents
Expand Down
2 changes: 1 addition & 1 deletion docs/modules/usage/configuration-options.md
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ The agent configuration options are defined in the `[agent]` and `[agent.<agent_
- Description: Whether Jupyter is enabled in the action space

### Microagent Usage
- `use_prompt_extension`
- `enable_prompt_extensions`
- Type: `bool`
- Default: `true`
- Description: Whether to use microagents at all
Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/EDA/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/agent_bench/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/aider_bench/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False

# copy 'draft_editor' config if exists
config_copy = copy.deepcopy(config)
Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/biocoder/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/bird/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/browsing_delegation/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/discoverybench/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
agent_config = AgentConfig(
function_calling=False,
codeact_enable_jupyter=True,
Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/gaia/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/gorilla/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/gpqa/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/humanevalfix/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/logic_reasoning/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/mint/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/ml_bench/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/toolqa/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
2 changes: 1 addition & 1 deletion evaluation/benchmarks/webarena/run_infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def get_config(
)
config.set_llm_config(metadata.llm_config)
agent_config = config.get_agent_config(metadata.agent_class)
agent_config.use_prompt_extension = False
agent_config.enable_prompt_extensions = False
return config


Expand Down
4 changes: 2 additions & 2 deletions openhands/agenthub/codeact_agent/codeact_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def __init__(
os.path.dirname(os.path.dirname(openhands.__file__)),
'microagents',
)
if self.config.use_prompt_extension
if self.config.enable_prompt_extensions
else None,
prompt_dir=os.path.join(os.path.dirname(__file__), 'prompts'),
disabled_microagents=self.config.disabled_microagents,
Expand Down Expand Up @@ -450,7 +450,7 @@ def _get_messages(self, state: State) -> list[Message]:

# Repository and runtime info
additional_info = self.prompt_manager.get_additional_info()
if self.config.use_prompt_extension and additional_info:
if self.config.enable_prompt_extensions and additional_info:
# only add these if prompt extension is enabled
messages.append(
Message(
Expand Down
4 changes: 2 additions & 2 deletions openhands/core/config/agent_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class AgentConfig:
memory_enabled: Whether long-term memory (embeddings) is enabled.
memory_max_threads: The maximum number of threads indexing at the same time for embeddings.
llm_config: The name of the llm config to use. If specified, this will override global llm config.
use_prompt_extension: Whether to use prompt extension (e.g., microagents, inject runtime info). Default is True.
enable_prompt_extensions: Whether to use prompt extensions (e.g., microagents, inject runtime info). Default is True.
disabled_microagents: A list of microagents to disable. Default is None.
condenser: Configuration for the memory condenser. Default is NoOpCondenserConfig.
"""
Expand All @@ -29,7 +29,7 @@ class AgentConfig:
memory_enabled: bool = False
memory_max_threads: int = 3
llm_config: str | None = None
use_prompt_extension: bool = True
enable_prompt_extensions: bool = True
disabled_microagents: list[str] | None = None
condenser: CondenserConfig = field(default_factory=NoOpCondenserConfig) # type: ignore

Expand Down
4 changes: 2 additions & 2 deletions tests/unit/test_codeact_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -471,7 +471,7 @@ def test_mock_function_calling():
llm = Mock()
llm.is_function_calling_active = lambda: False
config = AgentConfig()
config.use_prompt_extension = False
config.enable_prompt_extensions = False
agent = CodeActAgent(llm=llm, config=config)
assert agent.mock_function_calling is True

Expand Down Expand Up @@ -509,7 +509,7 @@ def test_step_with_no_pending_actions(mock_state: State):

# Create agent with mocked LLM
config = AgentConfig()
config.use_prompt_extension = False
config.enable_prompt_extensions = False
agent = CodeActAgent(llm=llm, config=config)

# Test step with no pending actions
Expand Down

0 comments on commit 9ee18fd

Please sign in to comment.