Add Claude 3 / Anthropic with cost calculation + convenience fixes. gpt-engineer is coauthor #1057

Closed · wants to merge 16 commits
.env.template (2 additions, 1 deletion)

@@ -1,4 +1,5 @@
 ### OpenAI Setup ###

 # OPENAI_API_KEY=Your personal OpenAI API key from https://platform.openai.com/account/api-keys
-OPENAI_API_KEY=$key
+OPENAI_API_KEY=...
+ANTHROPIC_API_KEY=...
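
For context, a minimal sketch of how these two variables could be consumed at startup, assuming the project loads its environment with python-dotenv (the .env.template convention suggests this, but the exact loading path is not part of this diff):

# Hypothetical startup snippet; variable names come from .env.template above.
import os

from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # populate os.environ from a local .env file

openai_key = os.getenv("OPENAI_API_KEY")
anthropic_key = os.getenv("ANTHROPIC_API_KEY")

if anthropic_key is None:
    # Claude 3 models cannot be used without the new key
    raise RuntimeError("ANTHROPIC_API_KEY is not set")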
gpt_engineer/applications/cli/cli_agent.py (9 additions, 12 deletions)

@@ -4,9 +4,8 @@
 and process the code through various steps defined in the step bundle.
 """

-from typing import Callable, Optional, TypeVar
+from typing import Callable, Optional, TypeAlias

-# from gpt_engineer.core.default.git_version_manager import GitVersionManager
 from gpt_engineer.core.ai import AI
 from gpt_engineer.core.base_agent import BaseAgent
 from gpt_engineer.core.base_execution_env import BaseExecutionEnv
@@ -18,18 +17,16 @@
     execute_entrypoint,
     gen_code,
     gen_entrypoint,
-    improve,
+    improve_fn,
 )
 from gpt_engineer.core.files_dict import FilesDict
 from gpt_engineer.core.preprompts_holder import PrepromptsHolder

-CodeGenType = TypeVar("CodeGenType", bound=Callable[[AI, str, BaseMemory], FilesDict])
-CodeProcessor = TypeVar(
-    "CodeProcessor", bound=Callable[[AI, BaseExecutionEnv, FilesDict], FilesDict]
-)
-ImproveType = TypeVar(
-    "ImproveType", bound=Callable[[AI, str, FilesDict, BaseMemory], FilesDict]
-)
+# from gpt_engineer.core.default.git_version_manager import GitVersionManager
+
+CodeGenType: TypeAlias = Callable[[AI, str, BaseMemory], FilesDict]
+CodeProcessor: TypeAlias = Callable[[AI, BaseExecutionEnv, FilesDict], FilesDict]
+ImproveType: TypeAlias = Callable[[AI, str, FilesDict, BaseMemory], FilesDict]


 class CliAgent(BaseAgent):
@@ -86,7 +83,7 @@ def __init__(
         execution_env: BaseExecutionEnv,
         ai: AI = None,
         code_gen_fn: CodeGenType = gen_code,
-        improve_fn: ImproveType = improve,
+        improve_fn: ImproveType = improve_fn,
         process_code_fn: CodeProcessor = execute_entrypoint,
         preprompts_holder: PrepromptsHolder = None,
     ):
@@ -105,7 +102,7 @@ def with_default_config(
         execution_env: DiskExecutionEnv,
         ai: AI = None,
         code_gen_fn: CodeGenType = gen_code,
-        improve_fn: ImproveType = improve,
+        improve_fn: ImproveType = improve_fn,
         process_code_fn: CodeProcessor = execute_entrypoint,
         preprompts_holder: PrepromptsHolder = None,
     ):
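
The hunks above replace bound TypeVars with plain TypeAliases. A TypeVar is a placeholder that only gains meaning inside a generic signature, so using one to annotate concrete default arguments was a misuse; TypeAlias (typing, Python 3.10+) declares exactly the intended shorthand. A small illustration with a hypothetical Handler alias rather than the project's types:

from typing import Callable, TypeAlias

# Before (misuse): a TypeVar names a type parameter, not a concrete type.
# HandlerT = TypeVar("HandlerT", bound=Callable[[str], str])

# After: an explicit alias that type checkers expand in place.
Handler: TypeAlias = Callable[[str], str]


def run(handler: Handler = str.upper, payload: str = "hello") -> str:
    # a defaulted callable parameter, mirroring how CliAgent
    # defaults code_gen_fn and improve_fn above
    return handler(payload)


print(run())  # HELLO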
gpt_engineer/applications/cli/collect.py (5 additions, 4 deletions)

@@ -22,7 +22,7 @@
 Consent logic is in gpt_engineer/learning.py.
 """

-from typing import Tuple
+from typing import Any, Tuple

 from gpt_engineer.applications.cli.learning import (
     Learning,
@@ -49,7 +49,7 @@ def send_learning(learning: Learning):
     improving gpt-engineer, and letting it handle more use cases.
     Consent logic is in gpt_engineer/learning.py.
     """
-    import rudderstack.analytics as rudder_analytics
+    import rudderstack.analytics as rudder_analytics  # fmt: skip

     rudder_analytics.write_key = "2Re4kqwL61GDp7S8ewe6K5dbogG"
     rudder_analytics.dataPlaneUrl = "https://gptengineerezm.dataplane.rudderstack.com"
@@ -65,7 +65,7 @@ def collect_learnings(
     prompt: str,
     model: str,
     temperature: float,
-    config: any,
+    config: Any,
     memory: DiskMemory,
     review: Review,
 ):
@@ -99,7 +99,8 @@ def collect_learnings(
     # try to remove some parts of learning that might be too big
     # rudderstack max event size is 32kb
     max_size = 32 << 10  # 32KB in bytes
-    current_size = len(learnings.to_json().encode("utf-8"))  # get size in bytes
+    learnings_bytes = learnings.to_json().encode("utf-8")  # type: ignore
+    current_size = len(learnings_bytes)  # get size in bytes

     overflow = current_size - max_size
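
The last hunk only splits the encoding into a learnings_bytes variable so the # type: ignore can target a single line. As a standalone sketch of the surrounding truncation logic (a stand-in dict replaces the real Learning object, and the trimming policy here is illustrative, not the PR's exact one):

import json

max_size = 32 << 10  # RudderStack's 32 KB per-event cap

learnings = {"model": "claude-3-opus-20240229", "logs": "x" * 50_000}  # stand-in
learnings_bytes = json.dumps(learnings).encode("utf-8")
current_size = len(learnings_bytes)  # size in bytes, not characters

overflow = current_size - max_size
if overflow > 0:
    # drop the overflowing tail of the biggest field, with a small margin
    learnings["logs"] = learnings["logs"][: -(overflow + 100)]

assert len(json.dumps(learnings).encode("utf-8")) <= max_size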
gpt_engineer/applications/cli/file_selector.py (12 additions, 22 deletions)

@@ -22,7 +22,7 @@
 import subprocess

 from pathlib import Path
-from typing import Any, Dict, List, Union
+from typing import Any, Dict, Generator, List, Union

 import toml
@@ -288,14 +288,22 @@ def get_files_from_toml(
     print(f"\nYou have selected the following files:\n{input_path}")

     project_path = Path(input_path).resolve()
-    all_paths = set(
+    selected_paths = set(
         project_path.joinpath(file).resolve(strict=False) for file in selected_files
     )

+    for displayable_path in DisplayablePath.make_tree(project_path):
+        if displayable_path.path in selected_paths:
+            p = displayable_path
+            while p.parent and p.parent.path not in selected_paths:
+                selected_paths.add(p.parent.path)
+                p = p.parent
+
     try:
         for displayable_path in DisplayablePath.make_tree(project_path):
-            if displayable_path.path in all_paths:
+            if displayable_path.path in selected_paths:
                 print(displayable_path.displayable())

     except FileNotFoundError:
         print("Specified path does not exist: ", project_path)
     except Exception as e:
@@ -378,24 +386,6 @@ def get_current_files(self, project_path: Union[str, Path]) -> List[str]:

         return all_files

-    def is_in_ignoring_extensions(self, path: Path) -> bool:
-        """
-        Checks if a file path should be ignored based on predefined criteria.
-
-        Parameters
-        ----------
-        path : Path
-            The path to the file to be checked.
-
-        Returns
-        -------
-        bool
-            True if the file should not be ignored, False otherwise.
-        """
-        is_hidden = not path.name.startswith(".")
-        is_pycache = "__pycache__" not in path.name
-        return is_hidden and is_pycache


 class DisplayablePath(object):
     """
@@ -444,7 +434,7 @@ def display_name(self) -> str:
     @classmethod
     def make_tree(
         cls, root: Union[str, Path], parent=None, is_last=False, criteria=None
-    ):
+    ) -> Generator["DisplayablePath", None, None]:
        """
        Creates a tree of DisplayablePath objects from a root directory.

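
The new loop in get_files_from_toml walks each selected file's ancestors and marks them selected too, so the printed tree includes the directories on the way down to each file. A self-contained sketch of the same idea with plain pathlib (paths are illustrative; Path.is_relative_to needs Python 3.9+):

from pathlib import Path

project_root = Path("proj").resolve()
selected_paths = {
    project_root / "src" / "app" / "main.py",
    project_root / "README.md",
}

# Mark every ancestor directory (up to and including the project root)
# as selected, mirroring the while-loop added in the diff above.
for path in list(selected_paths):
    parent = path.parent
    while parent.is_relative_to(project_root) and parent not in selected_paths:
        selected_paths.add(parent)
        parent = parent.parent

for p in sorted(selected_paths):
    print(p)  # proj, proj/README.md, proj/src, proj/src/app, proj/src/app/main.py

The new Generator["DisplayablePath", None, None] annotation on make_tree documents the existing behavior: the method yields tree nodes lazily rather than returning a list.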
gpt_engineer/applications/cli/learning.py (5 additions, 7 deletions)

@@ -117,8 +117,6 @@ class Learning:
     + "(ncertain): "
 )

-VALID_INPUTS = ("y", "n", "u")
-

 def human_review_input() -> Optional[Review]:
     """
@@ -141,17 +139,17 @@ def human_review_input() -> Optional[Review]:
     print()

     ran = input("Did the generated code run at all? " + TERM_CHOICES)
-    ran = ask_for_valid_input(ran, VALID_INPUTS)
+    ran = ask_for_valid_input(ran)

     if ran == "y":
         perfect = input(
             "Did the generated code do everything you wanted? " + TERM_CHOICES
         )
-        perfect = ask_for_valid_input(perfect, VALID_INPUTS)
+        perfect = ask_for_valid_input(perfect)

         if perfect != "y":
             useful = input("Did the generated code do anything useful? " + TERM_CHOICES)
-            useful = ask_for_valid_input(useful, VALID_INPUTS)
+            useful = ask_for_valid_input(useful)
         else:
             useful = ""
     else:
@@ -175,8 +173,8 @@ def human_review_input() -> Optional[Review]:
     )


-def ask_for_valid_input(ran, valid_inputs):
-    while ran not in valid_inputs:
+def ask_for_valid_input(ran):
+    while ran not in ("y", "n", "u"):
         ran = input("Invalid input. Please enter y, n, or u: ")
     return ran
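
Dropping the valid_inputs parameter (and the module-level VALID_INPUTS tuple) trades configurability for one less thing to thread through call sites; the accepted answers are now fixed inside the validator. A quick usage sketch of the simplified function:

def ask_for_valid_input(answer: str) -> str:
    # re-prompt until the user gives one of the three accepted answers
    while answer not in ("y", "n", "u"):
        answer = input("Invalid input. Please enter y, n, or u: ")
    return answer


ran = ask_for_valid_input(input("Did the generated code run at all? (y/n/u): "))
print("recorded:", ran)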