Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
version: "3.9"

services:
gpt-engineer:
build:
context: .
dockerfile: docker/Dockerfile
image: gpt-engineer
stdin_open: true
tty: true
# Set the API key from the .env file
env_file:
- .env
## OR set the API key directly
# OR set the API key directly:
# environment:
# - OPENAI_API_KEY=YOUR_API_KEY
image: gpt-engineer
# - OPENAI_API_KEY=YOUR_API_KEY
volumes:
- ./projects/example:/project
28 changes: 20 additions & 8 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,29 +1,41 @@
# Stage 1: Builder stage
FROM python:3.11-slim AS builder
FROM python:3.13-slim AS builder

# Install necessary OS packages
RUN apt-get update && apt-get install -y --no-install-recommends \
tk \
tcl \
curl \
git \
&& rm -rf /var/lib/apt/lists/*
tk \
tcl \
curl \
git \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy source code
COPY . .

# Install dependencies
RUN pip install --no-cache-dir -e .
RUN pip install --no-cache-dir "openai>=1.0.0" backoff langchain_google_genai

# Stage 2: Final stage
FROM python:3.11-slim
FROM python:3.13-slim

WORKDIR /app

COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages
# Copy installed Python packages and binaries from builder
COPY --from=builder /usr/local/lib/python3.13/site-packages /usr/local/lib/python3.13/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin
# NOTE(review): copying all of /usr/bin from the builder wholesale overwrites the
# final stage's binaries and bloats the image — consider copying only the
# specific tools actually needed at runtime (e.g. git).
COPY --from=builder /usr/bin /usr/bin

# Copy app code
COPY --from=builder /app .

# Copy entrypoint and ensure Unix line endings
COPY docker/entrypoint.sh .

# Make entrypoint executable
RUN chmod +x /app/entrypoint.sh

# Set entrypoint
ENTRYPOINT ["bash", "/app/entrypoint.sh"]
12 changes: 8 additions & 4 deletions docker/entrypoint.sh
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
#!/usr/bin/env bash
# -*- coding: utf-8 -*-

set -e # Exit immediately if a command exits with non-zero status

project_dir="/project"

# Run the gpt engineer script
gpt-engineer $project_dir "$@"
# Run gpt-engineer with all passed arguments
gpt-engineer "$project_dir" "$@"

# Patch the permissions of the generated files to be owned by nobody except prompt file
find "$project_dir" -mindepth 1 -maxdepth 1 ! -path "$project_dir/prompt" -exec chown -R nobody:nogroup {} + -exec chmod -R 777 {} +
# Patch permissions of generated files
# NOTE(review): chmod -R 777 makes output world-writable — prefer tighter
# permissions (e.g. 775 dirs / 664 files) scoped to the intended group.
find "$project_dir" -mindepth 1 -maxdepth 1 ! -path "$project_dir/prompt" \
-exec chown -R nobody:nogroup {} + \
-exec chmod -R 777 {} +
13 changes: 12 additions & 1 deletion gpt_engineer/core/ai.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@
)
from langchain_anthropic import ChatAnthropic
from langchain_openai import AzureChatOpenAI, ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI as ChatGemini  # package exports ChatGoogleGenerativeAI, not ChatGemini; alias keeps call sites working


from gpt_engineer.core.token_usage import TokenUsageLog

Expand Down Expand Up @@ -250,7 +252,7 @@ def next(

return messages

@backoff.on_exception(backoff.expo, openai.RateLimitError, max_tries=7, max_time=45)
@backoff.on_exception(backoff.expo, openai.RateLimitError, max_tries=7, max_time=45)
def backoff_inference(self, messages):
"""
Perform inference using the language model while implementing an exponential backoff strategy.
Expand Down Expand Up @@ -362,6 +364,14 @@ def _create_chat_model(self) -> BaseChatModel:
streaming=self.streaming,
max_tokens_to_sample=4096,
)
elif "gemini" in self.model_name:
return ChatGemini(
model=self.model_name,
temperature=self.temperature,
streaming=self.streaming,
callbacks=[StreamingStdOutCallbackHandler()],
max_output_tokens=4096,
)
elif self.vision:
return ChatOpenAI(
model=self.model_name,
Expand All @@ -379,6 +389,7 @@ def _create_chat_model(self) -> BaseChatModel:
)



def serialize_messages(messages: List[Message]) -> str:
return AI.serialize_messages(messages)

Expand Down
Loading