10 changes: 10 additions & 0 deletions .env Azure sample
@@ -0,0 +1,10 @@
# OpenAI Settings
OPENAI_API_KEY=your_openai_key
AI_MODEL=o1-mini

# Azure OpenAI Settings
USE_AZURE=false # Set to "true" to use Azure OpenAI
AZURE_OPENAI_API_KEY=your_azure_key
AZURE_OPENAI_API_VERSION=2024-10-01-preview
AZURE_OPENAI_ENDPOINT=https://your-resource-name.openai.azure.com
AZURE_DEPLOYMENT_NAME=your_deployment_name
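For reference, a minimal sketch of how these settings can be loaded and checked with python-dotenv; the fail-fast validation below is illustrative and not part of this change:

```python
import os
from dotenv import load_dotenv

load_dotenv()  # pull the values from the .env file shown above into the environment

USE_AZURE = os.getenv("USE_AZURE", "false").lower() == "true"

if USE_AZURE:
    # Illustrative check: stop early if any Azure setting is missing.
    required = [
        "AZURE_OPENAI_API_KEY",
        "AZURE_OPENAI_API_VERSION",
        "AZURE_OPENAI_ENDPOINT",
        "AZURE_DEPLOYMENT_NAME",
    ]
    missing = [name for name in required if not os.getenv(name)]
    if missing:
        raise RuntimeError(f"Missing Azure OpenAI settings: {', '.join(missing)}")
```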
1 change: 1 addition & 0 deletions README.md
@@ -6,6 +6,7 @@ A command-line tool designed to assist developers in managing and interacting wi

## NEW

Added Azure OpenAI as another option. To use Azure OpenAI, set USE_AZURE to "true" and add your AZURE_OPENAI_API_KEY, AZURE_OPENAI_API_VERSION, AZURE_OPENAI_ENDPOINT, and AZURE_DEPLOYMENT_NAME to your .env file.
Added Grok Engineer to the repo. Make sure you add your XAI_API_KEY to your .env file.
Added Streaming.
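For context, the USE_AZURE toggle described above condenses to the following sketch, mirroring the o1-eng.py change later in this diff; with AzureOpenAI, the deployment name is what gets passed as the model on each request:

```python
import os
from openai import OpenAI, AzureOpenAI
from dotenv import load_dotenv

load_dotenv()

if os.getenv("USE_AZURE", "false").lower() == "true":
    # Requests go to a named deployment on your Azure OpenAI resource.
    client = AzureOpenAI(
        api_key=os.getenv("AZURE_OPENAI_API_KEY"),
        api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    )
    model = os.getenv("AZURE_DEPLOYMENT_NAME")  # deployment name doubles as the model id
else:
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    model = os.getenv("AI_MODEL", "o1-mini")
```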

45 changes: 32 additions & 13 deletions o1-eng.py
@@ -2,7 +2,7 @@
import fnmatch
import logging
import time
from openai import OpenAI
from openai import OpenAI, AzureOpenAI
from dotenv import load_dotenv
from termcolor import colored
from prompt_toolkit import prompt
@@ -18,9 +18,18 @@

load_dotenv()

MODEL = "o1-mini"
# Initialize OpenAI client
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
MODEL = os.getenv("AI_MODEL", "o1-mini") # Default to o1-mini if not specified
USE_AZURE = os.getenv("USE_AZURE", "false").lower() == "true"

# Initialize the client for the selected provider (Azure OpenAI or OpenAI)
if USE_AZURE:
client = AzureOpenAI(
api_key=os.getenv("AZURE_OPENAI_API_KEY"),
api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT")
)
else:
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))


CREATE_SYSTEM_PROMPT = """You are an advanced o1 engineer designed to create files and folders based on user instructions. Your primary objective is to generate the content of the files to be created as code blocks. Each code block should specify whether it's a file or folder, along with its path.
@@ -437,13 +446,22 @@ def chat_with_ai(user_message, is_edit_request=False, retry_count=0, added_files
print(colored("o1 engineer is thinking...", "magenta"))
logging.info("Sending general query to AI.")

# Create streaming response
stream = client.chat.completions.create(
model=MODEL,
messages=messages,
max_completion_tokens=60000,
stream=True
)
# Create streaming response based on provider
if USE_AZURE:
deployment_name = os.getenv("AZURE_DEPLOYMENT_NAME")
stream = client.chat.completions.create(
model=deployment_name,
messages=messages,
max_tokens=60000,
stream=True
)
else:
stream = client.chat.completions.create(
model=MODEL,
messages=messages,
max_completion_tokens=60000,
stream=True
)

# Initialize response content
response_content = ""
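The loop that consumes the stream is collapsed in this view; with the v1 openai client it typically looks like the sketch below (continuing from the stream and response_content above; names are illustrative):

```python
for chunk in stream:
    # Some providers (Azure in particular) can emit chunks with no choices, e.g. content-filter events.
    if chunk.choices and chunk.choices[0].delta.content:
        piece = chunk.choices[0].delta.content
        response_content += piece
        print(piece, end="", flush=True)
print()  # end the line once the stream finishes
```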
@@ -469,8 +487,9 @@ def chat_with_ai(user_message, is_edit_request=False, retry_count=0, added_files

return last_ai_response
except Exception as e:
print(colored(f"\nError while communicating with OpenAI: {e}", "red"))
logging.error(f"Error while communicating with OpenAI: {e}")
provider = "Azure OpenAI" if USE_AZURE else "OpenAI"
print(colored(f"\nError while communicating with {provider}: {e}", "red"))
logging.error(f"Error while communicating with {provider}: {e}")
return None


1 change: 1 addition & 0 deletions requirements.txt
@@ -2,3 +2,4 @@
termcolor
prompt_toolkit
rich
python-dotenv