40 changes: 40 additions & 0 deletions .github/workflows/trigger-pr-review-api.yml
@@ -0,0 +1,40 @@
name: Trigger an Automated Code Review API Call for a newly created PR

on:
pull_request:
types:
- opened
- reopened

jobs:
call-api:
runs-on: ubuntu-latest

steps:
# Checkout the repository code (optional)
- name: Checkout code
uses: actions/checkout@v4

      # Make the API call with additional PR fields
      - name: Trigger Automated Code Review API
        env:
          API_TOKEN: ${{ secrets.CODE_REVIEW_API_KEY }}
          API_URL: "https://api.codeominous.com/prrequest"
          # Pass free-text PR fields through env so quotes or newlines in them
          # are not interpolated directly into the shell script.
          PR_TITLE: ${{ github.event.pull_request.title }}
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          # Build the payload with jq so every field is correctly JSON-escaped.
          payload=$(jq -n \
            --arg pr_event_type "${{ github.event.action }}" \
            --arg pr_number "${{ github.event.pull_request.number }}" \
            --arg repo "${{ github.repository }}" \
            --arg pr_title "$PR_TITLE" \
            --arg user_login "${{ github.event.pull_request.user.login }}" \
            --arg created_at "${{ github.event.pull_request.created_at }}" \
            --arg pr_state "${{ github.event.pull_request.state }}" \
            --arg pr_body "$PR_BODY" \
            --arg html_url "${{ github.event.pull_request.html_url }}" \
            --arg head_sha "${{ github.event.pull_request.head.sha }}" \
            --arg base_ref "${{ github.event.pull_request.base.ref }}" \
            '$ARGS.named')
          curl -X POST \
            -H "Authorization: Bearer $API_TOKEN" \
            -H "Content-Type: application/json" \
            -d "$payload" \
            "$API_URL"
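For reference, the step above POSTs a flat JSON object. The service behind `https://api.codeominous.com/prrequest` is not part of this PR, so the following is only a hypothetical sketch of a receiver that accepts the same fields (all values arrive as strings, matching the workflow's payload):

```python
# pr_request_receiver.py - hypothetical stub mirroring the workflow's JSON payload
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class PRRequestPayload(BaseModel):
    # Field names match the keys the workflow sends; all values are strings.
    pr_event_type: str
    pr_number: str
    repo: str
    pr_title: str
    user_login: str
    created_at: str
    pr_state: str
    pr_body: str
    html_url: str
    head_sha: str
    base_ref: str

@app.post("/prrequest")
async def receive_pr_request(payload: PRRequestPayload):
    # A real service would enqueue the PR for automated review here.
    return {"status": "accepted", "pr_number": payload.pr_number}
```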
11 changes: 11 additions & 0 deletions README.md
@@ -1,2 +1,13 @@
# pull-request-automated-chat
WebSocket client chat with an LLM.

Client connection:
`ws://<your-host>:8080/ws/chat/<user_id>/<session_id>`

Local testing with the bundled test client (a Python sketch follows below as a browser-free alternative):
1. Build the image: `docker build -t chat-ws-app .`
2. Start the container: `docker run -p 8080:8080 chat-ws-app`
3. Open `pull-request-automated-chat\test_client\chat_client.html` in a browser.
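For a browser-free check of the same endpoint, a minimal sketch using the third-party `websockets` package (not listed in this repo's requirements, so installing it is an assumption):

```python
# ws_smoke_test.py - manual test sketch; assumes `pip install websockets`
import asyncio
import websockets

async def main() -> None:
    # Same path shape the server exposes: /ws/chat/<user_id>/<session_id>
    uri = "ws://localhost:8080/ws/chat/user123/session456"
    async with websockets.connect(uri) as ws:
        await ws.send("Hello from the test client!")
        reply = await ws.recv()
        print("Server replied:", reply)

asyncio.run(main())
```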

55 changes: 55 additions & 0 deletions pr_chat/.gitignore
@@ -0,0 +1,55 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# Environment
.env
.env.*
env.json

# Logs
*.log

# Python virtual environments
.venv/
venv/
ENV/
chroma_load/chroma_venv/

# IDEs and editors
.vscode/
.idea/
*.sublime-project
*.sublime-workspace

# Jupyter Notebook checkpoints
.ipynb_checkpoints/

# Pytest cache
.pytest_cache/

# mypy
.mypy_cache/

# Coverage reports
htmlcov/
.coverage
coverage.xml

# Distribution / packaging
build/
dist/
*.egg-info/
.eggs/

# macOS
.DS_Store

# Other
*.db
*.sqlite3
21 changes: 21 additions & 0 deletions pr_chat/Dockerfile
@@ -0,0 +1,21 @@
# Dockerfile

FROM python:3.10-slim

WORKDIR /app

# Install OS dependencies
RUN apt-get update && apt-get install -y build-essential

COPY static/ ./static
# Copy requirements and install
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy app code
COPY . .

# Run server
EXPOSE 8080
CMD ["uvicorn", "chat_ws_api:app", "--host", "0.0.0.0", "--port", "8080"]

Empty file added pr_chat/__init__.py
Empty file.
46 changes: 46 additions & 0 deletions pr_chat/chat_api.py
@@ -0,0 +1,46 @@
# chat_api.py

from fastapi import FastAPI
from pydantic import BaseModel
import redis
import json
from langchain_anthropic import ChatAnthropic
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

app = FastAPI()

# Redis client (adjust host for production)
r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)

# Claude setup
llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.7)

class ChatRequest(BaseModel):
user_id: str
session_id: str
message: str

@app.post("/chat/send")
async def chat_send(req: ChatRequest):
session_key = f"chat:{req.session_id}"

# Load existing conversation from Redis
history_json = r.get(session_key)
history = json.loads(history_json) if history_json else []

# Load into LangChain memory
memory = ConversationBufferMemory(return_messages=True)
for turn in history:
memory.chat_memory.add_user_message(turn["user"])
memory.chat_memory.add_ai_message(turn["ai"])

# Run the chain
chain = ConversationChain(llm=llm, memory=memory)
response = chain.run(req.message)

# Save updated conversation
history.append({"user": req.message, "ai": response})
r.set(session_key, json.dumps(history), ex=3600) # TTL 1 hour

return {"reply": response}
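To exercise `/chat/send` manually, a request like the sketch below could be used. Note that the Dockerfile in this PR serves `chat_ws_api:app`, not this module, so the host and port here are assumptions for a local `uvicorn chat_api:app` run; `httpx` is available via the `fastapi[all]` extra in requirements.txt.

```python
# chat_api_smoke.py - sketch of a manual call to the REST endpoint (host/port assumed)
import httpx

resp = httpx.post(
    "http://localhost:8000/chat/send",
    json={
        "user_id": "user123",
        "session_id": "session456",
        "message": "Hello, what can you help me with?",
    },
    timeout=60.0,
)
resp.raise_for_status()
print(resp.json()["reply"])
```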
74 changes: 74 additions & 0 deletions pr_chat/chat_ws_api.py
@@ -0,0 +1,74 @@
# chat_ws_api.py
import os
import json
import redis
import boto3
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from langchain_anthropic import ChatAnthropic
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableWithMessageHistory
from langchain_community.chat_message_histories import RedisChatMessageHistory

# FastAPI app
app = FastAPI()

# Parameter Store (SSM) for API key
ssm = boto3.client('ssm', region_name='us-east-1')
ANTHROPIC_API_KEY = ssm.get_parameter(
Name="/prreview/ANTHROPIC_API_KEY",
WithDecryption=True
)['Parameter']['Value']
os.environ["ANTHROPIC_API_KEY"] = ANTHROPIC_API_KEY

# Claude setup
llm = ChatAnthropic(model="claude-3-7-sonnet-20250219", temperature=0.7)

# Prompt (MessagesPlaceholder lets RunnableWithMessageHistory inject prior turns from Redis)
prompt = ChatPromptTemplate.from_messages([
    ("system", "You're a helpful assistant."),
    MessagesPlaceholder(variable_name="messages"),
    ("human", "{input}")
])

# Redis config
redis_host = os.environ.get("REDIS_HOST", "localhost")
redis_port = int(os.environ.get("REDIS_PORT", "6379"))

# Chain with memory
def get_history(session_id: str):
return RedisChatMessageHistory(session_id=session_id, url=f"redis://{redis_host}:{redis_port}")

chat_chain = RunnableWithMessageHistory(
prompt | llm | StrOutputParser(),
get_session_history=get_history,
input_messages_key="input",
history_messages_key="messages"
)

# WebSocket chat endpoint
@app.websocket("/ws/chat/{user_id}/{session_id}")
async def websocket_chat(websocket: WebSocket, user_id: str, session_id: str):
await websocket.accept()
try:
while True:
message = await websocket.receive_text()
print(f"Received message from {user_id}/{session_id}: {message}")

            # Use the async API so the model call does not block the event loop
            response = await chat_chain.ainvoke(
                {"input": message},
                config={"configurable": {"session_id": session_id}}
            )

await websocket.send_text(response)

except WebSocketDisconnect:
print(f"Client disconnected: {user_id}/{session_id}")

# Serve static chat client
@app.get("/")
async def serve_index():
return FileResponse("static/index.html")

app.mount("/static", StaticFiles(directory="static", html=True), name="static")
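A quick way to smoke-test the WebSocket route without a browser is FastAPI's TestClient. This is only a sketch: importing `chat_ws_api` runs the SSM lookup and mounts `static/`, so it assumes AWS credentials, a reachable Redis instance, and being run from the `pr_chat` directory.

```python
# test_ws_smoke.py - sketch; importing chat_ws_api needs SSM access, Redis, and ./static
from fastapi.testclient import TestClient

from chat_ws_api import app

client = TestClient(app)

with client.websocket_connect("/ws/chat/user123/session456") as ws:
    ws.send_text("Hello!")
    reply = ws.receive_text()
    print("Claude replied:", reply)
```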
23 changes: 23 additions & 0 deletions pr_chat/docker-compose.yml
@@ -0,0 +1,23 @@
services:
chat-api:
build:
context: .
dockerfile: Dockerfile
container_name: chat-api
ports:
- "8080:8080"
environment:
REDIS_HOST: redis
REDIS_PORT: 6379
AWS_REGION: us-east-1
volumes:
- ~/.aws:/root/.aws:ro
depends_on:
- redis

redis:
image: redis:7
container_name: chat-redis
ports:
- "6379:6379"
restart: always
12 changes: 12 additions & 0 deletions pr_chat/requirements.txt
@@ -0,0 +1,12 @@
langchain>=0.1.14
langchain-community>=0.0.30
langchain-core>=0.1.33
langchain-anthropic>=0.1.5
redis
boto3
python-dotenv
python-dateutil
PyGithub
uvicorn[standard]
fastapi[all]
58 changes: 58 additions & 0 deletions pr_chat/static/index.html
@@ -0,0 +1,58 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>LangChain Chat</title>
<style>
body { font-family: Arial, sans-serif; padding: 20px; }
#chat { border: 1px solid #ccc; padding: 10px; height: 300px; overflow-y: scroll; }
.user { color: blue; }
.bot { color: green; }
</style>
</head>
<body>
<h2>WebSocket Chat with Claude (LangChain)</h2>

<label>User ID: <input id="userId" value="user123"></label><br>
<label>Session ID: <input id="sessionId" value="session456"></label><br>
<button onclick="connect()">Connect</button>

<div id="chat"></div>

<input id="messageInput" type="text" placeholder="Type your message here..." style="width: 80%;">
<button onclick="sendMessage()">Send</button>

<script>
let socket;

function connect() {
const userId = document.getElementById("userId").value;
const sessionId = document.getElementById("sessionId").value;
const url = `ws://localhost:8080/ws/chat/${userId}/${sessionId}`;

socket = new WebSocket(url);

socket.onmessage = function(event) {
const msg = event.data;
document.getElementById("chat").innerHTML += `<div class='bot'><strong>Claude:</strong> ${msg}</div>`;
};

socket.onopen = function() {
document.getElementById("chat").innerHTML += "<div><em>Connected to chat server.</em></div>";
};

socket.onclose = function() {
document.getElementById("chat").innerHTML += "<div><em>Disconnected.</em></div>";
};
}

function sendMessage() {
const input = document.getElementById("messageInput");
const msg = input.value;
input.value = "";
document.getElementById("chat").innerHTML += `<div class='user'><strong>You:</strong> ${msg}</div>`;
socket.send(msg);
}
</script>
</body>
</html>