From 710642a5a4320aa4021af8f51f3e3e5d00e2d1c7 Mon Sep 17 00:00:00 2001 From: ississippi Date: Sat, 17 May 2025 17:27:50 -0400 Subject: [PATCH 1/2] Initial checkin. This version uses redis for history. Will be changing to DynamoDB in the next iteration. Chat is fully functional using the test harness index.html --- README.md | 11 ++++++ pr_chat/.gitignore | 55 ++++++++++++++++++++++++++++ pr_chat/Dockerfile | 21 +++++++++++ pr_chat/__init__.py | 0 pr_chat/chat_api.py | 46 ++++++++++++++++++++++++ pr_chat/chat_ws_api.py | 74 ++++++++++++++++++++++++++++++++++++++ pr_chat/docker-compose.yml | 23 ++++++++++++ pr_chat/requirements.txt | 12 +++++++ pr_chat/static/index.html | 58 ++++++++++++++++++++++++++++++ 9 files changed, 300 insertions(+) create mode 100644 pr_chat/.gitignore create mode 100644 pr_chat/Dockerfile create mode 100644 pr_chat/__init__.py create mode 100644 pr_chat/chat_api.py create mode 100644 pr_chat/chat_ws_api.py create mode 100644 pr_chat/docker-compose.yml create mode 100644 pr_chat/requirements.txt create mode 100644 pr_chat/static/index.html diff --git a/README.md b/README.md index 43d5764..8a7d5f9 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,13 @@ # pull-request-automated-chat Websockets client chat with LLM + +client connection: +ws://<host>:8080/ws/chat/<user_id>/<session_id> + +Local testing: +Use the test client for local testing: +1. Build: docker build -t chat-ws-app . +2. Start the container: + docker run -p 8080:8080 chat-ws-app +3. Open pull-request-automated-chat\test_client\chat_client.html from a browser. 
+ diff --git a/pr_chat/.gitignore b/pr_chat/.gitignore new file mode 100644 index 0000000..d917a9b --- /dev/null +++ b/pr_chat/.gitignore @@ -0,0 +1,55 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Environment +.env +.env.* +env.json + +# Logs +*.log + +# Python virtual environments +.venv/ +venv/ +ENV/ +chroma_load/chroma_venv/ + +# IDEs and editors +.vscode/ +.idea/ +*.sublime-project +*.sublime-workspace + +# Jupyter Notebook checkpoints +.ipynb_checkpoints/ + +# Pytest cache +.pytest_cache/ + +# mypy +.mypy_cache/ + +# Coverage reports +htmlcov/ +.coverage +coverage.xml + +# Distribution / packaging +build/ +dist/ +*.egg-info/ +.eggs/ + +# macOS +.DS_Store + +# Other +*.db +*.sqlite3 + + +# Jupyter Notebooks checkpoints +notebooks/.ipynb_checkpoints diff --git a/pr_chat/Dockerfile b/pr_chat/Dockerfile new file mode 100644 index 0000000..df8e90b --- /dev/null +++ b/pr_chat/Dockerfile @@ -0,0 +1,21 @@ +# Dockerfile + +FROM python:3.10-slim + +WORKDIR /app + +# Install OS dependencies +RUN apt-get update && apt-get install -y build-essential + +COPY static/ ./static +# Copy requirements and install +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy app code +COPY . . 
+ +# Run server +EXPOSE 8080 +CMD ["uvicorn", "chat_ws_api:app", "--host", "0.0.0.0", "--port", "8080"] + diff --git a/pr_chat/__init__.py b/pr_chat/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pr_chat/chat_api.py b/pr_chat/chat_api.py new file mode 100644 index 0000000..45f76c1 --- /dev/null +++ b/pr_chat/chat_api.py @@ -0,0 +1,46 @@ +# chat_api.py + +from fastapi import FastAPI +from pydantic import BaseModel +import redis +import json +from langchain.chat_models import ChatAnthropic +from langchain.chains import ConversationChain +from langchain.memory import ConversationBufferMemory + +app = FastAPI() + +# Redis client (adjust host for production) +r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True) + +# Claude setup +llm = ChatAnthropic(model="claude-3-sonnet-20240229", temperature=0.7) + +class ChatRequest(BaseModel): + user_id: str + session_id: str + message: str + +@app.post("/chat/send") +async def chat_send(req: ChatRequest): + session_key = f"chat:{req.session_id}" + + # Load existing conversation from Redis + history_json = r.get(session_key) + history = json.loads(history_json) if history_json else [] + + # Load into LangChain memory + memory = ConversationBufferMemory(return_messages=True) + for turn in history: + memory.chat_memory.add_user_message(turn["user"]) + memory.chat_memory.add_ai_message(turn["ai"]) + + # Run the chain + chain = ConversationChain(llm=llm, memory=memory) + response = chain.run(req.message) + + # Save updated conversation + history.append({"user": req.message, "ai": response}) + r.set(session_key, json.dumps(history), ex=3600) # TTL 1 hour + + return {"reply": response} diff --git a/pr_chat/chat_ws_api.py b/pr_chat/chat_ws_api.py new file mode 100644 index 0000000..30fce80 --- /dev/null +++ b/pr_chat/chat_ws_api.py @@ -0,0 +1,74 @@ +# chat_ws_api.py +import os +import json +import redis +import boto3 +from fastapi import FastAPI, WebSocket, WebSocketDisconnect +from 
fastapi.staticfiles import StaticFiles +from fastapi.responses import FileResponse +from langchain_anthropic import ChatAnthropic +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.output_parsers import StrOutputParser +from langchain_core.runnables import RunnableWithMessageHistory +from langchain.memory.chat_message_histories import RedisChatMessageHistory + +# FastAPI app +app = FastAPI() + +# Parameter Store (SSM) for API key +ssm = boto3.client('ssm', region_name='us-east-1') +ANTHROPIC_API_KEY = ssm.get_parameter( + Name="/prreview/ANTHROPIC_API_KEY", + WithDecryption=True +)['Parameter']['Value'] +os.environ["ANTHROPIC_API_KEY"] = ANTHROPIC_API_KEY + +# Claude setup +llm = ChatAnthropic(model="claude-3-7-sonnet-20250219", temperature=0.7) + +# Prompt +prompt = ChatPromptTemplate.from_messages([ + ("system", "You're a helpful assistant."), + ("human", "{input}") +]) + +# Redis config +redis_host = os.environ.get("REDIS_HOST", "localhost") +redis_port = int(os.environ.get("REDIS_PORT", "6379")) + +# Chain with memory +def get_history(session_id: str): + return RedisChatMessageHistory(session_id=session_id, url=f"redis://{redis_host}:{redis_port}") + +chat_chain = RunnableWithMessageHistory( + prompt | llm | StrOutputParser(), + get_session_history=get_history, + input_messages_key="input", + history_messages_key="messages" +) + +# WebSocket chat endpoint +@app.websocket("/ws/chat/{user_id}/{session_id}") +async def websocket_chat(websocket: WebSocket, user_id: str, session_id: str): + await websocket.accept() + try: + while True: + message = await websocket.receive_text() + print(f"Received message from {user_id}/{session_id}: {message}") + + response = chat_chain.invoke( + {"input": message}, + config={"configurable": {"session_id": session_id}} + ) + + await websocket.send_text(response) + + except WebSocketDisconnect: + print(f"Client disconnected: {user_id}/{session_id}") + +# Serve static chat client +@app.get("/") +async def 
serve_index(): + return FileResponse("static/index.html") + +app.mount("/static", StaticFiles(directory="static", html=True), name="static") diff --git a/pr_chat/docker-compose.yml b/pr_chat/docker-compose.yml new file mode 100644 index 0000000..1070db3 --- /dev/null +++ b/pr_chat/docker-compose.yml @@ -0,0 +1,23 @@ +services: + chat-api: + build: + context: . + dockerfile: Dockerfile + container_name: chat-api + ports: + - "8080:8080" + environment: + REDIS_HOST: redis + REDIS_PORT: 6379 + AWS_REGION: us-east-1 + volumes: + - ~/.aws:/root/.aws:ro + depends_on: + - redis + + redis: + image: redis:7 + container_name: chat-redis + ports: + - "6379:6379" + restart: always diff --git a/pr_chat/requirements.txt b/pr_chat/requirements.txt new file mode 100644 index 0000000..2b0ac96 --- /dev/null +++ b/pr_chat/requirements.txt @@ -0,0 +1,12 @@ +langchain>=0.1.14 +langchain-community>=0.0.30 +langchain-core>=0.1.33 +langchain-anthropic>=0.1.5 +langchain-core>=0.1.33 +redis +boto3 +python-dotenv +python-dateutil +PyGithub +uvicorn[standard] +fastapi[all] diff --git a/pr_chat/static/index.html b/pr_chat/static/index.html new file mode 100644 index 0000000..1de307e --- /dev/null +++ b/pr_chat/static/index.html @@ -0,0 +1,58 @@ + + + + + LangChain Chat + + + +

WebSocket Chat with Claude (LangChain)

+ +
+
+ + +
+ + + + + + + From acf85450597201cf119689af74843cc3619a2302 Mon Sep 17 00:00:00 2001 From: ississippi Date: Sat, 17 May 2025 17:32:02 -0400 Subject: [PATCH 2/2] add actions workflow --- .github/workflows/trigger-pr-review-api.yml | 40 +++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 .github/workflows/trigger-pr-review-api.yml diff --git a/.github/workflows/trigger-pr-review-api.yml b/.github/workflows/trigger-pr-review-api.yml new file mode 100644 index 0000000..3e32aab --- /dev/null +++ b/.github/workflows/trigger-pr-review-api.yml @@ -0,0 +1,40 @@ +name: Trigger an Automated Code Review API Call for a newly created PR + +on: + pull_request: + types: + - opened + - reopened + +jobs: + call-api: + runs-on: ubuntu-latest + + steps: + # Checkout the repository code (optional) + - name: Checkout code + uses: actions/checkout@v4 + + # Make the API call with additional PR fields + - name: Trigger Automated Code Review API + env: + API_TOKEN: ${{ secrets.CODE_REVIEW_API_KEY }} + API_URL: "https://api.codeominous.com/prrequest" + run: | + curl -X POST \ + -H "Authorization: Bearer $API_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "pr_event_type": "${{ github.event.action }}", + "pr_number": "${{ github.event.pull_request.number }}", + "repo": "${{ github.repository }}", + "pr_title": "${{ github.event.pull_request.title }}", + "user_login": "${{ github.event.pull_request.user.login }}", + "created_at": "${{ github.event.pull_request.created_at }}", + "pr_state": "${{ github.event.pull_request.state }}", + "pr_body": "${{ github.event.pull_request.body }}", + "html_url": "${{ github.event.pull_request.html_url }}", + "head_sha": "${{ github.event.pull_request.head.sha }}", + "base_ref": "${{ github.event.pull_request.base.ref }}" + }' \ + $API_URL \ No newline at end of file