@@ -1,19 +1,31 @@
  {
+   // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
    // Brave Search API Key
+
    // Used by the `search_web()` agent function, which agents can call to help with their task
    // https://brave.com/search/api/ (there is a free plan)
    // The "Data for AI" plan is preferred over "Data for Search" since it includes some useful additional props
    "brave_search_api_key": "[BRAVE_SEARCH_API_KEY]",

+   // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
+   // Current Model
+
    // Selected `model`, from the list of OpenAI-compatible API endpoints below, that agents must use
-   "current_model": "gpt-4-32k",
+   "current_model": "Open-Orca/Mistral-7B-OpenOrca",
+
+   // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
+   // Project Description

    // Description of the program you want the agents to develop
-   // You can also set it to `null` if you want the Product Owner agent to prompt you each time you run OADS.
-   "initial_project_description": "Quickly develop and run a basic CLI snake game in Python.",
+   // You can also set it to `null` if you want the Product Owner agent
+   // to prompt you for your project description each time you run OADS.
+   "initial_project_description": "Create a \"guess the number\" CLI game in Python.",
+
+   // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
+   // List of OpenAI-Compatible API Endpoints

-   // List of OpenAI API endpoints
    // The `model` key must be unique.
+   // https://microsoft.github.io/FLAML/docs/reference/autogen/oai/completion/#create
    "models": [
      // Any model using Azure OpenAI API
      {
@@ -41,6 +53,35 @@
      {
        "model": "gpt-4-32k",
        "api_key": "[OPEN_AI_API_KEY]"
+     },
+
+     // Open-source LLM deployment using "Text Generation Web UI" with its `OpenAI` extension enabled:
+     // https://github.com/ivangabriele/openai-autogen-dev-studio#open-source-llms
+     // https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai#an-openedai-api-openai-like
+
+     // !!! FUNCTION CALLING IS NOT SUPPORTED BY Text Generation Web UI !!!
+
+     // This can also be any inference endpoint that follows the OpenAI API specs,
+     // regardless of the model you run behind it.
+     {
+       "model": "Open-Orca/Mistral-7B-OpenOrca",
+       "api_base": "https://[YOUR_CONTAINER_ID]-5001.proxy.runpod.net", // or your local/public endpoint
+       // Unless you have set up your endpoint with an API key, you can leave this dummy value:
+       "api_key": "sk-111111111111111111111111111111111111111111111111",
+       "api_type": "open_ai"
      }
    ]
+
+   // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
+   // Functionary LLM API Endpoint Configuration
+
+   // This must be a secondary deployment. Don't reuse this endpoint in the `models` list above.
+   // You can deploy it in one click using this GitHub repository:
+   // https://github.com/ivangabriele/docker-functionary
+   // "functionary_model": {
+   //   "model": "musabgultekin/functionary-7b-v1",
+   //   "api_base": "https://eoefr8r4dxwu0n-8000.proxy.runpod.net/v1",
+   //   "api_key": "functionary",
+   //   "api_type": "open_ai"
+   // }
  }
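
For reference, the `search_web()` helper that `brave_search_api_key` powers boils down to a single request against the Brave Search web-search endpoint. Here is a minimal sketch; the function name, parameters, and result shape are illustrative assumptions rather than the project's actual implementation:

```python
# Minimal sketch of a Brave Search lookup like the one `search_web()` relies on.
# The key is read from the environment here for the sketch; OADS reads it from this config file.
import os

import requests


def search_web(query: str, count: int = 5) -> list[dict]:
    """Return a list of {title, url, description} results from Brave Search."""
    response = requests.get(
        "https://api.search.brave.com/res/v1/web/search",
        headers={
            "Accept": "application/json",
            "X-Subscription-Token": os.environ["BRAVE_SEARCH_API_KEY"],
        },
        params={"q": query, "count": count},
        timeout=10,
    )
    response.raise_for_status()
    results = response.json().get("web", {}).get("results", [])
    return [
        {"title": r.get("title"), "url": r.get("url"), "description": r.get("description")}
        for r in results
    ]


if __name__ == "__main__":
    for hit in search_web("number guessing game python"):
        print(f"{hit['title']} - {hit['url']}")
```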
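The "Text Generation Web UI" entry above is just an OpenAI-compatible HTTP endpoint, so it can be smoke-tested with the legacy `openai` Python SDK (pre-1.0, the generation autogen targeted at the time). A minimal sketch under that assumption; note that some deployments expect a trailing `/v1` on the base URL:

```python
# Quick smoke test for an OpenAI-compatible endpoint such as Text Generation Web UI's
# `openai` extension. Assumes the legacy openai SDK (< 1.0); the base URL below is a placeholder.
import openai

openai.api_type = "open_ai"
openai.api_base = "https://YOUR-CONTAINER-ID-5001.proxy.runpod.net/v1"  # or http://127.0.0.1:5001/v1 locally
openai.api_key = "sk-111111111111111111111111111111111111111111111111"  # dummy value if the endpoint is unauthenticated

reply = openai.ChatCompletion.create(
    model="Open-Orca/Mistral-7B-OpenOrca",
    messages=[{"role": "user", "content": "Reply with a single short sentence."}],
    max_tokens=32,
)
print(reply["choices"][0]["message"]["content"])
```

Since this extension does not support function calling, OADS relies on the separate deployment configured under `functionary_model` for that.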
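Because the `model` key must be unique across `models`, `current_model` resolves to exactly one endpoint entry. The sketch below shows that lookup; the `config.jsonc` file name, the `commentjson` parser, and the resulting `llm_config` shape are assumptions for illustration, not the project's actual loader:

```python
# Resolve `current_model` to its (unique) entry in `models` and build an
# autogen-style llm_config from it. File name and parser are assumptions.
import commentjson  # pip install commentjson (plain json.load fails on // comments)

with open("config.jsonc") as fp:
    config = commentjson.load(fp)

matches = [m for m in config["models"] if m["model"] == config["current_model"]]
assert len(matches) == 1, f"`current_model` must match exactly one `models` entry, got {len(matches)}"

llm_config = {"config_list": matches}  # shape expected by pyautogen agents' llm_config
print(llm_config)
```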