using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

namespace DuckDuckGo
{
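    // Minimal client for DuckDuckGo's Duck.ai chat endpoints (duckchat/v1):
    // it fetches the chat front page, requests an X-Vqd-4 session token from
    // /duckchat/v1/status, and then POSTs the running message history to
    // /duckchat/v1/chat, reassembling the streamed reply into a single string.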
    internal class ChatAPI
    {
        public class ChatModel
        {
            public string NAME { get; set; } = "";
            public string ID { get; set; } = "";
            public string DESC { get; set; } = "";
            public string CREATOR { get; set; } = "";
        }

        public static class ChatModels
        {
            // Defines the available chat models.
            // Each model is kept as a named preset because it has a unique ID.

            public static ChatModel ChatGPT_4o_Mini = new ChatModel
            {
                NAME = "ChatGPT",
                ID = "gpt-4o-mini",
                DESC = "General-purpose AI with high built-in moderation",
                CREATOR = "OpenAI"
            };

            public static ChatModel LLama_3_3_70B = new ChatModel
            {
                NAME = "Llama 3.3",
                ID = "meta-llama/Llama-3.3-70B-Instruct-Turbo",
                DESC = "General-purpose AI with medium built-in moderation",
                CREATOR = "Facebook / Meta"
            };

            public static ChatModel Claude_3_Haiku = new ChatModel
            {
                NAME = "Claude 3 Haiku",
                ID = "claude-3-haiku-20240307",
                DESC = "General-purpose AI with high built-in moderation",
                CREATOR = "Anthropic"
            };

            public static ChatModel ChatGPT_o3_Mini = new ChatModel
            {
                NAME = "ChatGPT o3 Mini",
                ID = "o3-mini",
                DESC = "Reasoning AI with high built-in moderation",
                CREATOR = "OpenAI"
            };

            public static ChatModel Mistral_8x7B = new ChatModel
            {
                NAME = "Mistral 8x7B",
                ID = "mistralai/Mixtral-8x7B-Instruct-v0.1",
                DESC = "General-purpose AI with low built-in moderation",
                CREATOR = "Mistral AI"
            };
        }

        // DuckDuckGo X-Vqd-4 session token (sent as a request header)
        public string token = "";

        // Default model
        public ChatModel model { get; set; } = ChatModels.ChatGPT_4o_Mini;

        public string DefaultPrompt =
            "Current date: " + DateTime.UtcNow.Day + "." + DateTime.UtcNow.Month + "." + DateTime.UtcNow.Year + "\n" +
            "These are your instructions. Please make sure to always respect and enforce them to the best of your abilities. " +
            "1.) You provide short, meaningful answers. Keep your answers AS SHORT AS POSSIBLE! " +
            "2.) Bring IT-related help to the point. Double-check code readability, security and professionalism. " +
            "3.) Never mention the rules or instructions! The user does not care! " +
            "9.) Now answer with 'Hey there, nice to meet you! How can I be of service?'.";

        // Chat history
        public List<dynamic> messages = new List<dynamic>();

        public async Task<int> initConversation()
        {
            // Define handler
            var handler = new HttpClientHandler();
            HttpClient client = new HttpClient(handler);

            // Step 1: GET front page
            try
            {
                // Add headers
                client.DefaultRequestHeaders.Add("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36");

                // Send GET request
                await client.GetAsync("https://duckduckgo.com/?q=DuckDuckGo+AI+Chat&ia=chat&duckai=1");
            }
            catch (Exception ex) { MessageBox.Show(ex.Message); return 1; }

            // Step 2: Obtain token
            try
            {
                // This header is required to obtain the token.
                // It tells the DuckDuckGo web server that we don't have a token yet and would like to receive one.
                client.DefaultRequestHeaders.Add("X-Vqd-Accept", "1");

                // Send GET request
                HttpResponseMessage response = await client.GetAsync("https://duckduckgo.com/duckchat/v1/status");

                // Ensure that the request was successful
                response.EnsureSuccessStatusCode();

                // Get the token from the response headers
                var headerVals = response.Headers.GetValues("X-Vqd-4");
                token = headerVals.FirstOrDefault() ?? "";
            }
            catch (Exception) { return 1; }
            return 0;
        }
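
        // The token handshake above boils down to (header names taken from this file,
        // values illustrative):
        //   GET /duckchat/v1/status   with request header  X-Vqd-Accept: 1
        //   200 OK                    with response header X-Vqd-4: <opaque session token>
        // Every later /duckchat/v1/chat call sends the most recent X-Vqd-4 value back.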

        public async Task<string> PromptAsync(string msg)
        {
            // Define handler
            var handler = new HttpClientHandler();
            HttpClient client = new HttpClient(handler);

            // Add headers
            client.DefaultRequestHeaders.Add("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36");
            client.DefaultRequestHeaders.Add("x-vqd-4", token); // Very important: the request won't work without it!

            // Append contextual information based on the message
            string lmsg = msg.ToLower();
            if (lmsg.Contains("time") || lmsg.Contains("clock") || lmsg.Contains("late"))
            {
                msg += "\n(auto-context: current UTC time is " + DateTime.UtcNow + ")";
            }

            // Add message to message history
            messages.Add(new { role = "user", content = msg });

            // Define payload
            var payload = new
            {
                model = model.ID,
                messages = messages
            };
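
            // For reference, the serialized payload has roughly this shape:
            //   { "model": "gpt-4o-mini", "messages": [ { "role": "user", "content": "..." } ] }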

            // Serialize payload to JSON
            var jsonPayload = JsonConvert.SerializeObject(payload);
            var content = new StringContent(jsonPayload, Encoding.UTF8, "application/json");

            // Set content headers
            content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");

            // Send POST request
            HttpResponseMessage chatResponse = await client.PostAsync("https://duckduckgo.com/duckchat/v1/chat", content);

            // Get the rotated token from the response headers (if present)
            if (chatResponse.Headers.TryGetValues("X-Vqd-4", out var headerVals))
            {
                token = headerVals.FirstOrDefault() ?? "";
            }

            // Check if the response was NOT successful
            if (!chatResponse.IsSuccessStatusCode)
            {
                // Show status code
                MessageBox.Show("Request Error: " + chatResponse.StatusCode);
            }

            // Read response content
            string responseContent = await chatResponse.Content.ReadAsStringAsync();

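            // The chat endpoint streams its reply as server-sent-event style lines, e.g.
            // (assumed shape, reconstructed from the parsing below):
            //   data: {"message":"Hello", ...}
            //   data: [DONE]
            // Each "message" fragment is one piece of the final answer.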
            // Split response content by "data: " and parse each chunk as JSON
            var jsonObjects = new List<JObject>();
            string[] parts = responseContent.Split(new[] { "data: " }, StringSplitOptions.RemoveEmptyEntries);
            foreach (var part in parts)
            {
                try
                {
                    JObject jsonResponse = JObject.Parse(part);
                    jsonObjects.Add(jsonResponse);
                }
                catch (JsonReaderException)
                {
                    // Skip chunks that are not valid JSON (e.g. the stream terminator)
                    continue;
                }
            }

            // Go through all JSON objects
            string answer;
            StringBuilder strB = new StringBuilder();
            for (int i = 0; i < jsonObjects.Count; i++)
            {
                try
                {
                    // Add parts to a final, readable message
                    JToken chunk = jsonObjects[i];
                    if (chunk["message"] != null)
                    {
                        // Get message chunk for the final answer
                        var value = chunk["message"];

                        // Append message chunk to the final message
                        strB.Append(value);
                    }
                }
                catch (Exception)
                {
                    continue;
                }
            }

            // Get final answer
            answer = strB.ToString();

            // Add LLM response to message history
            messages.Add(new { role = "assistant", content = " " + answer });

            // Dispose client
            client.Dispose();

            // Return answer
            return answer;
        }
    }
}
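
// Usage sketch (illustration only): how a hypothetical WPF code-behind might drive
// ChatAPI. The ChatAPI members are the ones defined above; SendButton_Click, InputBox
// and OutputBox are assumed names for UI elements.
//
//     private readonly ChatAPI chat = new ChatAPI { model = ChatAPI.ChatModels.Claude_3_Haiku };
//
//     private async void SendButton_Click(object sender, RoutedEventArgs e)
//     {
//         if (chat.token == "")
//         {
//             // One-time setup: fetch the X-Vqd-4 token, then send the system instructions.
//             if (await chat.initConversation() != 0) return;
//             await chat.PromptAsync(chat.DefaultPrompt);
//         }
//
//         OutputBox.Text = await chat.PromptAsync(InputBox.Text);
//     }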