@@ -1,5 +1,6 @@
 import { LONG_DEFAULT_BRANCH_TEMPLATE, LONG_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
 import { MessageRole, type PromptMessage, type AIClient, type Prompt } from '$lib/ai/types';
+import { andThen, buildFailureFromAny, ok, wrap, wrapAsync, type Result } from '$lib/result';
 import { isNonEmptyObject } from '$lib/utils/typeguards';
 import { fetch, Body, Response } from '@tauri-apps/api/http';
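The refactor leans on a small set of `Result` combinators from `$lib/result`. That module is not shown in this diff, so the sketch below is a hypothetical reconstruction inferred purely from the call sites that follow; the real module may use different field names or signatures.

```ts
// Hypothetical sketch of $lib/result, inferred only from the call sites in this diff.
type Result<Ok, Err> = { ok: true; value: Ok } | { ok: false; failure: Err };

function ok<Ok, Err>(value: Ok): Result<Ok, Err> {
  return { ok: true, value };
}

// Coerce any value (string, Error, unknown) into a failed Result.
function buildFailureFromAny<Ok>(value: unknown): Result<Ok, Error> {
  return { ok: false, failure: value instanceof Error ? value : new Error(String(value)) };
}

// Run a throwing function (e.g. JSON.parse) and capture the outcome as a Result.
function wrap<Ok, Err extends Error>(fn: () => Ok): Result<Ok, Err | Error> {
  try {
    return ok(fn());
  } catch (e) {
    return buildFailureFromAny(e);
  }
}

// Async variant: await the callback and convert a rejection into a failure.
async function wrapAsync<Ok>(fn: () => Promise<Ok>): Promise<Result<Ok, Error>> {
  try {
    return ok(await fn());
  } catch (e) {
    return buildFailureFromAny(e);
  }
}

// Chain a computation onto a success; a failure short-circuits unchanged.
function andThen<Ok, Err, NewOk>(
  result: Result<Ok, Err>,
  fn: (value: Ok) => Result<NewOk, Err>
): Result<NewOk, Err> {
  return result.ok ? fn(result.value) : result;
}
```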
@@ -81,15 +82,22 @@ export class OllamaClient implements AIClient {
     private modelName: string
   ) {}

-  async evaluate(prompt: Prompt) {
+  async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
     const messages = this.formatPrompt(prompt);
-    const response = await this.chat(messages);
-    const rawResponse = JSON.parse(response.message.content);
-    if (!isOllamaChatMessageFormat(rawResponse)) {
-      throw new Error('Invalid response: ' + response.message.content);
-    }

-    return rawResponse.result;
+    const responseResult = await this.chat(messages);
+
+    return andThen(responseResult, (response) => {
+      const rawResponseResult = wrap<unknown, Error>(() => JSON.parse(response.message.content));
+
+      return andThen(rawResponseResult, (rawResponse) => {
+        if (!isOllamaChatMessageFormat(rawResponse)) {
+          return buildFailureFromAny('Invalid response: ' + response.message.content);
+        }
+
+        return ok(rawResponse.result);
+      });
+    });
   }

   /**
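With `evaluate` now returning `Result<string, Error>`, failures from the network call, the `JSON.parse`, and the schema check all surface through one channel instead of three separate `throw` sites. A hypothetical call site (not part of this diff, and assuming the discriminated-union shape sketched above) might consume it like this:

```ts
// Hypothetical caller; `ollamaClient` and `prompt` stand in for real values.
const evaluation = await ollamaClient.evaluate(prompt);
if (evaluation.ok) {
  console.log('model output:', evaluation.value);
} else {
  console.error('evaluation failed:', evaluation.failure.message);
}
```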
@@ -124,31 +132,32 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
    * @param request - The OllamaChatRequest object containing the request details.
    * @returns A Promise that resolves to the Response object.
    */
-  private async fetchChat(request: OllamaChatRequest): Promise<Response<any>> {
+  private async fetchChat(request: OllamaChatRequest): Promise<Result<Response<any>, Error>> {
     const url = new URL(OllamaAPEndpoint.Chat, this.endpoint);
     const body = Body.json(request);
-    const result = await fetch(url.toString(), {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json'
-      },
-      body
-    });
-    return result;
+    return await wrapAsync(
+      async () =>
+        await fetch(url.toString(), {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json'
+          },
+          body
+        })
+    );
   }

   /**
    * Sends a chat message to the LLM model and returns the response.
    *
    * @param messages - An array of LLMChatMessage objects representing the chat messages.
    * @param options - Optional LLMRequestOptions object for specifying additional options.
-   * @throws Error if the response is invalid.
    * @returns A Promise that resolves to an LLMResponse object representing the response from the LLM model.
    */
   private async chat(
     messages: Prompt,
     options?: OllamaRequestOptions
-  ): Promise<OllamaChatResponse> {
+  ): Promise<Result<OllamaChatResponse, Error>> {
     const result = await this.fetchChat({
       model: this.modelName,
       stream: false,
@@ -157,10 +166,12 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
       format: 'json'
     });

-    if (!isOllamaChatResponse(result.data)) {
-      throw new Error('Invalid response\n' + JSON.stringify(result.data));
-    }
+    return andThen(result, (result) => {
+      if (!isOllamaChatResponse(result.data)) {
+        return buildFailureFromAny('Invalid response\n' + JSON.stringify(result.data));
+      }

-    return result.data;
+      return ok(result.data);
+    });
   }
 }
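The typeguards `isOllamaChatResponse` and `isOllamaChatMessageFormat` are defined elsewhere in this file and are untouched by the diff. A plausible shape, inferred only from how they are called above and assuming `isNonEmptyObject` narrows its argument to `Record<string, unknown>`, would be:

```ts
// Speculative reconstruction of the typeguards; the real definitions live
// outside these hunks and may differ.
interface OllamaChatMessageFormat {
  result: string;
}

interface OllamaChatResponse {
  message: { content: string };
}

function isOllamaChatMessageFormat(value: unknown): value is OllamaChatMessageFormat {
  return isNonEmptyObject(value) && typeof value.result === 'string';
}

function isOllamaChatResponse(value: unknown): value is OllamaChatResponse {
  return (
    isNonEmptyObject(value) &&
    isNonEmptyObject(value.message) &&
    typeof value.message.content === 'string'
  );
}
```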