@@ -7,6 +7,7 @@ import { fetch as tauriFetch } from "@tauri-apps/plugin-http";
 import { extractReasoningMiddleware, wrapLanguageModel } from "ai";
 import { useMemo } from "react";
 
+import type { CharTask } from "@hypr/api-client";
 import type { AIProviderStorage } from "@hypr/store";
 
 import { useAuth } from "../auth";
@@ -22,7 +23,7 @@ import {
 } from "../components/settings/ai/shared/eligibility";
 import { env } from "../env";
 import * as settings from "../store/tinybase/store/settings";
-import { tracedFetch } from "../utils/traced-fetch";
+import { createTracedFetch, tracedFetch } from "../utils/traced-fetch";
 
 type LanguageModelV3 = Parameters<typeof wrapLanguageModel>[0]["model"];
 
@@ -52,9 +53,12 @@ type LLMConnectionResult = {
   status: LLMConnectionStatus;
 };
 
-export const useLanguageModel = (): LanguageModelV3 | null => {
+export const useLanguageModel = (task?: CharTask): LanguageModelV3 | null => {
   const { conn } = useLLMConnection();
-  return useMemo(() => (conn ? createLanguageModel(conn) : null), [conn]);
+  return useMemo(
+    () => (conn ? createLanguageModel(conn, task) : null),
+    [conn, task],
+  );
 };
 
 export const useLLMConnection = (): LLMConnectionResult => {
@@ -227,11 +231,14 @@ const wrapWithThinkingMiddleware = (
   });
 };
 
-const createLanguageModel = (conn: LLMConnectionInfo): LanguageModelV3 => {
+const createLanguageModel = (
+  conn: LLMConnectionInfo,
+  task?: CharTask,
+): LanguageModelV3 => {
   switch (conn.providerId) {
     case "hyprnote": {
       const provider = createOpenRouter({
-        fetch: tracedFetch,
+        fetch: task ? createTracedFetch(task) : tracedFetch,
         baseURL: conn.baseUrl,
         apiKey: conn.apiKey,
       });
0 commit comments