@@ -12,7 +12,7 @@ import type { NoExcessProperties } from "effect/Types"
 import type { AiError } from "./AiError.js"
 import * as AiInput from "./AiInput.js"
 import * as AiLanguageModel from "./AiLanguageModel.js"
-import * as AiResponse from "./AiResponse_old.js"
+import * as AiResponse from "./AiResponse.js"

 /**
  * @since 1.0.0
@@ -21,7 +21,7 @@ import * as AiResponse from "./AiResponse_old.js"
 export class AiChat extends Context.Tag("@effect/ai/AiChat")<
   AiChat,
   AiChat.Service
->() {}
+>() {}

 /**
  * @since 1.0.0
@@ -59,7 +59,7 @@ export declare namespace AiChat {
     readonly generateText: <
       Options extends NoExcessProperties<Omit<AiLanguageModel.GenerateTextOptions<any>, "system">, Options>
     >(options: Options) => Effect.Effect<
-      AiResponse.AiResponse,
+      AiLanguageModel.ExtractSuccess<Options>,
       AiLanguageModel.ExtractError<Options>,
       AiLanguageModel.ExtractContext<Options>
     >
@@ -76,7 +76,7 @@ export declare namespace AiChat {
     readonly streamText: <
       Options extends NoExcessProperties<Omit<AiLanguageModel.GenerateTextOptions<any>, "system">, Options>
     >(options: Options) => Stream.Stream<
-      AiResponse.AiResponse,
+      AiLanguageModel.ExtractSuccess<Options>,
       AiLanguageModel.ExtractError<Options>,
       AiLanguageModel.ExtractContext<Options>
     >
@@ -105,107 +105,105 @@ export declare namespace AiChat {
  * @since 1.0.0
  * @category constructors
  */
-export const fromPrompt = Effect.fnUntraced(
-  function*(options: {
-    readonly prompt: AiInput.Raw
-    readonly system?: string
-  }) {
-    const languageModel = yield* AiLanguageModel.AiLanguageModel
-    const history = yield* Ref.make<AiInput.AiInput>(AiInput.make(options.prompt))
-    const semaphore = yield* Effect.makeSemaphore(1)
+export const fromPrompt = Effect.fnUntraced(function*(options: {
+  readonly prompt: AiInput.Raw
+  readonly system?: string
+}) {
+  const languageModel = yield* AiLanguageModel.AiLanguageModel
+  const history = yield* Ref.make<AiInput.AiInput>(AiInput.make(options.prompt))
+  const semaphore = yield* Effect.makeSemaphore(1)

-    return AiChat.of({
-      history: Ref.get(history),
-      export: Ref.get(history).pipe(
-        Effect.flatMap(Schema.encode(AiInput.AiInput)),
-        Effect.orDie
-      ),
-      exportJson: Ref.get(history).pipe(
-        Effect.flatMap(Schema.encode(AiInput.AiInputFromJson)),
-        Effect.orDie
-      ),
-      generateText(options) {
-        const newParts = AiInput.make(options.prompt)
-        return Ref.get(history).pipe(
-          Effect.flatMap((parts) => {
-            const allParts = [...parts, ...newParts]
-            return languageModel.generateText({
+  return AiChat.of({
+    history: Ref.get(history),
+    export: Ref.get(history).pipe(
+      Effect.flatMap(Schema.encode(AiInput.AiInput)),
+      Effect.orDie
+    ),
+    exportJson: Ref.get(history).pipe(
+      Effect.flatMap(Schema.encode(AiInput.FromJson)),
+      Effect.orDie
+    ),
+    generateText(options) {
+      const newInput = AiInput.make(options.prompt)
+      return Ref.get(history).pipe(
+        Effect.flatMap((oldInput) => {
+          const input = AiInput.concat(oldInput, newInput)
+          return languageModel.generateText({
+            ...options,
+            prompt: input
+          }).pipe(
+            Effect.tap((response) => {
+              const modelInput = AiInput.make(response)
+              return Ref.set(history, AiInput.concat(input, modelInput))
+            })
+          )
+        }),
+        semaphore.withPermits(1),
+        Effect.withSpan("AiChat.send", { captureStackTrace: false })
+      )
+    },
+    streamText(options) {
+      return Stream.suspend(() => {
+        let combined = AiResponse.empty
+        return Stream.fromChannel(Channel.acquireUseRelease(
+          semaphore.take(1).pipe(
+            Effect.zipRight(Ref.get(history)),
+            Effect.map((history) => AiInput.concat(history, AiInput.make(options.prompt)))
+          ),
+          (parts) =>
+            languageModel.streamText({
               ...options,
-              prompt: allParts
+              prompt: parts
             }).pipe(
-              Effect.tap((response) => {
-                const responseParts = AiInput.make(response)
-                return Ref.set(history, [...allParts, ...responseParts])
-              })
-            )
-          }),
-          semaphore.withPermits(1),
-          Effect.withSpan("AiChat.send", { captureStackTrace: false })
-        )
-      },
-      streamText(options) {
-        return Stream.suspend(() => {
-          let combined = AiResponse.AiResponse.empty
-          return Stream.fromChannel(Channel.acquireUseRelease(
-            semaphore.take(1).pipe(
-              Effect.zipRight(Ref.get(history)),
-              Effect.map((history) => [...history, ...AiInput.make(options.prompt)])
+              Stream.map((chunk) => {
+                combined = AiResponse.merge(combined, chunk)
+                return chunk
+              }),
+              Stream.toChannel
             ),
-            (parts) =>
-              languageModel.streamText({
-                ...options,
-                prompt: parts
-              }).pipe(
-                Stream.map((chunk) => {
-                  combined = combined.merge(chunk)
-                  return chunk
-                }),
-                Stream.toChannel
-              ),
-            (parts) =>
-              Effect.zipRight(
-                Ref.set(history, [...parts, ...AiInput.make(combined)]),
-                semaphore.release(1)
+          (parts) =>
+            Effect.zipRight(
+              Ref.set(history, AiInput.concat(parts, AiInput.make(combined))),
+              semaphore.release(1)
+            )
+        ))
+      }).pipe(Stream.withSpan("AiChat.stream", {
+        captureStackTrace: false
+      })) as any
+    },
+    generateObject(options) {
+      const newInput = AiInput.make(options.prompt)
+      return Ref.get(history).pipe(
+        Effect.flatMap((oldInput) => {
+          const input = AiInput.concat(oldInput, newInput)
+          return languageModel.generateObject({
+            ...options,
+            prompt: input
+          } as any).pipe(
+            Effect.flatMap((response) => {
+              const modelInput = AiInput.make(response)
+              return Effect.as(
+                Ref.set(history, AiInput.concat(input, modelInput)),
+                response.value
               )
-          ))
-        }).pipe(Stream.withSpan("AiChat.stream", {
+            })
+          )
+        }),
+        semaphore.withPermits(1),
+        Effect.withSpan("AiChat.structured", {
+          attributes: {
+            toolCallId: "toolCallId" in options
+              ? options.toolCallId
+              : "_tag" in options.schema
+              ? options.schema._tag
+              : (options.schema as any).identifier
+          },
          captureStackTrace: false
-        }))
-      },
-      generateObject(options) {
-        const newParts = AiInput.make(options.prompt)
-        return Ref.get(history).pipe(
-          Effect.flatMap((parts) => {
-            const allParts = [...parts, ...newParts]
-            return languageModel.generateObject({
-              ...options,
-              prompt: allParts
-            } as any).pipe(
-              Effect.flatMap((response) => {
-                const responseParts = AiInput.make(response)
-                return Effect.as(
-                  Ref.set(history, [...allParts, ...responseParts]),
-                  response.value
-                )
-              })
-            )
-          }),
-          semaphore.withPermits(1),
-          Effect.withSpan("AiChat.structured", {
-            attributes: {
-              toolCallId: "toolCallId" in options
-                ? options.toolCallId
-                : "_tag" in options.schema
-                ? options.schema._tag
-                : (options.schema as any).identifier
-            },
-            captureStackTrace: false
-          })
-        ) as any
-      }
-    })
-  }
-)
+        })
+      ) as any
+    }
+  })
+})

 /**
  * @since 1.0.0
@@ -222,7 +220,7 @@ const decodeUnknown = Schema.decodeUnknown(AiInput.AiInput)
 export const fromExport = (data: unknown): Effect.Effect<AiChat.Service, ParseError, AiLanguageModel.AiLanguageModel> =>
   Effect.flatMap(decodeUnknown(data), (prompt) => fromPrompt({ prompt }))

-const decodeJson = Schema.decode(AiInput.AiInputFromJson)
+const decodeJson = Schema.decode(AiInput.FromJson)

 /**
  * @since 1.0.0
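
For reference, a minimal usage sketch of the surface this diff touches. It assumes the module is importable as `@effect/ai/AiChat`, that a plain string is an acceptable `AiInput.Raw`, and that a concrete `AiLanguageModel.AiLanguageModel` implementation is provided elsewhere; none of that is shown in the diff itself.

```ts
import { Effect } from "effect"
import * as AiChat from "@effect/ai/AiChat" // assumed import path

// Sketch only: the layer supplying AiLanguageModel.AiLanguageModel is assumed to exist.
const program = Effect.gen(function*() {
  // fromPrompt seeds the Ref-backed history with the initial prompt
  const chat = yield* AiChat.fromPrompt({ prompt: "Hello!" })

  // After this change the success type of generateText/streamText is inferred as
  // AiLanguageModel.ExtractSuccess<Options> instead of always AiResponse.AiResponse
  const response = yield* chat.generateText({ prompt: "Summarize our conversation." })

  // History is tracked as AiInput.AiInput and can be exported, e.g. as JSON
  const json = yield* chat.exportJson
  return { response, json }
})
```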