
Commit 3e176b8

wip

1 parent: 14613d0

3 files changed (+7, -3 lines)

src/Models/Config.swift

Lines changed: 1 addition & 1 deletion
@@ -147,7 +147,7 @@ public enum Order: String, Equatable, Hashable, Codable, Sendable {
 }
 
 public enum CacheRetention: String, Equatable, Hashable, Codable, Sendable {
-    case `24h`
+    case oneDay = "24h"
     case inMemory = "in_memory"
 }

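Presumably the case is renamed because a backtick-quoted Swift identifier still cannot begin with a digit, so the old `24h` case was not a legal declaration; the explicit raw value keeps the encoded string as "24h". A minimal standalone sketch (not the repo's code; Sendable dropped for brevity) that checks the round trip:

import Foundation

// Mirrors the updated CacheRetention from src/Models/Config.swift.
enum CacheRetention: String, Equatable, Hashable, Codable {
    case oneDay = "24h"
    case inMemory = "in_memory"
}

// The Swift-side name changes, but the encoded value is still "24h",
// so the wire format is unchanged.
let data = try! JSONEncoder().encode([CacheRetention.oneDay])
print(String(data: data, encoding: .utf8)!)   // ["24h"]
let decoded = try! JSONDecoder().decode([CacheRetention].self, from: data)
assert(decoded == [.oneDay])
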
src/Models/Request.swift

Lines changed: 3 additions & 1 deletion
@@ -110,7 +110,7 @@ import MetaCodable
 
     /// The retention policy for the prompt cache.
     ///
-    /// Set to `24h` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours.
+    /// Set to `oneDay` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours.
     public var promptCacheRetention: CacheRetention?
 
     /// Configuration options for [reasoning models](https://platform.openai.com/docs/guides/reasoning).
@@ -219,6 +219,7 @@ import MetaCodable
         previousResponseId: String? = nil,
         prompt: Prompt? = nil,
         promptCacheKey: String? = nil,
+        promptCacheRetention: CacheRetention? = nil,
         reasoning: ReasoningConfig? = nil,
         safetyIdentifier: String? = nil,
         serviceTier: ServiceTier? = nil,
@@ -261,5 +262,6 @@ import MetaCodable
         self.safetyIdentifier = safetyIdentifier
         self.parallelToolCalls = parallelToolCalls
         self.previousResponseId = previousResponseId
+        self.promptCacheRetention = promptCacheRetention
     }
 }

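Since the new promptCacheRetention parameter defaults to nil (here and in the mirrored Response.swift change below), existing call sites keep compiling. A stand-in sketch of that pattern, using a hypothetical MiniRequest type rather than the repo's actual Request, and reusing the CacheRetention sketch above:

// Hypothetical stand-in for Request, showing why a new defaulted
// optional parameter is source-compatible with old call sites.
struct MiniRequest {
    var promptCacheKey: String?
    var promptCacheRetention: CacheRetention?

    init(promptCacheKey: String? = nil,
         promptCacheRetention: CacheRetention? = nil) {
        self.promptCacheKey = promptCacheKey
        self.promptCacheRetention = promptCacheRetention
    }
}

let before = MiniRequest(promptCacheKey: "chat-123")   // pre-existing call site, unchanged
let after = MiniRequest(promptCacheKey: "chat-123",
                        promptCacheRetention: .oneDay) // opts into 24-hour retention
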
src/Models/Response.swift

Lines changed: 3 additions & 1 deletion
@@ -183,7 +183,7 @@ import HelperCoders
 
     /// The retention policy for the prompt cache.
     ///
-    /// Set to `24h` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours.
+    /// Set to `oneDay` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours.
     public var promptCacheRetention: CacheRetention?
 
     /// Configuration options for [reasoning models](https://platform.openai.com/docs/guides/reasoning).
@@ -307,6 +307,7 @@ import HelperCoders
         previousResponseId: String? = nil,
         prompt: Prompt? = nil,
         promptCacheKey: String? = nil,
+        promptCacheRetention: CacheRetention? = nil,
         reasoning: ReasoningConfig? = nil,
         safetyIdentifier: String? = nil,
         status: Status,
@@ -344,6 +345,7 @@ import HelperCoders
         self.incompleteDetails = incompleteDetails
         self.parallelToolCalls = parallelToolCalls
         self.previousResponseId = previousResponseId
+        self.promptCacheRetention = promptCacheRetention
     }
 }
