// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation
  15. /// An object that represents a back-and-forth chat with a model, capturing the history and saving
  16. /// the context in memory between each message sent.
  17. @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
  18. public final class Chat: Sendable {
  19. private let model: GenerativeModel
  20. /// Initializes a new chat representing a 1:1 conversation between model and user.
  21. init(model: GenerativeModel, history: [ModelContent]) {
  22. self.model = model
  23. self.history = history
  24. }
  25. private let historyLock = NSLock()
  26. #if compiler(>=6)
  27. private nonisolated(unsafe) var _history: [ModelContent] = []
  28. #else
  29. private var _history: [ModelContent] = []
  30. #endif
  31. /// The previous content from the chat that has been successfully sent and received from the
  32. /// model. This will be provided to the model for each message sent as context for the discussion.
  33. public var history: [ModelContent] {
  34. get {
  35. historyLock.withLock { _history }
  36. }
  37. set {
  38. historyLock.withLock { _history = newValue }
  39. }
  40. }
  41. private func appendHistory(contentsOf: [ModelContent]) {
  42. historyLock.withLock {
  43. _history.append(contentsOf: contentsOf)
  44. }
  45. }
  46. /// Sends a message using the existing history of this chat as context. If successful, the message
  47. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  48. /// - Parameter parts: The new content to send as a single chat message.
  49. /// - Returns: The model's response if no error occurred.
  50. /// - Throws: A ``GenerateContentError`` if an error occurred.
  51. public func sendMessage(_ parts: any PartsRepresentable...) async throws
  52. -> GenerateContentResponse {
  53. return try await sendMessage([ModelContent(parts: parts)])
  54. }
  55. /// Sends a message using the existing history of this chat as context. If successful, the message
  56. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  57. /// - Parameter content: The new content to send as a single chat message.
  58. /// - Returns: The model's response if no error occurred.
  59. /// - Throws: A ``GenerateContentError`` if an error occurred.
  60. public func sendMessage(_ content: [ModelContent]) async throws
  61. -> GenerateContentResponse {
  62. // Ensure that the new content has the role set.
  63. let newContent = content.map(populateContentRole(_:))
  64. // Send the history alongside the new message as context.
  65. let request = history + newContent
  66. let result = try await model.generateContent(request)
  67. guard let reply = result.candidates.first?.content else {
  68. let error = NSError(domain: "com.google.generative-ai",
  69. code: -1,
  70. userInfo: [
  71. NSLocalizedDescriptionKey: "No candidates with content available.",
  72. ])
  73. throw GenerateContentError.internalError(underlying: error)
  74. }
  75. // Make sure we inject the role into the content received.
  76. let toAdd = ModelContent(role: "model", parts: reply.parts)
  77. // Append the request and successful result to history, then return the value.
  78. appendHistory(contentsOf: newContent)
  79. history.append(toAdd)
  80. return result
  81. }
  82. /// Sends a message using the existing history of this chat as context. If successful, the message
  83. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  84. /// - Parameter parts: The new content to send as a single chat message.
  85. /// - Returns: A stream containing the model's response or an error if an error occurred.
  86. @available(macOS 12.0, *)
  87. public func sendMessageStream(_ parts: any PartsRepresentable...) throws
  88. -> AsyncThrowingStream<GenerateContentResponse, Error> {
  89. return try sendMessageStream([ModelContent(parts: parts)])
  90. }
  91. /// Sends a message using the existing history of this chat as context. If successful, the message
  92. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  93. /// - Parameter content: The new content to send as a single chat message.
  94. /// - Returns: A stream containing the model's response or an error if an error occurred.
  95. @available(macOS 12.0, *)
  96. public func sendMessageStream(_ content: [ModelContent]) throws
  97. -> AsyncThrowingStream<GenerateContentResponse, Error> {
  98. // Ensure that the new content has the role set.
  99. let newContent: [ModelContent] = content.map(populateContentRole(_:))
  100. // Send the history alongside the new message as context.
  101. let request = history + newContent
  102. let stream = try model.generateContentStream(request)
  103. return AsyncThrowingStream { continuation in
  104. Task {
  105. var aggregatedContent: [ModelContent] = []
  106. do {
  107. for try await chunk in stream {
  108. // Capture any content that's streaming. This should be populated if there's no error.
  109. if let chunkContent = chunk.candidates.first?.content {
  110. aggregatedContent.append(chunkContent)
  111. }
  112. // Pass along the chunk.
  113. continuation.yield(chunk)
  114. }
  115. } catch {
  116. // Rethrow the error that the underlying stream threw. Don't add anything to history.
  117. continuation.finish(throwing: error)
  118. return
  119. }
  120. // Save the request.
  121. appendHistory(contentsOf: newContent)
  122. // Aggregate the content to add it to the history before we finish.
  123. let aggregated = self.aggregatedChunks(aggregatedContent)
  124. self.history.append(aggregated)
  125. continuation.finish()
  126. }
  127. }
  128. }
  129. private func aggregatedChunks(_ chunks: [ModelContent]) -> ModelContent {
  130. var parts: [any Part] = []
  131. var combinedText = ""
  132. for aggregate in chunks {
  133. // Loop through all the parts, aggregating the text and adding the images.
  134. for part in aggregate.parts {
  135. switch part {
  136. case let textPart as TextPart:
  137. combinedText += textPart.text
  138. default:
  139. // Don't combine it, just add to the content. If there's any text pending, add that as
  140. // a part.
  141. if !combinedText.isEmpty {
  142. parts.append(TextPart(combinedText))
  143. combinedText = ""
  144. }
  145. parts.append(part)
  146. }
  147. }
  148. }
  149. if !combinedText.isEmpty {
  150. parts.append(TextPart(combinedText))
  151. }
  152. return ModelContent(role: "model", parts: parts)
  153. }
  154. /// Populates the `role` field with `user` if it doesn't exist. Required in chat sessions.
  155. private func populateContentRole(_ content: ModelContent) -> ModelContent {
  156. if content.role != nil {
  157. return content
  158. } else {
  159. return ModelContent(role: "user", parts: content.parts)
  160. }
  161. }
  162. }