// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
  14. import Foundation
  15. /// An object that represents a back-and-forth chat with a model, capturing the history and saving
  16. /// the context in memory between each message sent.
  17. @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
  18. public final class Chat: Sendable {
  19. private let model: GenerativeModel
  20. /// Initializes a new chat representing a 1:1 conversation between model and user.
  21. init(model: GenerativeModel, history: [ModelContent]) {
  22. self.model = model
  23. self.history = history
  24. }
  25. private let historyLock = NSLock()
  26. private nonisolated(unsafe) var _history: [ModelContent] = []
  27. /// The previous content from the chat that has been successfully sent and received from the
  28. /// model. This will be provided to the model for each message sent as context for the discussion.
  29. public var history: [ModelContent] {
  30. get {
  31. historyLock.withLock { _history }
  32. }
  33. set {
  34. historyLock.withLock { _history = newValue }
  35. }
  36. }
  37. private func appendHistory(contentsOf: [ModelContent]) {
  38. historyLock.withLock {
  39. _history.append(contentsOf: contentsOf)
  40. }
  41. }
  42. private func appendHistory(_ newElement: ModelContent) {
  43. historyLock.withLock {
  44. _history.append(newElement)
  45. }
  46. }
  47. /// Sends a message using the existing history of this chat as context. If successful, the message
  48. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  49. /// - Parameter parts: The new content to send as a single chat message.
  50. /// - Returns: The model's response if no error occurred.
  51. /// - Throws: A ``GenerateContentError`` if an error occurred.
  52. public func sendMessage(_ parts: any PartsRepresentable...) async throws
  53. -> GenerateContentResponse {
  54. return try await sendMessage([ModelContent(parts: parts)])
  55. }
  56. /// Sends a message using the existing history of this chat as context. If successful, the message
  57. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  58. /// - Parameter content: The new content to send as a single chat message.
  59. /// - Returns: The model's response if no error occurred.
  60. /// - Throws: A ``GenerateContentError`` if an error occurred.
  61. public func sendMessage(_ content: [ModelContent]) async throws
  62. -> GenerateContentResponse {
  63. // Ensure that the new content has the role set.
  64. let newContent = content.map(populateContentRole(_:))
  65. // Send the history alongside the new message as context.
  66. let request = history + newContent
  67. let result = try await model.generateContent(request)
  68. guard let reply = result.candidates.first?.content else {
  69. let error = NSError(domain: "com.google.generative-ai",
  70. code: -1,
  71. userInfo: [
  72. NSLocalizedDescriptionKey: "No candidates with content available.",
  73. ])
  74. throw GenerateContentError.internalError(underlying: error)
  75. }
  76. // Make sure we inject the role into the content received.
  77. let toAdd = ModelContent(role: "model", parts: reply.parts)
  78. // Append the request and successful result to history, then return the value.
  79. appendHistory(contentsOf: newContent)
  80. appendHistory(toAdd)
  81. return result
  82. }
  83. /// Sends a message using the existing history of this chat as context. If successful, the message
  84. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  85. /// - Parameter parts: The new content to send as a single chat message.
  86. /// - Returns: A stream containing the model's response or an error if an error occurred.
  87. @available(macOS 12.0, *)
  88. public func sendMessageStream(_ parts: any PartsRepresentable...) throws
  89. -> AsyncThrowingStream<GenerateContentResponse, Error> {
  90. return try sendMessageStream([ModelContent(parts: parts)])
  91. }
  92. /// Sends a message using the existing history of this chat as context. If successful, the message
  93. /// and response will be added to the history. If unsuccessful, history will remain unchanged.
  94. /// - Parameter content: The new content to send as a single chat message.
  95. /// - Returns: A stream containing the model's response or an error if an error occurred.
  96. @available(macOS 12.0, *)
  97. public func sendMessageStream(_ content: [ModelContent]) throws
  98. -> AsyncThrowingStream<GenerateContentResponse, Error> {
  99. // Ensure that the new content has the role set.
  100. let newContent: [ModelContent] = content.map(populateContentRole(_:))
  101. // Send the history alongside the new message as context.
  102. let request = history + newContent
  103. let stream = try model.generateContentStream(request)
  104. return AsyncThrowingStream { continuation in
  105. Task {
  106. var aggregatedContent: [ModelContent] = []
  107. do {
  108. for try await chunk in stream {
  109. // Capture any content that's streaming. This should be populated if there's no error.
  110. if let chunkContent = chunk.candidates.first?.content {
  111. aggregatedContent.append(chunkContent)
  112. }
  113. // Pass along the chunk.
  114. continuation.yield(chunk)
  115. }
  116. } catch {
  117. // Rethrow the error that the underlying stream threw. Don't add anything to history.
  118. continuation.finish(throwing: error)
  119. return
  120. }
  121. // Save the request.
  122. appendHistory(contentsOf: newContent)
  123. // Aggregate the content to add it to the history before we finish.
  124. let aggregated = self.aggregatedChunks(aggregatedContent)
  125. self.appendHistory(aggregated)
  126. continuation.finish()
  127. }
  128. }
  129. }
  130. private func aggregatedChunks(_ chunks: [ModelContent]) -> ModelContent {
  131. var parts: [any Part] = []
  132. var combinedText = ""
  133. for aggregate in chunks {
  134. // Loop through all the parts, aggregating the text and adding the images.
  135. for part in aggregate.parts {
  136. switch part {
  137. case let textPart as TextPart:
  138. combinedText += textPart.text
  139. default:
  140. // Don't combine it, just add to the content. If there's any text pending, add that as
  141. // a part.
  142. if !combinedText.isEmpty {
  143. parts.append(TextPart(combinedText))
  144. combinedText = ""
  145. }
  146. parts.append(part)
  147. }
  148. }
  149. }
  150. if !combinedText.isEmpty {
  151. parts.append(TextPart(combinedText))
  152. }
  153. return ModelContent(role: "model", parts: parts)
  154. }
  155. /// Populates the `role` field with `user` if it doesn't exist. Required in chat sessions.
  156. private func populateContentRole(_ content: ModelContent) -> ModelContent {
  157. if content.role != nil {
  158. return content
  159. } else {
  160. return ModelContent(role: "user", parts: content.parts)
  161. }
  162. }
  163. }