// ErrorDetailsView.swift
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import FirebaseVertexAI
import MarkdownUI
import SwiftUI
  17. private extension HarmCategory {
  18. /// Returns a description of the `HarmCategory` suitable for displaying in the UI.
  19. var displayValue: String {
  20. switch self {
  21. case .dangerousContent: "Dangerous content"
  22. case .harassment: "Harassment"
  23. case .hateSpeech: "Hate speech"
  24. case .sexuallyExplicit: "Sexually explicit"
  25. case .civicIntegrity: "Civic integrity"
  26. default: "Unknown HarmCategory: \(rawValue)"
  27. }
  28. }
  29. }
  30. private extension SafetyRating.HarmProbability {
  31. /// Returns a description of the `HarmProbability` suitable for displaying in the UI.
  32. var displayValue: String {
  33. switch self {
  34. case .high: "High"
  35. case .low: "Low"
  36. case .medium: "Medium"
  37. case .negligible: "Negligible"
  38. default: "Unknown HarmProbability: \(rawValue)"
  39. }
  40. }
  41. }
  42. private struct SubtitleFormRow: View {
  43. var title: String
  44. var value: String
  45. var body: some View {
  46. VStack(alignment: .leading) {
  47. Text(title)
  48. .font(.subheadline)
  49. Text(value)
  50. }
  51. }
  52. }
  53. private struct SubtitleMarkdownFormRow: View {
  54. var title: String
  55. var value: String
  56. var body: some View {
  57. VStack(alignment: .leading) {
  58. Text(title)
  59. .font(.subheadline)
  60. Markdown(value)
  61. }
  62. }
  63. }
  64. private struct SafetyRatingsSection: View {
  65. var ratings: [SafetyRating]
  66. var body: some View {
  67. Section("Safety ratings") {
  68. List(ratings, id: \.self) { rating in
  69. HStack {
  70. Text(rating.category.displayValue).font(.subheadline)
  71. Spacer()
  72. Text(rating.probability.displayValue)
  73. }
  74. }
  75. }
  76. }
  77. }
  78. struct ErrorDetailsView: View {
  79. var error: Error
  80. var body: some View {
  81. NavigationView {
  82. Form {
  83. switch error {
  84. case let GenerateContentError.internalError(underlying: underlyingError):
  85. Section("Error Type") {
  86. Text("Internal error")
  87. }
  88. Section("Details") {
  89. SubtitleFormRow(title: "Error description",
  90. value: underlyingError.localizedDescription)
  91. }
  92. case let GenerateContentError.promptBlocked(response: generateContentResponse):
  93. Section("Error Type") {
  94. Text("Your prompt was blocked")
  95. }
  96. Section("Details") {
  97. if let reason = generateContentResponse.promptFeedback?.blockReason {
  98. SubtitleFormRow(title: "Reason for blocking", value: reason.rawValue)
  99. }
  100. if let text = generateContentResponse.text {
  101. SubtitleMarkdownFormRow(title: "Last chunk for the response", value: text)
  102. }
  103. }
  104. if let ratings = generateContentResponse.candidates.first?.safetyRatings {
  105. SafetyRatingsSection(ratings: ratings)
  106. }
  107. case let GenerateContentError.responseStoppedEarly(
  108. reason: finishReason,
  109. response: generateContentResponse
  110. ):
  111. Section("Error Type") {
  112. Text("Response stopped early")
  113. }
  114. Section("Details") {
  115. SubtitleFormRow(title: "Reason for finishing early", value: finishReason.rawValue)
  116. if let text = generateContentResponse.text {
  117. SubtitleMarkdownFormRow(title: "Last chunk for the response", value: text)
  118. }
  119. }
  120. if let ratings = generateContentResponse.candidates.first?.safetyRatings {
  121. SafetyRatingsSection(ratings: ratings)
  122. }
  123. default:
  124. Section("Error Type") {
  125. Text("Some other error")
  126. }
  127. Section("Details") {
  128. SubtitleFormRow(title: "Error description", value: error.localizedDescription)
  129. }
  130. }
  131. }
  132. .navigationTitle("Error details")
  133. .navigationBarTitleDisplayMode(.inline)
  134. }
  135. }
  136. }
  137. #Preview("Response Stopped Early") {
  138. let error = GenerateContentError.responseStoppedEarly(
  139. reason: .maxTokens,
  140. response: GenerateContentResponse(candidates: [
  141. CandidateResponse(content: ModelContent(role: "model", parts:
  142. """
  143. A _hypothetical_ model response.
  144. Cillum ex aliqua amet aliquip labore amet eiusmod consectetur reprehenderit sit commodo.
  145. """),
  146. safetyRatings: [
  147. SafetyRating(category: .dangerousContent, probability: .high),
  148. SafetyRating(category: .harassment, probability: .low),
  149. SafetyRating(category: .hateSpeech, probability: .low),
  150. SafetyRating(category: .sexuallyExplicit, probability: .low),
  151. ],
  152. finishReason: FinishReason.maxTokens,
  153. citationMetadata: nil),
  154. ])
  155. )
  156. return ErrorDetailsView(error: error)
  157. }
  158. #Preview("Prompt Blocked") {
  159. let error = GenerateContentError.promptBlocked(
  160. response: GenerateContentResponse(candidates: [
  161. CandidateResponse(content: ModelContent(role: "model", parts:
  162. """
  163. A _hypothetical_ model response.
  164. Cillum ex aliqua amet aliquip labore amet eiusmod consectetur reprehenderit sit commodo.
  165. """),
  166. safetyRatings: [
  167. SafetyRating(category: .dangerousContent, probability: .high),
  168. SafetyRating(category: .harassment, probability: .low),
  169. SafetyRating(category: .hateSpeech, probability: .low),
  170. SafetyRating(category: .sexuallyExplicit, probability: .low),
  171. ],
  172. finishReason: FinishReason.other,
  173. citationMetadata: nil),
  174. ])
  175. )
  176. return ErrorDetailsView(error: error)
  177. }