diff --git a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenFileDataImage.swift b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGCSImage.swift
similarity index 90%
rename from FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenFileDataImage.swift
rename to FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGCSImage.swift
index 4c87ad84f44..36a6337f0b1 100644
--- a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenFileDataImage.swift
+++ b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGCSImage.swift
@@ -15,7 +15,7 @@ import Foundation
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public struct ImagenFileDataImage {
+public struct ImagenGCSImage {
   public let mimeType: String
 
   public let gcsURI: String
@@ -26,7 +26,7 @@ public struct ImagenFileDataImage {
 }
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenFileDataImage: ImagenImageRepresentable {
+extension ImagenGCSImage: ImagenImageRepresentable {
   // TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
   // input (upscaling / editing).
   var _internalImagenImage: _InternalImagenImage {
@@ -35,12 +35,12 @@ extension ImagenFileDataImage: ImagenImageRepresentable {
 }
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenFileDataImage: Equatable {}
+extension ImagenGCSImage: Equatable {}
 
 // MARK: - Codable Conformances
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenFileDataImage: Decodable {
+extension ImagenGCSImage: Decodable {
   enum CodingKeys: String, CodingKey {
     case mimeType
     case gcsURI = "gcsUri"
diff --git a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationConfig.swift b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationConfig.swift
index 62050bb59d0..a36df61eb0d 100644
--- a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationConfig.swift
+++ b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationConfig.swift
@@ -14,14 +14,14 @@
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
 public struct ImagenGenerationConfig {
-  public var numberOfImages: Int?
   public var negativePrompt: String?
-  public var imageFormat: ImagenImageFormat?
+  public var numberOfImages: Int?
   public var aspectRatio: ImagenAspectRatio?
+  public var imageFormat: ImagenImageFormat?
   public var addWatermark: Bool?
 
-  public init(numberOfImages: Int? = nil, negativePrompt: String? = nil,
-              imageFormat: ImagenImageFormat? = nil, aspectRatio: ImagenAspectRatio? = nil,
+  public init(negativePrompt: String? = nil, numberOfImages: Int? = nil,
+              aspectRatio: ImagenAspectRatio? = nil, imageFormat: ImagenImageFormat? = nil,
               addWatermark: Bool? = nil) {
     self.numberOfImages = numberOfImages
     self.negativePrompt = negativePrompt
diff --git a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineDataImage.swift b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineImage.swift
similarity index 91%
rename from FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineDataImage.swift
rename to FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineImage.swift
index 2a1b6f9ed27..152c17ddd0a 100644
--- a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineDataImage.swift
+++ b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineImage.swift
@@ -15,7 +15,7 @@ import Foundation
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public struct ImagenInlineDataImage {
+public struct ImagenInlineImage {
   public let mimeType: String
 
   public let data: Data
@@ -30,7 +30,7 @@ public struct ImagenInlineDataImage {
 }
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenInlineDataImage: ImagenImageRepresentable {
+extension ImagenInlineImage: ImagenImageRepresentable {
   // TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
   // input (upscaling / editing).
   var _internalImagenImage: _InternalImagenImage {
@@ -43,12 +43,12 @@ extension ImagenInlineDataImage: ImagenImageRepresentable {
 }
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenInlineDataImage: Equatable {}
+extension ImagenInlineImage: Equatable {}
 
 // MARK: - Codable Conformances
 
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-extension ImagenInlineDataImage: Decodable {
+extension ImagenInlineImage: Decodable {
   enum CodingKeys: CodingKey {
     case mimeType
     case bytesBase64Encoded
diff --git a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModel.swift b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModel.swift
index 785e0fd6e80..b136a588ab6 100644
--- a/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModel.swift
+++ b/FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModel.swift
@@ -24,6 +24,8 @@ public final class ImagenModel {
   /// The backing service responsible for sending and receiving model requests to the backend.
   let generativeAIService: GenerativeAIService
 
+  let generationConfig: ImagenGenerationConfig?
+
   let safetySettings: ImagenSafetySettings?
 
   /// Configuration parameters for sending requests to the backend.
@@ -32,6 +34,7 @@ public final class ImagenModel {
   init(name: String,
        projectID: String,
        apiKey: String,
+       generationConfig: ImagenGenerationConfig?,
        safetySettings: ImagenSafetySettings?,
        requestOptions: RequestOptions,
        appCheck: AppCheckInterop?,
@@ -45,13 +48,13 @@ public final class ImagenModel {
       auth: auth,
       urlSession: urlSession
     )
+    self.generationConfig = generationConfig
     self.safetySettings = safetySettings
     self.requestOptions = requestOptions
   }
 
-  public func generateImages(prompt: String,
-                             generationConfig: ImagenGenerationConfig? = nil) async throws
-    -> ImagenGenerationResponse<ImagenInlineDataImage> {
+  public func generateImages(prompt: String) async throws
+    -> ImagenGenerationResponse<ImagenInlineImage> {
     return try await generateImages(
       prompt: prompt,
       parameters: ImagenModel.imageGenerationParameters(
@@ -62,13 +65,12 @@ public final class ImagenModel {
       )
     )
   }
-  public func generateImages(prompt: String, storageURI: String,
-                             generationConfig: ImagenGenerationConfig? = nil) async throws
-    -> ImagenGenerationResponse<ImagenFileDataImage> {
+  public func generateImages(prompt: String, gcsUri: String) async throws
+    -> ImagenGenerationResponse<ImagenGCSImage> {
     return try await generateImages(
       prompt: prompt,
       parameters: ImagenModel.imageGenerationParameters(
-        storageURI: storageURI,
+        storageURI: gcsUri,
         generationConfig: generationConfig,
         safetySettings: safetySettings
       )
diff --git a/FirebaseVertexAI/Sources/VertexAI.swift b/FirebaseVertexAI/Sources/VertexAI.swift
index a2d3c62f529..c80db0c2321 100644
--- a/FirebaseVertexAI/Sources/VertexAI.swift
+++ b/FirebaseVertexAI/Sources/VertexAI.swift
@@ -104,12 +104,14 @@ public class VertexAI {
     )
   }
 
-  public func imagenModel(modelName: String, safetySettings: ImagenSafetySettings? = nil,
+  public func imagenModel(modelName: String, generationConfig: ImagenGenerationConfig? = nil,
+                          safetySettings: ImagenSafetySettings? = nil,
                           requestOptions: RequestOptions = RequestOptions()) -> ImagenModel {
     return ImagenModel(
       name: modelResourceName(modelName: modelName),
       projectID: projectID,
       apiKey: apiKey,
+      generationConfig: generationConfig,
      safetySettings: safetySettings,
       requestOptions: requestOptions,
       appCheck: appCheck,
diff --git a/FirebaseVertexAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift b/FirebaseVertexAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift
index bdcde8a9a2a..bd41e22c331 100644
--- a/FirebaseVertexAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift
+++ b/FirebaseVertexAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift
@@ -39,6 +39,11 @@ final class IntegrationTests: XCTestCase {
     SafetySetting(harmCategory: .civicIntegrity, threshold: .blockLowAndAbove),
   ]
 
+  let imagenGenerationConfig = ImagenGenerationConfig(
+    aspectRatio: .landscape16x9,
+    imageFormat: .jpeg(compressionQuality: 70)
+  )
+
   var vertex: VertexAI!
   var model: GenerativeModel!
   var imagenModel: ImagenModel!
@@ -63,6 +68,7 @@ final class IntegrationTests: XCTestCase {
     )
     imagenModel = vertex.imagenModel(
       modelName: "imagen-3.0-fast-generate-001",
+      generationConfig: imagenGenerationConfig,
       safetySettings: ImagenSafetySettings(
         safetyFilterLevel: .blockLowAndAbove,
         personFilterLevel: .blockAll
@@ -253,15 +259,8 @@ final class IntegrationTests: XCTestCase {
     overlooking a vast African savanna at sunset. Golden hour light, long shadows, sharp focus on
     the lion, shallow depth of field, detailed fur texture, DSLR, 85mm lens.
     """
-    let generationConfig = ImagenGenerationConfig(
-      imageFormat: .jpeg(compressionQuality: 70),
-      aspectRatio: .landscape16x9
-    )
 
-    let response = try await imagenModel.generateImages(
-      prompt: imagePrompt,
-      generationConfig: generationConfig
-    )
+    let response = try await imagenModel.generateImages(prompt: imagePrompt)
 
     XCTAssertNil(response.filteredReason)
     XCTAssertEqual(response.images.count, 1)
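// Usage sketch (illustrative, not part of the patch): after this change an ImagenGenerationConfig
// is supplied once when the model is created via imagenModel(modelName:generationConfig:safetySettings:requestOptions:),
// and generateImages(prompt:) no longer takes a per-call config. The prompt text and the
// surrounding function are assumptions for the example; Firebase is assumed to be configured.
import FirebaseVertexAI

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func generateLandscapeImage() async throws {
  let config = ImagenGenerationConfig(
    aspectRatio: .landscape16x9,
    imageFormat: .jpeg(compressionQuality: 70)
  )
  let model = VertexAI.vertexAI().imagenModel(
    modelName: "imagen-3.0-fast-generate-001",
    generationConfig: config
  )
  // Returns ImagenGenerationResponse<ImagenInlineImage>; each image carries its bytes in `data`.
  let response = try await model.generateImages(prompt: "A photorealistic lion at sunset")
  for image in response.images {
    print(image.mimeType, image.data.count)
  }
}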
""" - let generationConfig = ImagenGenerationConfig( - imageFormat: .jpeg(compressionQuality: 70), - aspectRatio: .landscape16x9 - ) - let response = try await imagenModel.generateImages( - prompt: imagePrompt, - generationConfig: generationConfig - ) + let response = try await imagenModel.generateImages(prompt: imagePrompt) XCTAssertNil(response.filteredReason) XCTAssertEqual(response.images.count, 1) diff --git a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImageGenerationParametersTests.swift b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImageGenerationParametersTests.swift index d0908ae81ee..028356c7433 100644 --- a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImageGenerationParametersTests.swift +++ b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImageGenerationParametersTests.swift @@ -77,10 +77,10 @@ final class ImageGenerationParametersTests: XCTestCase { let aspectRatio = ImagenAspectRatio.landscape16x9 let addWatermark = true let generationConfig = ImagenGenerationConfig( - numberOfImages: sampleCount, negativePrompt: negativePrompt, - imageFormat: imageFormat, + numberOfImages: sampleCount, aspectRatio: aspectRatio, + imageFormat: imageFormat, addWatermark: addWatermark ) let expectedParameters = ImageGenerationParameters( @@ -146,10 +146,10 @@ final class ImageGenerationParametersTests: XCTestCase { let aspectRatio = ImagenAspectRatio.portrait3x4 let addWatermark = false let generationConfig = ImagenGenerationConfig( - numberOfImages: sampleCount, negativePrompt: negativePrompt, - imageFormat: imageFormat, + numberOfImages: sampleCount, aspectRatio: aspectRatio, + imageFormat: imageFormat, addWatermark: addWatermark ) let safetyFilterLevel = ImagenSafetyFilterLevel.blockNone diff --git a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenFileDataImageTests.swift b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGCSImageTests.swift similarity index 83% rename from FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenFileDataImageTests.swift rename to FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGCSImageTests.swift index f3e1bdc458d..badd1df5461 100644 --- a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenFileDataImageTests.swift +++ b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGCSImageTests.swift @@ -17,7 +17,7 @@ import XCTest @testable import FirebaseVertexAI @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *) -final class ImagenFileDataImageTests: XCTestCase { +final class ImagenGCSImageTests: XCTestCase { let decoder = JSONDecoder() func testDecodeImage_gcsURI() throws { @@ -31,7 +31,7 @@ final class ImagenFileDataImageTests: XCTestCase { """ let jsonData = try XCTUnwrap(json.data(using: .utf8)) - let image = try decoder.decode(ImagenFileDataImage.self, from: jsonData) + let image = try decoder.decode(ImagenGCSImage.self, from: jsonData) XCTAssertEqual(image.mimeType, mimeType) XCTAssertEqual(image.gcsURI, gcsURI) @@ -49,10 +49,10 @@ final class ImagenFileDataImageTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) do { - _ = try decoder.decode(ImagenFileDataImage.self, from: jsonData) + _ = try decoder.decode(ImagenGCSImage.self, from: jsonData) XCTFail("Expected an error; none thrown.") } catch let DecodingError.keyNotFound(codingKey, _) { - let codingKey = try XCTUnwrap(codingKey as? ImagenFileDataImage.CodingKeys) + let codingKey = try XCTUnwrap(codingKey as? 
ImagenGCSImage.CodingKeys) XCTAssertEqual(codingKey, .gcsURI) } catch { XCTFail("Expected a DecodingError.keyNotFound error; got \(error).") @@ -68,10 +68,10 @@ final class ImagenFileDataImageTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) do { - _ = try decoder.decode(ImagenFileDataImage.self, from: jsonData) + _ = try decoder.decode(ImagenGCSImage.self, from: jsonData) XCTFail("Expected an error; none thrown.") } catch let DecodingError.keyNotFound(codingKey, _) { - let codingKey = try XCTUnwrap(codingKey as? ImagenFileDataImage.CodingKeys) + let codingKey = try XCTUnwrap(codingKey as? ImagenGCSImage.CodingKeys) XCTAssertEqual(codingKey, .mimeType) } catch { XCTFail("Expected a DecodingError.keyNotFound error; got \(error).") diff --git a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationRequestTests.swift b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationRequestTests.swift index bc51120cf6c..14c59393ef6 100644 --- a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationRequestTests.swift +++ b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationRequestTests.swift @@ -44,7 +44,7 @@ final class ImagenGenerationRequestTests: XCTestCase { } func testInitializeRequest_inlineDataImage() throws { - let request = ImagenGenerationRequest( + let request = ImagenGenerationRequest( model: modelName, options: requestOptions, instances: [instance], @@ -62,7 +62,7 @@ final class ImagenGenerationRequestTests: XCTestCase { } func testInitializeRequest_fileDataImage() throws { - let request = ImagenGenerationRequest( + let request = ImagenGenerationRequest( model: modelName, options: requestOptions, instances: [instance], @@ -82,7 +82,7 @@ final class ImagenGenerationRequestTests: XCTestCase { // MARK: - Encoding Tests func testEncodeRequest_inlineDataImage() throws { - let request = ImagenGenerationRequest( + let request = ImagenGenerationRequest( model: modelName, options: RequestOptions(), instances: [instance], @@ -110,7 +110,7 @@ final class ImagenGenerationRequestTests: XCTestCase { } func testEncodeRequest_fileDataImage() throws { - let request = ImagenGenerationRequest( + let request = ImagenGenerationRequest( model: modelName, options: RequestOptions(), instances: [instance], diff --git a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationResponseTests.swift b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationResponseTests.swift index 9f703ee9b0f..2dc1fc3008b 100644 --- a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationResponseTests.swift +++ b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenGenerationResponseTests.swift @@ -23,7 +23,7 @@ final class ImagenGenerationResponseTests: XCTestCase { func testDecodeResponse_oneBase64Image_noneFiltered() throws { let mimeType = "image/png" let bytesBase64Encoded = "dGVzdC1iYXNlNjQtZGF0YQ==" - let image = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded) + let image = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded) let json = """ { "predictions": [ @@ -37,7 +37,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -50,9 +50,9 @@ final class ImagenGenerationResponseTests: XCTestCase { let bytesBase64Encoded1 = "dGVzdC1iYXNlNjQtYnl0ZXMtMQ==" let bytesBase64Encoded2 = "dGVzdC1iYXNlNjQtYnl0ZXMtMg==" let bytesBase64Encoded3 = 
"dGVzdC1iYXNlNjQtYnl0ZXMtMw==" - let image1 = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded1) - let image2 = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded2) - let image3 = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded3) + let image1 = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded1) + let image2 = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded2) + let image3 = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded3) let json = """ { "predictions": [ @@ -74,7 +74,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -86,8 +86,8 @@ final class ImagenGenerationResponseTests: XCTestCase { let mimeType = "image/png" let bytesBase64Encoded1 = "dGVzdC1iYXNlNjQtYnl0ZXMtMQ==" let bytesBase64Encoded2 = "dGVzdC1iYXNlNjQtYnl0ZXMtMg==" - let image1 = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded1) - let image2 = ImagenInlineDataImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded2) + let image1 = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded1) + let image2 = ImagenInlineImage(mimeType: mimeType, bytesBase64Encoded: bytesBase64Encoded2) let raiFilteredReason = """ Your current safety filter threshold filtered out 2 generated images. You will not be charged \ for blocked images. Try rephrasing the prompt. If you think this was an error, send feedback. @@ -112,7 +112,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -124,8 +124,8 @@ final class ImagenGenerationResponseTests: XCTestCase { let mimeType = "image/png" let gcsURI1 = "gs://test-bucket/images/123456789/sample_0.png" let gcsURI2 = "gs://test-bucket/images/123456789/sample_1.png" - let image1 = ImagenFileDataImage(mimeType: mimeType, gcsURI: gcsURI1) - let image2 = ImagenFileDataImage(mimeType: mimeType, gcsURI: gcsURI2) + let image1 = ImagenGCSImage(mimeType: mimeType, gcsURI: gcsURI1) + let image2 = ImagenGCSImage(mimeType: mimeType, gcsURI: gcsURI2) let json = """ { "predictions": [ @@ -143,7 +143,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -169,7 +169,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -182,7 +182,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -208,7 +208,7 @@ final class ImagenGenerationResponseTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) @@ -230,7 +230,7 @@ final class ImagenGenerationResponseTests: 
XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let response = try decoder.decode( - ImagenGenerationResponse.self, + ImagenGenerationResponse.self, from: jsonData ) diff --git a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineDataImageTests.swift b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineImageTests.swift similarity index 83% rename from FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineDataImageTests.swift rename to FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineImageTests.swift index 697fdc01a3c..fa8072ee497 100644 --- a/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineDataImageTests.swift +++ b/FirebaseVertexAI/Tests/Unit/Types/Imagen/ImagenInlineImageTests.swift @@ -17,7 +17,7 @@ import XCTest @testable import FirebaseVertexAI @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *) -final class ImagenInlineDataImageTests: XCTestCase { +final class ImagenInlineImageTests: XCTestCase { let decoder = JSONDecoder() func testDecodeImage_bytesBase64Encoded() throws { @@ -31,7 +31,7 @@ final class ImagenInlineDataImageTests: XCTestCase { """ let jsonData = try XCTUnwrap(json.data(using: .utf8)) - let image = try decoder.decode(ImagenInlineDataImage.self, from: jsonData) + let image = try decoder.decode(ImagenInlineImage.self, from: jsonData) XCTAssertEqual(image.mimeType, mimeType) XCTAssertEqual(image.data.base64EncodedString(), bytesBase64Encoded) @@ -49,10 +49,10 @@ final class ImagenInlineDataImageTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) do { - _ = try decoder.decode(ImagenInlineDataImage.self, from: jsonData) + _ = try decoder.decode(ImagenInlineImage.self, from: jsonData) XCTFail("Expected an error; none thrown.") } catch let DecodingError.keyNotFound(codingKey, _) { - let codingKey = try XCTUnwrap(codingKey as? ImagenInlineDataImage.CodingKeys) + let codingKey = try XCTUnwrap(codingKey as? ImagenInlineImage.CodingKeys) XCTAssertEqual(codingKey, .bytesBase64Encoded) } catch { XCTFail("Expected a DecodingError.keyNotFound error; got \(error).") @@ -68,10 +68,10 @@ final class ImagenInlineDataImageTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) do { - _ = try decoder.decode(ImagenInlineDataImage.self, from: jsonData) + _ = try decoder.decode(ImagenInlineImage.self, from: jsonData) XCTFail("Expected an error; none thrown.") } catch let DecodingError.keyNotFound(codingKey, _) { - let codingKey = try XCTUnwrap(codingKey as? ImagenInlineDataImage.CodingKeys) + let codingKey = try XCTUnwrap(codingKey as? ImagenInlineImage.CodingKeys) XCTAssertEqual(codingKey, .mimeType) } catch { XCTFail("Expected a DecodingError.keyNotFound error; got \(error).")
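// Usage sketch (illustrative, not part of the patch): the Cloud Storage overload introduced above,
// generateImages(prompt:gcsUri:), returns ImagenGenerationResponse<ImagenGCSImage>, whose images
// reference their output objects by `gcsURI` instead of carrying bytes. The bucket path, prompt,
// and safety settings below are placeholder assumptions.
import FirebaseVertexAI

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func generateImagesToCloudStorage() async throws {
  let model = VertexAI.vertexAI().imagenModel(
    modelName: "imagen-3.0-fast-generate-001",
    generationConfig: ImagenGenerationConfig(numberOfImages: 2),
    safetySettings: ImagenSafetySettings(
      safetyFilterLevel: .blockLowAndAbove,
      personFilterLevel: .blockAll
    )
  )
  let response = try await model.generateImages(
    prompt: "A watercolor painting of a lighthouse on a cliff",
    gcsUri: "gs://my-bucket/imagen-output"
  )
  if let filteredReason = response.filteredReason {
    print("Some images were filtered: \(filteredReason)")
  }
  for image in response.images {
    // Each result points at its generated object, e.g. "image/png gs://my-bucket/imagen-output/sample_0.png".
    print(image.mimeType, image.gcsURI)
  }
}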