From 12af6ba5ed9d379b3e962202c0ed927aca7a778f Mon Sep 17 00:00:00 2001
From: Mel <78050250+mludowise-stripe@users.noreply.github.com>
Date: Wed, 15 Jun 2022 09:31:57 -0700
Subject: [PATCH] Save selfie to backend (#1209)

Saves the uploaded selfie file IDs and metadata to the backend.
---
 .../Source/CameraExifMetadata.swift           | 35 ++++++--------
 .../StripeIdentity.xcodeproj/project.pbxproj  |  6 ++-
 .../API Bindings/IdentityAPIClient.swift      | 22 +++++++++
 .../VerificationPageClearData.swift           | 11 +++++
 .../VerificationPageCollectedData.swift       | 13 +++++
 .../VerificationPageDataFace.swift            | 45 ++++++++++++++++++
 .../VerificationPageDataUpdate.swift          |  9 ++++
 .../API Bindings/SelfieUploader+API.swift     | 37 +++++++++++++++
 .../FaceScanner/FaceCaptureData.swift         |  4 ++
 .../ImageScanningSession.swift                |  4 ++
 .../ImageUploaders/DocumentUploader.swift     | 47 +++++++++++++------
 .../IdentityImageUploader.swift               | 15 +++---
 .../ImageUploaders/SelfieUploader.swift       | 43 +++++++----------
 .../VerificationSheetController.swift         | 20 ++++++--
 .../SelfieCaptureViewController.swift         | 12 ++++-
 .../VerificationSheetControllerMock.swift     |  3 +-
 .../VerificationSheetControllerTest.swift     |  1 +
 modules.yaml                                  |  2 +-
 18 files changed, 251 insertions(+), 78 deletions(-)
 create mode 100644 StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataFace.swift

diff --git a/StripeCameraCore/StripeCameraCore/Source/CameraExifMetadata.swift b/StripeCameraCore/StripeCameraCore/Source/CameraExifMetadata.swift
index ecf048781f8..a6888b71a6c 100644
--- a/StripeCameraCore/StripeCameraCore/Source/CameraExifMetadata.swift
+++ b/StripeCameraCore/StripeCameraCore/Source/CameraExifMetadata.swift
@@ -10,33 +10,26 @@ import ImageIO
 import CoreMedia
 
 /// A helper to extract properties from an EXIF metadata dictionary
-@_spi(STP) public struct CameraExifMetadata {
-    public let exifDictionary: [CFString: Any]
-
-    // MARK: - Init
+@_spi(STP) public struct CameraExifMetadata: Equatable {
+    public let brightnessValue: Double?
+    public let focalLength: Double?
+    public let lensModel: String?
+}
 
-    public init?(exifDictionary: [CFString: Any]?) {
+public extension CameraExifMetadata {
+    init?(exifDictionary: [CFString: Any]?) {
         guard let exifDictionary = exifDictionary else {
             return nil
         }
 
-        self.exifDictionary = exifDictionary
-    }
-
-    public init?(sampleBuffer: CMSampleBuffer) {
-        self.init(exifDictionary: CMGetAttachment(sampleBuffer, key: kCGImagePropertyExifDictionary, attachmentModeOut: nil) as? [CFString: Any])
-    }
-
-    // MARK: - Computed Properties
-    public var brightnessValue: Double? {
-        return exifDictionary[kCGImagePropertyExifBrightnessValue] as? Double
+        self.init(
+            brightnessValue: exifDictionary[kCGImagePropertyExifBrightnessValue] as? Double,
+            focalLength: exifDictionary[kCGImagePropertyExifFocalLength] as? Double,
+            lensModel: exifDictionary[kCGImagePropertyExifLensModel] as? String
+        )
     }
 
-    public var lensModel: String? {
-        return exifDictionary[kCGImagePropertyExifLensModel] as? String
-    }
-
-    public var focalLength: Double? {
-        return exifDictionary[kCGImagePropertyExifFocalLength] as? Double
+    init?(sampleBuffer: CMSampleBuffer) {
+        self.init(exifDictionary: CMGetAttachment(sampleBuffer, key: kCGImagePropertyExifDictionary, attachmentModeOut: nil) as? [CFString: Any])
     }
 }
diff --git a/StripeIdentity/StripeIdentity.xcodeproj/project.pbxproj b/StripeIdentity/StripeIdentity.xcodeproj/project.pbxproj
index 17d6bdb3a4c..90f81793f3c 100644
--- a/StripeIdentity/StripeIdentity.xcodeproj/project.pbxproj
+++ b/StripeIdentity/StripeIdentity.xcodeproj/project.pbxproj
@@ -22,6 +22,7 @@
 		E61676FF2850023100C9E44A /* IdentityAnalytic.swift in Sources */ = {isa = PBXBuildFile; fileRef = E61676FE2850023100C9E44A /* IdentityAnalytic.swift */; };
 		E616770228500F3400C9E44A /* VerificationSheetController+Analytics.swift in Sources */ = {isa = PBXBuildFile; fileRef = E616770128500F3400C9E44A /* VerificationSheetController+Analytics.swift */; };
 		E61676F9284FFBB500C9E44A /* TimeInterval+StripeIdentity.swift in Sources */ = {isa = PBXBuildFile; fileRef = E61676F8284FFBB500C9E44A /* TimeInterval+StripeIdentity.swift */; };
+		E616770A2853F6A700C9E44A /* VerificationPageDataFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = E61677092853F6A700C9E44A /* VerificationPageDataFace.swift */; };
 		E616770228500F3400C9E44A /* IdentityAnalyticsClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = E616770128500F3400C9E44A /* IdentityAnalyticsClient.swift */; };
 		E61ADAD3270F6293004ED998 /* VerificationSheetController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E61ADAD2270F6293004ED998 /* VerificationSheetController.swift */; };
 		E61C32462797AEA2008A30D4 /* DocumentFileUploadViewControllerTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = E61C32452797AEA2008A30D4 /* DocumentFileUploadViewControllerTest.swift */; };
@@ -240,6 +241,7 @@
 		E616770128500F3400C9E44A /* VerificationSheetController+Analytics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VerificationSheetController+Analytics.swift"; sourceTree = "<group>"; };
 		E61676F8284FFBB500C9E44A /* TimeInterval+StripeIdentity.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "TimeInterval+StripeIdentity.swift"; sourceTree = "<group>"; };
 		E616770128500F3400C9E44A /* IdentityAnalyticsClient.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IdentityAnalyticsClient.swift; sourceTree = "<group>"; };
+		E61677092853F6A700C9E44A /* VerificationPageDataFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VerificationPageDataFace.swift; sourceTree = "<group>"; };
 		E61ADAD2270F6293004ED998 /* VerificationSheetController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VerificationSheetController.swift; sourceTree = "<group>"; };
 		E61C32452797AEA2008A30D4 /* DocumentFileUploadViewControllerTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DocumentFileUploadViewControllerTest.swift; sourceTree = "<group>"; };
 		E61EA92926A0D89900CAEE52 /* FBSnapshotTestCase.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = FBSnapshotTestCase.xcframework; path = ../Carthage/Build/FBSnapshotTestCase.xcframework; sourceTree = "<group>"; };
@@ -739,9 +741,10 @@
 		E6548F5D2731E4D500F399B2 /* VerificationPageDataUpdate */ = {
 			isa = PBXGroup;
 			children = (
-				E6B906ED27CAB4F200D0A703 /* VerificationPageCollectedData.swift */,
 				E6B9071127D1C95B00D0A703 /* VerificationPageClearData.swift */,
+				E6B906ED27CAB4F200D0A703 /* VerificationPageCollectedData.swift */,
 				E657B57F276416FD00134033 /* VerificationPageDataDocumentFileData.swift */,
+				E61677092853F6A700C9E44A /* VerificationPageDataFace.swift */,
 				E6548F5E2731E4E500F399B2 /* VerificationPageDataUpdate.swift */,
 			);
 			path = VerificationPageDataUpdate;
@@ -1366,6 +1369,7 @@
 				E6A50DC027B785B800D7BDED /* HTMLViewWithIconLabels.swift in Sources */,
 				E6548F522731D9B400F399B2 /* VerificationPageDataRequirements.swift in Sources */,
 				E61676F9284FFBB500C9E44A /* TimeInterval+StripeIdentity.swift in Sources */,
+				E616770A2853F6A700C9E44A /* VerificationPageDataFace.swift in Sources */,
 				E657B580276416FD00134033 /* VerificationPageDataDocumentFileData.swift in Sources */,
 				E61676E62849894600C9E44A /* VerificationPageStaticContentSelfieModels.swift in Sources */,
 				E6C7BB2527A8BE2E000807A6 /* MLModelUnexpectedOutputError.swift in Sources */,
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/IdentityAPIClient.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/IdentityAPIClient.swift
index a2687123ceb..e00737dfeca 100644
--- a/StripeIdentity/StripeIdentity/Source/API Bindings/IdentityAPIClient.swift
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/IdentityAPIClient.swift
@@ -86,6 +86,28 @@ final class IdentityAPIClientImpl: IdentityAPIClient {
     func updateIdentityVerificationPageData(
         updating verificationData: StripeAPI.VerificationPageDataUpdate
     ) -> Promise<StripeAPI.VerificationPageData> {
+        // TODO(mludowise|IDPROD-4030): Remove API v1 check when selfie is production ready
+        guard apiVersion > 1 else {
+            // Translate into v1 API models to avoid API error
+            return apiClient.post(
+                resource: APIEndpointVerificationPageData(id: verificationSessionId),
+                object: StripeAPI.VerificationPageDataUpdateV1(
+                    clearData: .init(
+                        biometricConsent: verificationData.clearData?.biometricConsent,
+                        idDocumentBack: verificationData.clearData?.idDocumentBack,
+                        idDocumentFront: verificationData.clearData?.idDocumentFront,
+                        idDocumentType: verificationData.clearData?.idDocumentType
+                    ),
+                    collectedData: .init(
+                        biometricConsent: verificationData.collectedData?.biometricConsent,
+                        idDocumentBack: verificationData.collectedData?.idDocumentBack,
+                        idDocumentFront: verificationData.collectedData?.idDocumentFront,
+                        idDocumentType: verificationData.collectedData?.idDocumentType
+                    )
+                )
+            )
+        }
+
         return apiClient.post(
             resource: APIEndpointVerificationPageData(id: verificationSessionId),
             object: verificationData
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageClearData.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageClearData.swift
index 69999008fe5..9451e01724e 100644
--- a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageClearData.swift
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageClearData.swift
@@ -10,6 +10,16 @@ import Foundation
 extension StripeAPI {
     struct VerificationPageClearData: Encodable, Equatable {
+        let biometricConsent: Bool?
+        let face: Bool?
+        let idDocumentBack: Bool?
+        let idDocumentFront: Bool?
+        let idDocumentType: Bool?
+    }
+
+    // TODO(mludowise|IDPROD-4030): Remove v1 API models when selfie is production ready
+    /// API model compatible with V1 Identity endpoints that won't encode a `face` property
+    struct VerificationPageClearDataV1: Encodable, Equatable {
         let biometricConsent: Bool?
         let idDocumentBack: Bool?
         let idDocumentFront: Bool?
         let idDocumentType: Bool?
@@ -21,6 +31,7 @@ extension StripeAPI.VerificationPageClearData {
     init(clearFields fields: Set<StripeAPI.VerificationPageFieldType>) {
         self.init(
             biometricConsent: fields.contains(.biometricConsent),
+            face: fields.contains(.face),
             idDocumentBack: fields.contains(.idDocumentBack),
             idDocumentFront: fields.contains(.idDocumentFront),
             idDocumentType: fields.contains(.idDocumentType)
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageCollectedData.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageCollectedData.swift
index 03cb570e8c7..e312da7ba52 100644
--- a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageCollectedData.swift
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageCollectedData.swift
@@ -12,22 +12,34 @@ extension StripeAPI {
     struct VerificationPageCollectedData: Encodable, Equatable {
 
         let biometricConsent: Bool?
+        let face: VerificationPageDataFace?
         let idDocumentBack: VerificationPageDataDocumentFileData?
         let idDocumentFront: VerificationPageDataDocumentFileData?
         let idDocumentType: DocumentType?
 
         init(
             biometricConsent: Bool? = nil,
+            face: VerificationPageDataFace? = nil,
             idDocumentBack: VerificationPageDataDocumentFileData? = nil,
             idDocumentFront: VerificationPageDataDocumentFileData? = nil,
             idDocumentType: DocumentType? = nil
         ) {
             self.biometricConsent = biometricConsent
+            self.face = face
             self.idDocumentBack = idDocumentBack
             self.idDocumentFront = idDocumentFront
             self.idDocumentType = idDocumentType
         }
     }
+
+    // TODO(mludowise|IDPROD-4030): Remove v1 API models when selfie is production ready
+    /// API model compatible with V1 Identity endpoints that won't encode a `face` property
+    struct VerificationPageCollectedDataV1: Encodable, Equatable {
+        let biometricConsent: Bool?
+        let idDocumentBack: VerificationPageDataDocumentFileData?
+        let idDocumentFront: VerificationPageDataDocumentFileData?
+        let idDocumentType: DocumentType?
+    }
 }
 
 extension StripeAPI.VerificationPageCollectedData {
@@ -38,6 +50,7 @@ extension StripeAPI.VerificationPageCollectedData {
     func merging(_ otherData: StripeAPI.VerificationPageCollectedData) -> StripeAPI.VerificationPageCollectedData {
         return StripeAPI.VerificationPageCollectedData(
             biometricConsent: otherData.biometricConsent ?? self.biometricConsent,
+            face: otherData.face ?? self.face,
             idDocumentBack: otherData.idDocumentBack ?? self.idDocumentBack,
             idDocumentFront: otherData.idDocumentFront ?? self.idDocumentFront,
             idDocumentType: otherData.idDocumentType ?? self.idDocumentType
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataFace.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataFace.swift
new file mode 100644
index 00000000000..25550f9efb1
--- /dev/null
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataFace.swift
@@ -0,0 +1,45 @@
+//
+//  VerificationPageDataFace.swift
+//  StripeIdentity
+//
+//  Created by Mel Ludowise on 6/10/22.
+//
+
+import Foundation
+@_spi(STP) import StripeCore
+
+extension StripeAPI {
+    struct VerificationPageDataFace: Encodable, Equatable {
+
+        /// File ID of uploaded image for best selfie frame. This will be cropped to the bounds of the face in the image.
+        let bestHighResImage: String
+        /// File ID of uploaded image for best selfie frame. This will be un-cropped.
+        let bestLowResImage: String
+        /// File ID of uploaded image for first selfie frame. This will be cropped to the bounds of the face in the image.
+        let firstHighResImage: String
+        /// File ID of uploaded image for first selfie frame. This will be un-cropped.
+        let firstLowResImage: String
+        /// File ID of uploaded image for last selfie frame. This will be cropped to the bounds of the face in the image.
+        let lastHighResImage: String
+        /// File ID of uploaded image for last selfie frame. This will be un-cropped.
+        let lastLowResImage: String
+        /// FaceDetector score for the best selfie frame.
+        let bestFaceScore: TwoDecimalFloat
+        /// Variance of the FaceDetector scores over all selfie frames.
+        let faceScoreVariance: TwoDecimalFloat
+        /// The total number of selfie frames taken.
+        let numFrames: Int
+        /// Camera brightness value for the best selfie frame.
+        let bestBrightnessValue: TwoDecimalFloat?
+        /// Camera lens model for the best selfie frame.
+        let bestCameraLensModel: String?
+        /// Camera exposure duration for the best selfie frame.
+        let bestExposureDuration: Int?
+        /// Camera exposure ISO for the best selfie frame.
+        let bestExposureIso: TwoDecimalFloat?
+        /// Camera focal length for the best selfie frame.
+        let bestFocalLength: TwoDecimalFloat?
+        /// If the best selfie frame was taken by a virtual camera.
+        let bestIsVirtualCamera: Bool?
+    }
+}
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataUpdate.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataUpdate.swift
index 84e975da439..e62d5781c49 100644
--- a/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataUpdate.swift
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/Models/VerificationPageDataUpdate/VerificationPageDataUpdate.swift
@@ -14,4 +14,13 @@ extension StripeAPI {
         let clearData: VerificationPageClearData?
         let collectedData: VerificationPageCollectedData?
     }
+
+    // TODO(mludowise|IDPROD-4030): Remove v1 API models when selfie is production ready
+    /// API model compatible with V1 Identity endpoints that won't encode a `face` property
+    struct VerificationPageDataUpdateV1: Encodable, Equatable {
+
+        let clearData: VerificationPageClearDataV1?
+        let collectedData: VerificationPageCollectedDataV1?
+    }
+
 }
diff --git a/StripeIdentity/StripeIdentity/Source/API Bindings/SelfieUploader+API.swift b/StripeIdentity/StripeIdentity/Source/API Bindings/SelfieUploader+API.swift
index 3494ee78979..5e593b81bb5 100644
--- a/StripeIdentity/StripeIdentity/Source/API Bindings/SelfieUploader+API.swift
+++ b/StripeIdentity/StripeIdentity/Source/API Bindings/SelfieUploader+API.swift
@@ -7,6 +7,7 @@
 import Foundation
 import UIKit
+@_spi(STP) import StripeCore
 @_spi(STP) import StripeCameraCore
 
 extension IdentityImageUploader.Configuration {
@@ -21,3 +22,39 @@ extension IdentityImageUploader.Configuration {
         )
     }
 }
+
+extension StripeAPI.VerificationPageDataFace {
+    init(
+        uploadedFiles: SelfieUploader.FileData,
+        capturedImages: FaceCaptureData,
+        bestFrameExifMetadata: CameraExifMetadata?,
+        trainingConsent: Bool?
+    ) {
+        // TODO(mludowise|IDPROD-4088): Save training consent when API is updated
+        self.init(
+            bestHighResImage: uploadedFiles.bestHighResFile.id,
+            bestLowResImage: uploadedFiles.bestLowResFile.id,
+            firstHighResImage: uploadedFiles.firstHighResFile.id,
+            firstLowResImage: uploadedFiles.firstLowResFile.id,
+            lastHighResImage: uploadedFiles.lastHighResFile.id,
+            lastLowResImage: uploadedFiles.lastLowResFile.id,
+            bestFaceScore: .init(capturedImages.bestMiddle.scannerOutput.faceScore),
+            faceScoreVariance: .init(capturedImages.faceScoreVariance),
+            numFrames: capturedImages.numSamples,
+            bestBrightnessValue: bestFrameExifMetadata?.brightnessValue.map {
+                TwoDecimalFloat(double: $0)
+            },
+            bestCameraLensModel: bestFrameExifMetadata?.lensModel,
+            bestExposureDuration: capturedImages.bestMiddle.scannerOutput.cameraProperties.map {
+                Int($0.exposureDuration.seconds * 1000)
+            },
+            bestExposureIso: capturedImages.bestMiddle.scannerOutput.cameraProperties.map {
+                TwoDecimalFloat($0.exposureISO)
+            },
+            bestFocalLength: bestFrameExifMetadata?.focalLength.map {
+                TwoDecimalFloat(double: $0)
+            },
+            bestIsVirtualCamera: capturedImages.bestMiddle.scannerOutput.cameraProperties?.isVirtualDevice
+        )
+    }
+}
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanner/FaceScanner/FaceCaptureData.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanner/FaceScanner/FaceCaptureData.swift
index 2de71b31896..3073c32392c 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanner/FaceScanner/FaceCaptureData.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanner/FaceScanner/FaceCaptureData.swift
@@ -7,10 +7,12 @@
 import Foundation
 import CoreGraphics
+@_spi(STP) import StripeCameraCore
 
 struct FaceScannerInputOutput: Equatable {
     let image: CGImage
     let scannerOutput: FaceScannerOutput
+    let cameraExifMetadata: CameraExifMetadata?
 }
 
 struct FaceCaptureData: Equatable {
@@ -18,6 +20,7 @@
     let last: FaceScannerInputOutput
     let bestMiddle: FaceScannerInputOutput
+    let numSamples: Int
     let faceScoreVariance: Float
 
     var toArray: [FaceScannerInputOutput] {
@@ -38,6 +41,7 @@ extension FaceCaptureData {
             first: first,
             last: last,
             bestMiddle: bestMiddle,
+            numSamples: samples.count,
             faceScoreVariance: samples.standardDeviation(with: { $0.scannerOutput.faceScore })
         )
     }
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanningSession/ImageScanningSession.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanningSession/ImageScanningSession.swift
index 9dd70953354..9248990b2ac 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanningSession/ImageScanningSession.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageScanningSession/ImageScanningSession.swift
@@ -343,4 +343,8 @@ extension ImageScanningSession where ExpectedClassificationType == EmptyClassifi
     func startTimeoutTimer() {
         startTimeoutTimer(expectedClassification: .empty)
     }
+
+    func reset() {
+        reset(to: .empty)
+    }
 }
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/DocumentUploader.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/DocumentUploader.swift
index 404be4007f2..68d5eee100b 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/DocumentUploader.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/DocumentUploader.swift
@@ -175,20 +175,39 @@
         method: StripeAPI.VerificationPageDataDocumentFileData.FileUploadMethod,
         fileNamePrefix: String
     ) -> Future<StripeAPI.VerificationPageDataDocumentFileData> {
-        return imageUploader.uploadLowAndHighResImages(
-            originalImage,
-            highResRegionOfInterest: documentScannerOutput?.idDetectorOutput.documentBounds,
-            cropPaddingComputationMethod: .maxImageWidthOrHeight,
-            lowResFileName: "\(fileNamePrefix)_full_frame",
-            highResFileName: fileNamePrefix
-        ).chained { (lowResFile, highResFile) in
-            return Promise(value: StripeAPI.VerificationPageDataDocumentFileData(
-                documentScannerOutput: documentScannerOutput,
-                highResImage: highResFile.id,
-                lowResImage: lowResFile?.id,
-                exifMetadata: exifMetadata,
-                uploadMethod: method
-            ))
+
+        // Only upload a low res image if the high res image will be cropped
+        if let documentBounds = documentScannerOutput?.idDetectorOutput.documentBounds {
+            return imageUploader.uploadLowAndHighResImages(
+                originalImage,
+                highResRegionOfInterest: documentBounds,
+                cropPaddingComputationMethod: .maxImageWidthOrHeight,
+                lowResFileName: "\(fileNamePrefix)_full_frame",
+                highResFileName: fileNamePrefix
+            ).chained { (lowResFile, highResFile) in
+                return Promise(value: StripeAPI.VerificationPageDataDocumentFileData(
+                    documentScannerOutput: documentScannerOutput,
+                    highResImage: highResFile.id,
+                    lowResImage: lowResFile.id,
+                    exifMetadata: exifMetadata,
+                    uploadMethod: method
+                ))
+            }
+        } else {
+            return imageUploader.uploadHighResImage(
+                originalImage,
+                regionOfInterest: nil,
+                cropPaddingComputationMethod: .maxImageWidthOrHeight,
+                fileName: fileNamePrefix
+            ).chained { highResFile in
+                return Promise(value: StripeAPI.VerificationPageDataDocumentFileData(
+                    documentScannerOutput: documentScannerOutput,
+                    highResImage: highResFile.id,
+                    lowResImage: nil,
+                    exifMetadata: exifMetadata,
+                    uploadMethod: method
+                ))
+            }
         }
     }
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/IdentityImageUploader.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/IdentityImageUploader.swift
index ef0616f996d..84262a0be22 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/IdentityImageUploader.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/IdentityImageUploader.swift
@@ -11,7 +11,7 @@ import UIKit
 @_spi(STP) import StripeCameraCore
 
 final class IdentityImageUploader {
-    typealias LowHighResFiles = (lowRes: StripeFile?, highRes: StripeFile)
+    typealias LowHighResFiles = (lowRes: StripeFile, highRes: StripeFile)
 
     struct Configuration {
         /// The `purpose` to use when uploading the files
@@ -46,18 +46,15 @@ final class IdentityImageUploader {
     func uploadLowAndHighResImages(
         _ image: CGImage,
-        highResRegionOfInterest: CGRect?,
+        highResRegionOfInterest: CGRect,
         cropPaddingComputationMethod: CGImage.CropPaddingComputationMethod,
         lowResFileName: String,
         highResFileName: String
     ) -> Future<LowHighResFiles> {
-        // Only upload a low res image if the high res image will be cropped
-        let lowResUploadFuture: Future<StripeFile?> = (highResRegionOfInterest == nil)
-            ? Promise(value: nil)
-            : uploadLowResImage(
-                image,
-                fileName: lowResFileName
-            ).chained { Promise(value: $0) }
+        let lowResUploadFuture = uploadLowResImage(
+            image,
+            fileName: lowResFileName
+        )
 
         return uploadHighResImage(
             image,
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/SelfieUploader.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/SelfieUploader.swift
index 6bec0da2efd..df486a78b53 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/SelfieUploader.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/ImageUploaders/SelfieUploader.swift
@@ -10,28 +10,9 @@ import UIKit
 @_spi(STP) import StripeCore
 @_spi(STP) import StripeCameraCore
 
-// TODO(mludowise): This is a temporary API object placeholder until the API changes are made
-struct VerificationPageDataSelfieFileData {
-    init(
-        bestMiddleImageFiles: IdentityImageUploader.LowHighResFiles,
-        firstImageFiles: IdentityImageUploader.LowHighResFiles,
-        lastImageFiles: IdentityImageUploader.LowHighResFiles
-    ) {
-        // TODO(mludowise): Save image files to API object
-        #if DEBUG
-        print("best high res: \(bestMiddleImageFiles.highRes.id)")
-        print("best low res: \(String(describing: bestMiddleImageFiles.lowRes?.id))")
-        print("first high res: \(firstImageFiles.highRes.id)")
-        print("first low res: \(String(describing: firstImageFiles.lowRes?.id))")
-        print("last high res: \(lastImageFiles.highRes.id)")
-        print("last low res: \(String(describing: lastImageFiles.lowRes?.id))")
-        #endif
-    }
-}
-
 /// Dependency-injectable protocol for SelfieUploader
 protocol SelfieUploaderProtocol: AnyObject {
-    var uploadFuture: Future<VerificationPageDataSelfieFileData>? { get }
+    var uploadFuture: Future<SelfieUploader.FileData>? { get }
 
     func uploadImages(
         _ capturedImages: FaceCaptureData
     )
@@ -43,9 +24,18 @@ final class SelfieUploader: SelfieUploaderProtocol {
+    struct FileData {
+        let bestHighResFile: StripeFile
+        let bestLowResFile: StripeFile
+        let firstHighResFile: StripeFile
+        let firstLowResFile: StripeFile
+        let lastHighResFile: StripeFile
+        let lastLowResFile: StripeFile
+    }
+
     let imageUploader: IdentityImageUploader
 
-    private(set) var uploadFuture: Future<VerificationPageDataSelfieFileData>?
+    private(set) var uploadFuture: Future<FileData>?
 
     init(imageUploader: IdentityImageUploader) {
         self.imageUploader = imageUploader
@@ -69,10 +59,13 @@ final class SelfieUploader: SelfieUploaderProtocol {
         return firstUploadFuture.chained { firstFiles in
             return lastUploadFuture.chained { lastFiles in
                 return Promise(
-                    value: VerificationPageDataSelfieFileData(
-                        bestMiddleImageFiles: bestFiles,
-                        firstImageFiles: firstFiles,
-                        lastImageFiles: lastFiles
+                    value: FileData(
+                        bestHighResFile: bestFiles.highRes,
+                        bestLowResFile: bestFiles.lowRes,
+                        firstHighResFile: firstFiles.highRes,
+                        firstLowResFile: firstFiles.lowRes,
+                        lastHighResFile: lastFiles.highRes,
+                        lastLowResFile: lastFiles.lowRes
                     )
                 )
             }
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/VerificationSheetController.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/VerificationSheetController.swift
index 1643f10f787..d1dfeefdf59 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/VerificationSheetController.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/Coordinators/VerificationSheetController.swift
@@ -47,6 +47,7 @@ protocol VerificationSheetControllerProtocol: AnyObject {
 
     func saveSelfieFileDataAndTransition(
         selfieUploader: SelfieUploaderProtocol,
+        capturedImages: FaceCaptureData,
         trainingConsent: Bool?,
         completion: @escaping () -> Void
     )
@@ -188,21 +189,30 @@ final class VerificationSheetController: VerificationSheetControllerProtocol {
 
     func saveSelfieFileDataAndTransition(
         selfieUploader: SelfieUploaderProtocol,
+        capturedImages: FaceCaptureData,
         trainingConsent: Bool?,
         completion: @escaping () -> Void
     ) {
-        selfieUploader.uploadFuture?.chained { [weak flowController, apiClient] _ -> Future<StripeAPI.VerificationPageData> in
-            // TODO(mludowise|IDPROD-3821): Save face file data / consent instead of nil
+        var optionalCollectedData: StripeAPI.VerificationPageCollectedData?
+        selfieUploader.uploadFuture?.chained { [weak flowController, apiClient] uploadedFiles -> Future<StripeAPI.VerificationPageData> in
+            let collectedData = StripeAPI.VerificationPageCollectedData(
+                face: .init(
+                    uploadedFiles: uploadedFiles,
+                    capturedImages: capturedImages,
+                    bestFrameExifMetadata: capturedImages.bestMiddle.cameraExifMetadata,
+                    trainingConsent: trainingConsent
+                )
+            )
+            optionalCollectedData = collectedData
             return apiClient.updateIdentityVerificationPageData(
                 updating: StripeAPI.VerificationPageDataUpdate(
                     clearData: .init(clearFields: flowController?.uncollectedFields ?? []),
-                    collectedData: nil
+                    collectedData: collectedData
                 )
             )
         }.observe(on: .main) { [weak self] result in
-            // TODO(mludowise|IDPROD-3821): use updated collectedData instead of nil
             self?.saveCheckSubmitAndTransition(
-                collectedData: nil,
+                collectedData: optionalCollectedData,
                 updateDataResult: result,
                 completion: completion
             )
diff --git a/StripeIdentity/StripeIdentity/Source/NativeComponents/ViewControllers/SelfieCaptureViewController.swift b/StripeIdentity/StripeIdentity/Source/NativeComponents/ViewControllers/SelfieCaptureViewController.swift
index b1f104ab545..ec89d942f6f 100644
--- a/StripeIdentity/StripeIdentity/Source/NativeComponents/ViewControllers/SelfieCaptureViewController.swift
+++ b/StripeIdentity/StripeIdentity/Source/NativeComponents/ViewControllers/SelfieCaptureViewController.swift
@@ -275,8 +275,10 @@ extension SelfieCaptureViewController {
     func saveDataAndTransitionToNextScreen(
         faceCaptureData: FaceCaptureData
     ) {
+        imageScanningSession.setStateSaving(faceCaptureData)
         self.sheetController?.saveSelfieFileDataAndTransition(
             selfieUploader: selfieUploader,
+            capturedImages: faceCaptureData,
             trainingConsent: consentSelection
         ) { [weak self] in
             self?.imageScanningSession.setStateScanned(capturedData: faceCaptureData)
@@ -330,7 +332,11 @@ extension SelfieCaptureViewController: ImageScanningSessionDelegate {
         }
 
         // Update the number of collected samples
-        collectedSamples.append(.init(image: image, scannerOutput: scannerOutput))
+        collectedSamples.append(.init(
+            image: image,
+            scannerOutput: scannerOutput,
+            cameraExifMetadata: exifMetadata
+        ))
 
         // Reset timeout timer
         scanningSession.stopTimeoutTimer()
@@ -359,4 +365,8 @@ extension SelfieCaptureViewController: IdentityDataCollecting {
     var collectedFields: Set<StripeAPI.VerificationPageFieldType> {
         return [.face]
     }
+
+    func reset() {
+        imageScanningSession.reset()
+    }
 }
diff --git a/StripeIdentity/StripeIdentityTests/Helpers/VerificationSheetControllerMock.swift b/StripeIdentity/StripeIdentityTests/Helpers/VerificationSheetControllerMock.swift
index cc80f848adc..1c96362cabe 100644
--- a/StripeIdentity/StripeIdentityTests/Helpers/VerificationSheetControllerMock.swift
+++ b/StripeIdentity/StripeIdentityTests/Helpers/VerificationSheetControllerMock.swift
@@ -25,7 +25,7 @@ final class VerificationSheetControllerMock: VerificationSheetControllerProtocol
     private(set) var savedData: StripeAPI.VerificationPageCollectedData?
     private(set) var uploadedDocumentsResult: Result?
-    private(set) var uploadedSelfieResult: Result<VerificationPageDataSelfieFileData, Error>?
+    private(set) var uploadedSelfieResult: Result<SelfieUploader.FileData, Error>?
 
     init(
         apiClient: IdentityAPIClient = IdentityAPIClientTestMock(),
@@ -66,6 +66,7 @@ final class VerificationSheetControllerMock: VerificationSheetControllerProtocol
 
     func saveSelfieFileDataAndTransition(
         selfieUploader: SelfieUploaderProtocol,
+        capturedImages: FaceCaptureData,
         trainingConsent: Bool?,
         completion: @escaping () -> Void
     ) {
diff --git a/StripeIdentity/StripeIdentityTests/Unit/NativeComponents/Coordinators/VerificationSheetControllerTest.swift b/StripeIdentity/StripeIdentityTests/Unit/NativeComponents/Coordinators/VerificationSheetControllerTest.swift
index fedfcf900a7..2e6ef2a765f 100644
--- a/StripeIdentity/StripeIdentityTests/Unit/NativeComponents/Coordinators/VerificationSheetControllerTest.swift
+++ b/StripeIdentity/StripeIdentityTests/Unit/NativeComponents/Coordinators/VerificationSheetControllerTest.swift
@@ -126,6 +126,7 @@ final class VerificationSheetControllerTest: XCTestCase {
             .init(
                 clearData: .init(
                     biometricConsent: false,
+                    face: false,
                     idDocumentBack: true,
                     idDocumentFront: true,
                     idDocumentType: true
diff --git a/modules.yaml b/modules.yaml
index 1b5371b4401..5b4f13c2316 100644
--- a/modules.yaml
+++ b/modules.yaml
@@ -103,7 +103,7 @@ modules:
     supports_catalyst: false
    size_report:
       max_compressed_size: 1500
-      max_uncompressed_size: 2500
+      max_uncompressed_size: 3000
       max_incremental_uncompressed_size: 100
     docs:
       output: docs/stripe-identity