diff --git a/CLIP-Finder2.xcodeproj/project.pbxproj b/CLIP-Finder2.xcodeproj/project.pbxproj index 47475ee..554a94a 100644 --- a/CLIP-Finder2.xcodeproj/project.pbxproj +++ b/CLIP-Finder2.xcodeproj/project.pbxproj @@ -24,7 +24,6 @@ 1C62E8EE2C2A211000C1C637 /* PhotosDB.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8EC2C2A211000C1C637 /* PhotosDB.xcdatamodeld */; }; 1C62E8F02C2A219D00C1C637 /* Preprocessing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8EF2C2A219D00C1C637 /* Preprocessing.swift */; }; 1C62E8F42C2A221F00C1C637 /* CoreDataManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8F32C2A221F00C1C637 /* CoreDataManager.swift */; }; - 1C62E8F82C2A225E00C1C637 /* ImageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */; }; 1C62E8FC2C2A238500C1C637 /* MLMultiArrayExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */; }; 1C62E8FE2C2A24C200C1C637 /* MLMultiArrayTransformer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FD2C2A24C200C1C637 /* MLMultiArrayTransformer.swift */; }; 1C62E9002C2A259F00C1C637 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FF2C2A259F00C1C637 /* AppDelegate.swift */; }; @@ -52,7 +51,6 @@ 1C62E8ED2C2A211000C1C637 /* PhotosDB.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = PhotosDB.xcdatamodel; sourceTree = ""; }; 1C62E8EF2C2A219D00C1C637 /* Preprocessing.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Preprocessing.swift; sourceTree = ""; }; 1C62E8F32C2A221F00C1C637 /* CoreDataManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreDataManager.swift; sourceTree = ""; }; - 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageViewController.swift; sourceTree = ""; }; 1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MLMultiArrayExtension.swift; sourceTree = ""; }; 1C62E8FD2C2A24C200C1C637 /* MLMultiArrayTransformer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MLMultiArrayTransformer.swift; sourceTree = ""; }; 1C62E8FF2C2A259F00C1C637 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; @@ -111,7 +109,6 @@ 1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */, 1C62E9072C2B556400C1C637 /* MPSGraphPostProcessing.swift */, 1C0105DC2C341F66005870B5 /* clip_mci_image.mlpackage */, - 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */, 1C62E90D2C2C9EC300C1C637 /* CLIP_Tokenizer.swift */, 1C0F0CC62C2E02520007191B /* clip_text.mlpackage */, 1C0F0CC82C2E03FF0007191B /* CLIPTextModel.swift */, @@ -203,7 +200,6 @@ buildActionMask = 2147483647; files = ( 1C0105DF2C345433005870B5 /* ProfilerPerformanceStats.swift in Sources */, - 1C62E8F82C2A225E00C1C637 /* ImageViewController.swift in Sources */, 1C0F0CCB2C2F12E40007191B /* CameraPreviewView.swift in Sources */, 1C0F0CC72C2E02520007191B /* clip_text.mlpackage in Sources */, 1C62E9082C2B556400C1C637 /* MPSGraphPostProcessing.swift in Sources */, diff --git a/CLIP-Finder2/Assets.xcassets/AppIcon.appiconset/CLIPFinderIcon_low-1024.png b/CLIP-Finder2/Assets.xcassets/AppIcon.appiconset/CLIPFinderIcon_low-1024.png index 
6cfe960..28fa4b6 100644 Binary files a/CLIP-Finder2/Assets.xcassets/AppIcon.appiconset/CLIPFinderIcon_low-1024.png and b/CLIP-Finder2/Assets.xcassets/AppIcon.appiconset/CLIPFinderIcon_low-1024.png differ diff --git a/CLIP-Finder2/CLIPImageModel.swift b/CLIP-Finder2/CLIPImageModel.swift index 557c238..2684ff2 100644 --- a/CLIP-Finder2/CLIPImageModel.swift +++ b/CLIP-Finder2/CLIPImageModel.swift @@ -24,7 +24,9 @@ final class CLIPImageModel { do { try await loadModel() } catch { + #if DEBUG print("Failed to load model: \(error)") + #endif } } } @@ -37,19 +39,23 @@ final class CLIPImageModel { do { try await loadModel() } catch { + #if DEBUG print("Failed to reload model: \(error)") + #endif } } private func loadModel() async throws { guard let modelURL = Bundle.main.url(forResource: "clip_mci_image", withExtension: "mlmodelc") else { - print("Current bundle URL: \(Bundle.main.bundleURL)") +// print("Current bundle URL: \(Bundle.main.bundleURL)") throw DataModelError.modelFileNotFound } // let compiledURL = try await MLModel.compileModel(at: modelURL) model = try await MLModel.load(contentsOf: modelURL, configuration: configuration) - print("Model loaded successfully.") + #if DEBUG + print("CLIP image model loaded successfully.") + #endif } @@ -69,7 +75,9 @@ final class CLIPImageModel { throw NSError(domain: "DataModel", code: 3, userInfo: [NSLocalizedDescriptionKey: "Failed to retrieve MLMultiArray from prediction"]) } } catch { + #if DEBUG print("Failed to perform inference: \(error)") + #endif throw error } } diff --git a/CLIP-Finder2/CLIPTextModel.swift b/CLIP-Finder2/CLIPTextModel.swift index cb75753..7cdc6f4 100644 --- a/CLIP-Finder2/CLIPTextModel.swift +++ b/CLIP-Finder2/CLIPTextModel.swift @@ -26,7 +26,9 @@ final class CLIPTextModel { do { try await loadModel() } catch { + #if DEBUG print("Failed to load CLIP text model: \(error)") + #endif } } } @@ -39,18 +41,21 @@ final class CLIPTextModel { do { try await loadModel() } catch { + #if DEBUG print("Failed to reload CLIP text model: \(error)") + #endif } } private func loadModel() async throws { guard let modelURL = Bundle.main.url(forResource: "clip_text", withExtension: "mlmodelc") else { - print("Current bundle URL: \(Bundle.main.bundleURL)") throw CLIPTextModelError.modelFileNotFound } model = try await MLModel.load(contentsOf: modelURL, configuration: configuration) + #if DEBUG print("CLIP text model loaded successfully.") + #endif } func performInference(_ tokens: [Int32]) async throws -> MLMultiArray? { @@ -74,7 +79,9 @@ final class CLIPTextModel { throw CLIPTextModelError.predictionFailed } } catch { + #if DEBUG print("Failed to perform CLIP text inference: \(error)") + #endif throw error } } diff --git a/CLIP-Finder2/CameraManager.swift b/CLIP-Finder2/CameraManager.swift index 91ea680..aca1414 100644 --- a/CLIP-Finder2/CameraManager.swift +++ b/CLIP-Finder2/CameraManager.swift @@ -59,7 +59,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleB session.addInput(input!) } } catch { + #if DEBUG print("Error setting device input: \(error.localizedDescription)") + #endif return } @@ -121,7 +123,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleB session.addInput(input!) 
} } catch { + #if DEBUG print("Error switching camera: \(error.localizedDescription)") + #endif } session.commitConfiguration() @@ -145,7 +149,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleB camera.unlockForConfiguration() } catch { + #if DEBUG print("Error configurando la cámara: \(error)") + #endif } } @@ -167,7 +173,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleB camera.unlockForConfiguration() } catch { + #if DEBUG print("Error al enfocar: \(error)") + #endif } } } diff --git a/CLIP-Finder2/CoreDataManager.swift b/CLIP-Finder2/CoreDataManager.swift index d20e382..b9cebe6 100644 --- a/CLIP-Finder2/CoreDataManager.swift +++ b/CLIP-Finder2/CoreDataManager.swift @@ -52,7 +52,9 @@ class CoreDataManager { try context.save() } catch { + #if DEBUG print("Failed to save vector: \(error)") + #endif } } } @@ -67,7 +69,9 @@ class CoreDataManager { let results = try context.fetch(fetchRequest) result = results.first?.vectorData as? MLMultiArray } catch { + #if DEBUG print("Failed to fetch vector: \(error)") + #endif } } return result @@ -89,9 +93,13 @@ class CoreDataManager { NSManagedObjectContext.mergeChanges(fromRemoteContextSave: changes, into: [self.viewContext]) try context.save() + #if DEBUG print("All data deleted successfully") + #endif } catch { + #if DEBUG print("Failed to delete data: \(error)") + #endif } } } @@ -103,7 +111,9 @@ class CoreDataManager { try context.save() } catch { let nsError = error as NSError + #if DEBUG print("Unresolved error \(nsError), \(nsError.userInfo)") + #endif } } } @@ -121,7 +131,9 @@ class CoreDataManager { return (id: id, vector: vector) } } catch { + #if DEBUG print("Failed to fetch photo vectors: \(error)") + #endif } } diff --git a/CLIP-Finder2/CoreMLProfiler.swift b/CLIP-Finder2/CoreMLProfiler.swift index 4d7cddd..7ecf09b 100644 --- a/CLIP-Finder2/CoreMLProfiler.swift +++ b/CLIP-Finder2/CoreMLProfiler.swift @@ -35,7 +35,9 @@ class ModelProfiler: ObservableObject { await clipImageModel.reloadModel() guard let dummyInput = createDummyWhitePixelBuffer(width: 256, height: 256) else { + #if DEBUG print("Failed to create dummy input") + #endif return } @@ -47,11 +49,15 @@ class ModelProfiler: ObservableObject { if let _ = try await self.clipImageModel.performInference(dummyInput) { done() } else { + #if DEBUG print("Inference returned nil") + #endif done() } } catch { + #if DEBUG print("Failed to perform inference: \(error)") + #endif done() } } @@ -79,11 +85,15 @@ class ModelProfiler: ObservableObject { if let _ = try await self.clipTextModel.performInference(dummyInput) { done() } else { + #if DEBUG print("Text inference returned nil") + #endif done() } } catch { + #if DEBUG print("Failed to perform text inference: \(error)") + #endif done() } } @@ -113,7 +123,9 @@ class ModelProfiler: ObservableObject { &pixelBuffer) guard status == kCVReturnSuccess, let buffer = pixelBuffer else { + #if DEBUG print("Failed to create CVPixelBuffer") + #endif return nil } @@ -128,7 +140,9 @@ class ModelProfiler: ObservableObject { bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else { + #if DEBUG print("Failed to create CGContext") + #endif return nil } diff --git a/CLIP-Finder2/ImageViewController.swift b/CLIP-Finder2/ImageViewController.swift deleted file mode 100644 index 9363643..0000000 --- a/CLIP-Finder2/ImageViewController.swift +++ /dev/null @@ -1,83 +0,0 @@ -// -// ImageViewController.swift -// CLIP-Finder2 -// 
-// Created by Fabio Guzman on 24/06/24. -// - -//import UIKit -// -//class ImageViewController: UIViewController { -// var image: UIImage? -// private var imageView: UIImageView! -// -// override func viewDidLoad() { -// super.viewDidLoad() -// view.backgroundColor = .black -// -// setupImageView() -// setupGestures() -// setupShareButton() -// } -// -// private func setupImageView() { -// imageView = UIImageView(image: image) -// imageView.contentMode = .scaleAspectFit -// imageView.frame = view.bounds -// imageView.isUserInteractionEnabled = true -// view.addSubview(imageView) -// } -// -// private func setupGestures() { -// let tapGesture = UITapGestureRecognizer(target: self, action: #selector(dismissView)) -// view.addGestureRecognizer(tapGesture) -// -// let pinchGesture = UIPinchGestureRecognizer(target: self, action: #selector(handlePinch)) -// imageView.addGestureRecognizer(pinchGesture) -// } -// -// private func setupShareButton() { -// let shareButton = UIButton(type: .system) -// shareButton.setImage(UIImage(systemName: "square.and.arrow.up"), for: .normal) -// shareButton.tintColor = .white -// shareButton.addTarget(self, action: #selector(shareImage), for: .touchUpInside) -// -// shareButton.translatesAutoresizingMaskIntoConstraints = false -// view.addSubview(shareButton) -// -// NSLayoutConstraint.activate([ -// shareButton.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -20), -// shareButton.centerXAnchor.constraint(equalTo: view.centerXAnchor), -// shareButton.widthAnchor.constraint(equalToConstant: 50), -// shareButton.heightAnchor.constraint(equalToConstant: 50) -// ]) -// } -// -// @objc private func dismissView() { -// dismiss(animated: true, completion: nil) -// } -// -// @objc private func handlePinch(_ gesture: UIPinchGestureRecognizer) { -// guard let view = gesture.view else { return } -// -// view.transform = view.transform.scaledBy(x: gesture.scale, y: gesture.scale) -// gesture.scale = 1 -// } -// -// @objc private func shareImage() { -// guard let image = image else { return } -// -// let activityViewController = UIActivityViewController(activityItems: [image], applicationActivities: nil) -// activityViewController.completionWithItemsHandler = { (activityType, completed, returnedItems, activityError) in -// if let error = activityError { -// print("Activity error: \(error.localizedDescription)") -// } else if completed { -// print("Activity completed successfully") -// } else { -// print("Activity canceled") -// } -// } -// present(activityViewController, animated: true, completion: nil) -// } -//} -// diff --git a/CLIP-Finder2/MLMultiArrayTransformer.swift b/CLIP-Finder2/MLMultiArrayTransformer.swift index a8e517f..dccf501 100644 --- a/CLIP-Finder2/MLMultiArrayTransformer.swift +++ b/CLIP-Finder2/MLMultiArrayTransformer.swift @@ -26,7 +26,9 @@ class MLMultiArrayTransformer: ValueTransformer { let data = try NSKeyedArchiver.archivedData(withRootObject: multiArray, requiringSecureCoding: true) return data } catch { + #if DEBUG print("Failed to transform MLMultiArray to Data: \(error)") + #endif return nil } } @@ -40,7 +42,9 @@ class MLMultiArrayTransformer: ValueTransformer { let multiArray = try NSKeyedUnarchiver.unarchivedObject(ofClass: MLMultiArray.self, from: data) return multiArray } catch { + #if DEBUG print("Failed to reverse transform Data to MLMultiArray: \(error)") + #endif return nil } } diff --git a/CLIP-Finder2/PhotoGalleryViewModel.swift b/CLIP-Finder2/PhotoGalleryViewModel.swift index 0deeb4d..23d823f 100644 
--- a/CLIP-Finder2/PhotoGalleryViewModel.swift +++ b/CLIP-Finder2/PhotoGalleryViewModel.swift @@ -111,9 +111,13 @@ class PhotoGalleryViewModel: ObservableObject { AVCaptureDevice.requestAccess(for: .video) { granted in DispatchQueue.main.async { if granted { + #if DEBUG print("Camera access granted") + #endif } else { + #if DEBUG print("Camera access denied") + #endif } completion(granted) } @@ -144,7 +148,9 @@ class PhotoGalleryViewModel: ObservableObject { private func performSearch(_ searchText: String) { guard let tokenizer = customTokenizer else { + #if DEBUG print("Tokenizer not initialized") + #endif return } @@ -158,20 +164,39 @@ class PhotoGalleryViewModel: ObservableObject { self.topPhotoIDs = topIDs } } else { + #if DEBUG print("Failed to get text features from CLIP text model") + #endif } } catch { + #if DEBUG print("Error performing CLIP text inference: \(error)") + #endif } } } func performImageSearch(from ciImage: CIImage) async { - guard isCameraActive else { return } - guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else { return } + guard isCameraActive else { + #if DEBUG + print("Camera is not active, skipping image search") + #endif + return + } + guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else { + #if DEBUG + print("Failed to create CGImage from CIImage") + #endif + return + } let uiImage = UIImage(cgImage: cgImage) - guard let pixelBuffer = Preprocessing.preprocessImage(uiImage, targetSize: CGSize(width: 256, height: 256)) else { return } + guard let pixelBuffer = Preprocessing.preprocessImage(uiImage, targetSize: CGSize(width: 256, height: 256)) else { + #if DEBUG + print("Failed to preprocess image") + #endif + return + } do { if let imageFeatures = try await clipImageModel.performInference(pixelBuffer) { @@ -180,10 +205,14 @@ class PhotoGalleryViewModel: ObservableObject { self.topPhotoIDs = topIDs } } else { - print("Inference returned nil.") + #if DEBUG + print("Clip Image Inference returned nil.") + #endif } } catch { + #if DEBUG print("Error performing inference: \(error)") + #endif } } @@ -193,7 +222,9 @@ class PhotoGalleryViewModel: ObservableObject { if status == .authorized { self.fetchPhotos() } else { + #if DEBUG print("Photo library access denied.") + #endif } } } @@ -215,7 +246,9 @@ class PhotoGalleryViewModel: ObservableObject { done() } } completion: { time in + #if DEBUG print("Process and cache completted in \(time) ms") + #endif } } } @@ -271,13 +304,25 @@ class PhotoGalleryViewModel: ObservableObject { if let vector = try await self.clipImageModel.performInference(pixelBufferCopy) { CoreDataManager.shared.saveVector(vector, for: identifier, in: backgroundContext) } else { - print("Inference returned nil for asset \(identifier)") + let error = NSError(domain: "CLIPImageModel", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "Inference returned nil for asset \(identifier)"]) + throw error } countQueue.sync { localProcessedCount += 1 } } catch { - print("Failed to perform inference: \(error)") + if let nsError = error as NSError? 
{ + #if DEBUG + print("Error performing inference: \(nsError.localizedDescription)") + print("Error domain: \(nsError.domain), code: \(nsError.code)") + #endif + } else { + #if DEBUG + print("Unknown error occurred: \(error)") + #endif + } countQueue.sync { localProcessedCount += 1 } @@ -319,58 +364,17 @@ class PhotoGalleryViewModel: ObservableObject { done() } } completion: { time in - print("Process and cache completted in \(time) ms") + #if DEBUG + print("Process and cache completed in \(time) ms") print("Avg CLIP MCI Image Prediction Time: \(PerformanceStats.shared.averageClipMCIImagePredictionTime()) ms") print("Number of samples: \(PerformanceStats.shared.clipMCIImagePredictionTimes.count)") + #endif } } } } - private func createDummyWhitePixelBuffer(width: Int = 256, height: Int = 256) -> CVPixelBuffer? { - var pixelBuffer: CVPixelBuffer? - let attributes = [ - kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, - kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue, - kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue - ] as CFDictionary - - let status = CVPixelBufferCreate(kCFAllocatorDefault, - width, - height, - kCVPixelFormatType_32BGRA, - attributes, - &pixelBuffer) - - guard status == kCVReturnSuccess, let buffer = pixelBuffer else { - print("Failed to create CVPixelBuffer") - return nil - } - - CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0)) - let pixelData = CVPixelBufferGetBaseAddress(buffer) - - let rgbColorSpace = CGColorSpaceCreateDeviceRGB() - guard let context = CGContext(data: pixelData, - width: width, - height: height, - bitsPerComponent: 8, - bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), - space: rgbColorSpace, - bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else { - print("Failed to create CGContext") - return nil - } - - context.setFillColor(red: 1, green: 1, blue: 1, alpha: 1) - context.fill(CGRect(x: 0, y: 0, width: width, height: height)) - - CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0)) - - return buffer - } - private func calculateAndPrintTopPhotoIDs(textFeatures: MLMultiArray) -> [String] { guard let device = MTLCreateSystemDefaultDevice() else { fatalError("Metal is not supported on this device") diff --git a/CLIP-Finder2/Preprocessing.swift b/CLIP-Finder2/Preprocessing.swift index 3586bfb..4fd98e1 100644 --- a/CLIP-Finder2/Preprocessing.swift +++ b/CLIP-Finder2/Preprocessing.swift @@ -28,7 +28,9 @@ class Preprocessing { static func preprocessImage(_ image: UIImage, targetSize: CGSize) -> CVPixelBuffer? 
{ guard let inputTexture = loadTexture(from: image), let outputTexture = makeTexture(descriptor: descriptor(for: targetSize)) else { + #if DEBUG print("Failed to create textures") + #endif return nil } @@ -79,7 +81,9 @@ class Preprocessing { let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, pixelBufferOptions as CFDictionary, &pixelBuffer) guard status == kCVReturnSuccess, let buffer = pixelBuffer else { + #if DEBUG print("Failed to create CVPixelBuffer") + #endif return nil } diff --git a/CLIP-Finder2/ProfilerPerformanceStats.swift b/CLIP-Finder2/ProfilerPerformanceStats.swift index fc3bd1b..6b4780a 100644 --- a/CLIP-Finder2/ProfilerPerformanceStats.swift +++ b/CLIP-Finder2/ProfilerPerformanceStats.swift @@ -14,9 +14,9 @@ public func profile(_ title: String, operation: () -> T) -> T { let nanoTime = endTime.uptimeNanoseconds - startTime.uptimeNanoseconds let timeInterval = Double(nanoTime) / 1_000_000 - + #if DEBUG print("\(title) - Execution time: \(timeInterval) ms") - + #endif return result } @@ -44,8 +44,9 @@ public func profileAsync(_ title: String, operation: (@escaping () -> Void) -> V let endTime = DispatchTime.now() let nanoTime = endTime.uptimeNanoseconds - startTime.uptimeNanoseconds let timeInterval = Double(nanoTime) / 1_000_000 - + #if DEBUG print("\(title) - Execution time: \(timeInterval) ms") + #endif completion(timeInterval) } }
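
Note on the recurring pattern: nearly every hunk above wraps an existing `print` call in `#if DEBUG` / `#endif`. Below is a minimal sketch of a shared helper that expresses the same intent in one place; the `debugLog` function, its file name, and its placement are assumptions for illustration only and are not part of this patch.

```swift
// DebugLog.swift — hypothetical helper, not introduced by this diff.
// Centralizes the `#if DEBUG` + print pattern applied throughout the patch,
// so each call site stays on one line and release builds compile the call away.

import Foundation

@inline(__always)
func debugLog(_ message: @autoclosure () -> String,
              file: String = #fileID,
              line: UInt = #line) {
    #if DEBUG
    // The autoclosure defers string interpolation, so the message is only
    // built when it is actually printed in a DEBUG build.
    print("[\(file):\(line)] \(message())")
    #endif
}

// Example call site, mirroring the error paths touched in this diff:
// do { try await loadModel() } catch { debugLog("Failed to load model: \(error)") }
```

In the patch itself the conditional compilation is applied inline at each call site, which works but repeats three lines per log statement; a helper like the sketch above would keep the same release-build behavior with less repetition.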