I’m building an iOS app for my personal use that analyzes my images on-device using a Core ML model I trained. When I fetch all the photos to analyze, it doesn’t fetch all of them; it stops at around 1,000 photos, give or take 100. Why is that?
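For reference, this is roughly how I’m observing the count (simplified; manager is the PhotoLibraryManager instance from the class below):

// Printed from my view once allAssets updates; this consistently reports
// around 1,000 even though my library holds far more photos.
print("Fetched \(manager.allAssets.count) assets")

Here’s the full manager class to help debug: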
import SwiftUI
import UIKit
import Photos
import CoreML

class PhotoLibraryManager: ObservableObject {
    @Published var allAssets: [PHAsset] = []
    @Published var selectedImage: UIImage? = nil
    @Published var isSheetPresented = false
    @AppStorage("photoCountLimit") var countLimit = true
    // NSCache needs explicit key/object types in Swift; without them,
    // object(forKey:) won't return a UIImage
    private var thumbnailCache = NSCache<NSString, UIImage>()
    init() {
        fetchPhotoLibrary()
    }

    // MARK: - Fetch Photo Library
    func fetchPhotoLibrary() {
        PHPhotoLibrary.requestAuthorization { [weak self] status in
            guard status == .authorized else {
                print("Photo library access denied")
                return
            }
            DispatchQueue.global(qos: .userInitiated).async {
                let fetchOptions = PHFetchOptions()
                fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
                // only set fetch limit if countLimit is true
                if self?.countLimit == true {
                    fetchOptions.fetchLimit = 100
                }
                // fetch all photo assets
                let assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
                let assetsArray = assets.objects(at: IndexSet(0..<assets.count))
                DispatchQueue.main.async {
                    self?.allAssets = assetsArray
                }
            }
        }
    }

    // MARK: - Fetch Thumbnail
    func fetchThumbnail(for asset: PHAsset, targetSize: CGSize, completion: @escaping (UIImage?) -> Void) {
        let cacheKey = NSString(string: asset.localIdentifier)
        if let cachedImage = thumbnailCache.object(forKey: cacheKey) {
            completion(cachedImage)
            return
        }
        let options = PHImageRequestOptions()
        options.isSynchronous = false
        // .opportunistic can call the result handler more than once
        // (a degraded image first, then the full-quality one)
        options.deliveryMode = .opportunistic
        options.isNetworkAccessAllowed = true
        PHImageManager.default().requestImage(
            for: asset,
            targetSize: targetSize,
            contentMode: .aspectFill,
            options: options
        ) { [weak self] image, _ in
            if let image = image {
                self?.thumbnailCache.setObject(image, forKey: cacheKey)
            }
            completion(image)
        }
    }
    // MARK: - Fetch Full Image
    func fetchFullImage(for asset: PHAsset, completion: @escaping (UIImage?) -> Void) {
        let options = PHImageRequestOptions()
        options.isSynchronous = false
        options.deliveryMode = .highQualityFormat
        options.isNetworkAccessAllowed = true
        PHImageManager.default().requestImage(
            for: asset,
            targetSize: PHImageManagerMaximumSize,
            contentMode: .aspectFit,
            options: options
        ) { image, _ in
            DispatchQueue.main.async {
                completion(image)
            }
        }
    }
    // MARK: - Analyze Images
    func analyzeAllImages(completion: @escaping ([String]) -> Void) {
        DispatchQueue.global(qos: .userInitiated).async {
            var results: [String] = []
            let group = DispatchGroup()
            for asset in self.allAssets {
                group.enter()
                self.fetchAnalyzeImage(for: asset) { [weak self] image in
                    if let image = image, let prediction = self?.analyzeImage(image: image) {
                        results.append(prediction)
                    }
                    group.leave()
                }
            }
            group.notify(queue: .main) {
                completion(results)
            }
        }
    }
    func fetchAnalyzeImage(for asset: PHAsset, completion: @escaping (UIImage?) -> Void) {
        let options = PHImageRequestOptions()
        options.isSynchronous = false
        options.deliveryMode = .fastFormat
        options.isNetworkAccessAllowed = true
        PHImageManager.default().requestImage(
            for: asset,
            targetSize: CGSize(width: 300, height: 300),
            contentMode: .aspectFit,
            options: options
        ) { image, _ in
            completion(image)
        }
    }
    func analyzeImage(image: UIImage) -> String? {
        do {
            // toPixelBuffer is a small UIImage extension I wrote (not shown here)
            guard let pixelBuffer = image.toPixelBuffer(size: CGSize(width: 360, height: 360)) else {
                print("Failed to create pixel buffer")
                return nil
            }
            let model = try NovaClassifier19f(configuration: MLModelConfiguration())
            let output = try model.prediction(image: pixelBuffer)
            return output.target
        } catch {
            print("Error analyzing image: \(error)")
            return nil
        }
    }
}
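In case it’s relevant, here’s a simplified sketch of how I use the manager from SwiftUI (the view name and layout are just placeholders, not my real UI):

import SwiftUI

struct LibraryView: View {
    @StateObject private var manager = PhotoLibraryManager()

    var body: some View {
        VStack {
            Text("\(manager.allAssets.count) photos fetched")
            Button("Analyze all") {
                manager.analyzeAllImages { predictions in
                    print("Predictions: \(predictions.count)")
                }
            }
        }
        // selectedImage / isSheetPresented get set elsewhere when a photo is tapped
        .sheet(isPresented: $manager.isSheetPresented) {
            if let image = manager.selectedImage {
                Image(uiImage: image)
                    .resizable()
                    .scaledToFit()
            }
        }
    }
}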
Thanks in advance for the help! (And let me know if I should improve anything about how I’m asking; I’m brand new to Stack Overflow.)