fmodf 2024-07-14 17:02:41 +02:00
parent e21d1a1ce9
commit 73c7aa5563

@@ -45,66 +45,55 @@ final class FileProcessing {
     }
     func fetchGallery() -> [SharingGalleryItem] {
-        let fetchOptions = PHFetchOptions()
-        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
-        let assets = PHAsset.fetchAssets(with: fetchOptions)
-        var items: [SharingGalleryItem] = []
-        assets.enumerateObjects { asset, _, _ in
-            if asset.mediaType == .image {
-                items.append(.init(id: asset.localIdentifier, type: .photo))
-            } else if asset.mediaType == .video {
-                items.append(.init(id: asset.localIdentifier, type: .video))
-            }
-        }
+        let items = syncGalleryEnumerate()
+            .map {
+                SharingGalleryItem(
+                    id: $0.localIdentifier,
+                    type: $0.mediaType == .image ? .photo : .video,
+                    duration: $0.mediaType == .video ? $0.duration.minAndSec : nil
+                )
+            }
         return items
     }
     func fillGalleryItemsThumbnails(items: [SharingGalleryItem]) -> [SharingGalleryItem] {
-        var result: [SharingGalleryItem] = []
         let ids = items
             .filter { $0.thumbnail == nil }
             .map { $0.id }
-        let assets = PHAsset.fetchAssets(withLocalIdentifiers: ids, options: nil)
-        assets.enumerateObjects { asset, _, _ in
-            if asset.mediaType == .image {
-                PHImageManager.default().requestImage(
-                    for: asset,
-                    targetSize: PHImageManagerMaximumSize,
-                    contentMode: .aspectFill,
-                    options: nil
-                ) { image, _ in
-                    image?.scaleAndCropImage(toExampleSize: CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) { image in
-                        if let image {
-                            let data = image.jpegData(compressionQuality: 1.0) ?? Data()
-                            result.append(.init(id: asset.localIdentifier, type: .photo, thumbnail: data))
-                        }
-                    }
-                }
-            } else if asset.mediaType == .video {
-                PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
-                    if let avAsset {
-                        let imageGenerator = AVAssetImageGenerator(asset: avAsset)
-                        imageGenerator.appliesPreferredTrackTransform = true
-                        let time = CMTimeMake(value: 1, timescale: 2)
-                        do {
-                            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
-                            let thumbnail = UIImage(cgImage: imageRef)
-                            thumbnail.scaleAndCropImage(toExampleSize: CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) { image in
-                                if let image {
-                                    let data = image.jpegData(compressionQuality: 1.0) ?? Data()
-                                    result.append(.init(id: asset.localIdentifier, type: .video, thumbnail: data))
-                                }
-                            }
-                        } catch {
-                            print("Failed to create thumbnail image")
-                        }
-                    }
-                }
-            }
-        }
-        return result
-    }
+        let assets = syncGalleryEnumerate(ids)
+        return assets.compactMap { asset in
+            if asset.mediaType == .image {
+                return syncGalleryProcessImage(asset) { [weak self] image in
+                    if let thumbnail = self?.scaleAndCropImage(image, CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) {
+                        let data = thumbnail.jpegData(compressionQuality: 1.0) ?? Data()
+                        return SharingGalleryItem(id: asset.localIdentifier, type: .photo, thumbnail: data)
+                    } else {
+                        return nil
+                    }
+                }
+            } else if asset.mediaType == .video {
+                return syncGalleryProcessVideo(asset) { [weak self] avAsset in
+                    // swiftlint:disable:next force_cast
+                    let assetURL = avAsset as! AVURLAsset
+                    let url = assetURL.url
+                    if let thumbnail = self?.generateVideoThumbnail(url, CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) {
+                        let data = thumbnail.jpegData(compressionQuality: 1.0) ?? Data()
+                        return SharingGalleryItem(
+                            id: asset.localIdentifier,
+                            type: .video,
+                            thumbnail: data,
+                            duration: asset.duration.minAndSec
+                        )
+                    } else {
+                        return nil
+                    }
+                }
+            } else {
+                return nil
+            }
+        }
+    }
 }
@@ -129,9 +118,11 @@ private extension FileProcessing {
         return newImage
     }
-    func syncEnumrate(_ ids: [String]? = nil) -> [PHAsset] {
+    func syncGalleryEnumerate(_ ids: [String]? = nil) -> [PHAsset] {
         var result: [PHAsset] = []
-        let fetchOptions = PHFetchOptions()
-        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
-        if let ids {
+        let group = DispatchGroup()
+        DispatchQueue.global(qos: .userInitiated).sync {
+            let fetchOptions = PHFetchOptions()
+            fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
+            if let ids {
@@ -139,8 +130,83 @@ private extension FileProcessing {
-        }
-        let assets = PHAsset.fetchAssets(with: fetchOptions)
-        assets.enumerateObjects { asset, _, _ in
-            result.append(asset)
-        }
+            }
+            let assets = PHAsset.fetchAssets(with: fetchOptions)
+            assets.enumerateObjects { asset, _, _ in
+                group.enter()
+                result.append(asset)
+                group.leave()
+            }
+        }
+        group.wait()
         return result
     }
+    func syncGalleryProcess<T>(_ assets: [PHAsset], _ block: @escaping (PHAsset) -> T) -> [T] {
+        var result: [T] = []
+        let group = DispatchGroup()
+        DispatchQueue.global(qos: .userInitiated).sync {
+            for asset in assets {
+                group.enter()
+                let res = block(asset)
+                result.append(res)
+                group.leave()
+            }
+        }
+        group.wait()
+        return result
+    }
+    func syncGalleryProcessImage<T>(_ asset: PHAsset, _ block: @escaping (UIImage) -> T?) -> T? {
+        var result: T?
+        let semaphore = DispatchSemaphore(value: 0)
+        DispatchQueue.global(qos: .userInitiated).sync {
+            let options = PHImageRequestOptions()
+            options.version = .original
+            options.isSynchronous = true
+            PHImageManager.default().requestImage(
+                for: asset,
+                targetSize: PHImageManagerMaximumSize,
+                contentMode: .aspectFill,
+                options: options
+            ) { image, _ in
+                if let image {
+                    result = block(image)
+                } else {
+                    result = nil
+                }
+                semaphore.signal()
+            }
+        }
+        semaphore.wait()
+        return result
+    }
+    func syncGalleryProcessVideo<T>(_ asset: PHAsset, _ block: @escaping (AVAsset) -> T?) -> T? {
+        var result: T?
+        let semaphore = DispatchSemaphore(value: 0)
+        _ = DispatchQueue.global(qos: .userInitiated).sync {
+            PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
+                if let avAsset {
+                    result = block(avAsset)
+                } else {
+                    result = nil
+                }
+                semaphore.signal()
+            }
+        }
+        semaphore.wait()
+        return result
+    }
+    func generateVideoThumbnail(_ url: URL, _ size: CGSize) -> UIImage? {
+        let asset = AVAsset(url: url)
+        let assetImgGenerate = AVAssetImageGenerator(asset: asset)
+        assetImgGenerate.appliesPreferredTrackTransform = true
+        let time = CMTimeMakeWithSeconds(Float64(1), preferredTimescale: 600)
+        do {
+            let cgImage = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
+            let image = UIImage(cgImage: cgImage)
+            return scaleAndCropImage(image, size)
+        } catch {
+            return nil
+        }
+    }
 }
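
Note: the new code formats durations with `.minAndSec` on an asset's `duration` (a `TimeInterval`), but that helper is not part of this diff. A minimal sketch of what such a formatting extension might look like, assuming it lives elsewhere in the project:

import Foundation

// Hypothetical helper, not in this commit: formats a duration as "m:ss".
extension TimeInterval {
    var minAndSec: String {
        let total = Int(self.rounded())
        return String(format: "%d:%02d", total / 60, total % 60)
    }
}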
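
For context, the reworked gallery helpers are synchronous: they block on the Photos callbacks via DispatchGroup/DispatchSemaphore and return plain arrays. A rough call-site sketch, assuming FileProcessing can be instantiated directly and that the caller owns threading (both assumptions, not shown in this diff):

import Foundation

// Hypothetical call site, not part of this commit: the gallery calls block
// internally on Photos requests, so run them off the main thread.
DispatchQueue.global(qos: .userInitiated).async {
    let processing = FileProcessing()   // assumed initializer
    var items = processing.fetchGallery()
    items = processing.fillGalleryItemsThumbnails(items: items)
    DispatchQueue.main.async {
        // hand `items` (now carrying thumbnails and durations) to the UI
    }
}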