fmodf 2024-07-14 17:02:41 +02:00
parent e21d1a1ce9
commit 73c7aa5563


@@ -45,65 +45,54 @@ final class FileProcessing {
     }
     func fetchGallery() -> [SharingGalleryItem] {
-        let fetchOptions = PHFetchOptions()
-        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
-        let assets = PHAsset.fetchAssets(with: fetchOptions)
-        var items: [SharingGalleryItem] = []
-        assets.enumerateObjects { asset, _, _ in
-            if asset.mediaType == .image {
-                items.append(.init(id: asset.localIdentifier, type: .photo))
-            } else if asset.mediaType == .video {
-                items.append(.init(id: asset.localIdentifier, type: .video))
+        let items = syncGalleryEnumerate()
+            .map {
+                SharingGalleryItem(
+                    id: $0.localIdentifier,
+                    type: $0.mediaType == .image ? .photo : .video,
+                    duration: $0.mediaType == .video ? $0.duration.minAndSec : nil
+                )
             }
-        }
         return items
     }
     func fillGalleryItemsThumbnails(items: [SharingGalleryItem]) -> [SharingGalleryItem] {
-        var result: [SharingGalleryItem] = []
         let ids = items
             .filter { $0.thumbnail == nil }
             .map { $0.id }
-        let assets = PHAsset.fetchAssets(withLocalIdentifiers: ids, options: nil)
-        assets.enumerateObjects { asset, _, _ in
+        let assets = syncGalleryEnumerate(ids)
+        return assets.compactMap { asset in
             if asset.mediaType == .image {
-                PHImageManager.default().requestImage(
-                    for: asset,
-                    targetSize: PHImageManagerMaximumSize,
-                    contentMode: .aspectFill,
-                    options: nil
-                ) { image, _ in
-                    image?.scaleAndCropImage(toExampleSize: CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) { image in
-                        if let image {
-                            let data = image.jpegData(compressionQuality: 1.0) ?? Data()
-                            result.append(.init(id: asset.localIdentifier, type: .photo, thumbnail: data))
-                        }
-                    }
-                }
+                return syncGalleryProcessImage(asset) { [weak self] image in
+                    if let thumbnail = self?.scaleAndCropImage(image, CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) {
+                        let data = thumbnail.jpegData(compressionQuality: 1.0) ?? Data()
+                        return SharingGalleryItem(id: asset.localIdentifier, type: .photo, thumbnail: data)
+                    } else {
+                        return nil
+                    }
+                }
             } else if asset.mediaType == .video {
-                PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
-                    if let avAsset {
-                        let imageGenerator = AVAssetImageGenerator(asset: avAsset)
-                        imageGenerator.appliesPreferredTrackTransform = true
-                        let time = CMTimeMake(value: 1, timescale: 2)
-                        do {
-                            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
-                            let thumbnail = UIImage(cgImage: imageRef)
-                            thumbnail.scaleAndCropImage(toExampleSize: CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) { image in
-                                if let image {
-                                    let data = image.jpegData(compressionQuality: 1.0) ?? Data()
-                                    result.append(.init(id: asset.localIdentifier, type: .video, thumbnail: data))
-                                }
-                            }
-                        } catch {
-                            print("Failed to create thumbnail image")
-                        }
-                    }
-                }
+                return syncGalleryProcessVideo(asset) { [weak self] avAsset in
+                    // swiftlint:disable:next force_cast
+                    let assetURL = avAsset as! AVURLAsset
+                    let url = assetURL.url
+                    if let thumbnail = self?.generateVideoThumbnail(url, CGSize(width: Const.galleryGridSize, height: Const.galleryGridSize)) {
+                        let data = thumbnail.jpegData(compressionQuality: 1.0) ?? Data()
+                        return SharingGalleryItem(
+                            id: asset.localIdentifier,
+                            type: .video,
+                            thumbnail: data,
+                            duration: asset.duration.minAndSec
+                        )
+                    } else {
+                        return nil
+                    }
+                }
+            } else {
+                return nil
             }
         }
-        return result
     }
 }
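
A minimal sketch of how the two functions above might be driven by a caller. `FileProcessing` and `SharingGalleryItem` are the project types from this diff; the function name, the injected `processor` parameter, and the queue choice are illustrative assumptions. Both calls block until the underlying Photos requests finish, so they should stay off the main thread.

import Foundation

// Illustrative caller, not part of this commit: list the gallery first,
// then resolve thumbnails for the items that still lack one.
func loadGallery(using processor: FileProcessing,
                 completion: @escaping ([SharingGalleryItem]) -> Void) {
    DispatchQueue.global(qos: .userInitiated).async {
        let listing = processor.fetchGallery()
        let thumbnailed = processor.fillGalleryItemsThumbnails(items: listing)
        DispatchQueue.main.async {
            completion(thumbnailed) // items that now carry thumbnail data
        }
    }
}
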
@@ -129,18 +118,95 @@ private extension FileProcessing {
         return newImage
     }
-    func syncEnumrate(_ ids: [String]? = nil) -> [PHAsset] {
+    func syncGalleryEnumerate(_ ids: [String]? = nil) -> [PHAsset] {
         var result: [PHAsset] = []
-        let fetchOptions = PHFetchOptions()
-        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
-        if let ids {
-            fetchOptions.predicate = NSPredicate(format: "localIdentifier IN %@", ids)
-        }
-        let assets = PHAsset.fetchAssets(with: fetchOptions)
-        assets.enumerateObjects { asset, _, _ in
-            result.append(asset)
+        let group = DispatchGroup()
+        DispatchQueue.global(qos: .userInitiated).sync {
+            let fetchOptions = PHFetchOptions()
+            fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
+            if let ids {
+                fetchOptions.predicate = NSPredicate(format: "localIdentifier IN %@", ids)
+            }
+            let assets = PHAsset.fetchAssets(with: fetchOptions)
+            assets.enumerateObjects { asset, _, _ in
+                group.enter()
+                result.append(asset)
+                group.leave()
+            }
         }
+        group.wait()
         return result
     }
+
+    func syncGalleryProcess<T>(_ assets: [PHAsset], _ block: @escaping (PHAsset) -> T) -> [T] {
+        var result: [T] = []
+        let group = DispatchGroup()
+        DispatchQueue.global(qos: .userInitiated).sync {
+            for asset in assets {
+                group.enter()
+                let res = block(asset)
+                result.append(res)
+                group.leave()
+            }
+        }
+        group.wait()
+        return result
+    }
+
+    func syncGalleryProcessImage<T>(_ asset: PHAsset, _ block: @escaping (UIImage) -> T?) -> T? {
+        var result: T?
+        let semaphore = DispatchSemaphore(value: 0)
+        DispatchQueue.global(qos: .userInitiated).sync {
+            let options = PHImageRequestOptions()
+            options.version = .original
+            options.isSynchronous = true
+            PHImageManager.default().requestImage(
+                for: asset,
+                targetSize: PHImageManagerMaximumSize,
+                contentMode: .aspectFill,
+                options: options
+            ) { image, _ in
+                if let image {
+                    result = block(image)
+                } else {
+                    result = nil
+                }
+                semaphore.signal()
+            }
+        }
+        semaphore.wait()
+        return result
+    }
+
+    func syncGalleryProcessVideo<T>(_ asset: PHAsset, _ block: @escaping (AVAsset) -> T?) -> T? {
+        var result: T?
+        let semaphore = DispatchSemaphore(value: 0)
+        _ = DispatchQueue.global(qos: .userInitiated).sync {
+            PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
+                if let avAsset {
+                    result = block(avAsset)
+                } else {
+                    result = nil
+                }
+                semaphore.signal()
+            }
+        }
+        semaphore.wait()
+        return result
+    }
+
+    func generateVideoThumbnail(_ url: URL, _ size: CGSize) -> UIImage? {
+        let asset = AVAsset(url: url)
+        let assetImgGenerate = AVAssetImageGenerator(asset: asset)
+        assetImgGenerate.appliesPreferredTrackTransform = true
+        let time = CMTimeMakeWithSeconds(Float64(1), preferredTimescale: 600)
+        do {
+            let cgImage = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
+            let image = UIImage(cgImage: cgImage)
+            return scaleAndCropImage(image, size)
+        } catch {
+            return nil
+        }
+    }
 }
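
The new helpers all follow the same blocking-wrapper pattern: kick off a Photos request and park the calling thread on a DispatchSemaphore until the callback fires. A stripped-down sketch of that pattern for the video path follows; only `requestAVAsset(forVideo:options:)` and the semaphore usage mirror the code above, and the function name is illustrative.

import Photos
import AVFoundation

// Minimal sketch of the semaphore-based wrapper used by syncGalleryProcessVideo:
// requestAVAsset(forVideo:) has no synchronous option, so the caller waits for
// the asynchronous callback. Blocks the calling thread, so keep it off main.
func blockingAVAsset(for asset: PHAsset) -> AVAsset? {
    var result: AVAsset?
    let semaphore = DispatchSemaphore(value: 0)
    PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
        result = avAsset
        semaphore.signal()
    }
    semaphore.wait()
    return result
}

The image helper in the diff additionally sets `isSynchronous = true` on `PHImageRequestOptions`, so there the completion runs inline and the semaphore mainly formalises the ordering; the video path genuinely needs the wait.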