I am working on an iOS app in SwiftUI where I want to combine a JPG and a MOV file into a Live Photo, using the LivePhoto class for the conversion. The JPG and MOV files are displayed correctly in my WallpaperDetailView, but generating the Live Photo and saving it to the photo library fails.
- I have added the necessary permissions for accessing the Photo Library (a sketch of this setup follows the list).
- I have verified that the app has been granted access to the Photo Library at runtime.
- The JPEG and MOV files are successfully downloaded and can be displayed in the app.
- The issue seems to occur when generating the Live Photo from the downloaded files.
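For reference, a minimal sketch of the permission setup described above (assuming the standard `NSPhotoLibraryUsageDescription` / `NSPhotoLibraryAddUsageDescription` Info.plist keys and the iOS 14+ `requestAuthorization(for:)` overload; `ensurePhotoLibraryAccess` is a hypothetical helper, not part of my code):

```swift
import Photos

// Info.plist is assumed to contain:
//   NSPhotoLibraryUsageDescription    - reading from the photo library
//   NSPhotoLibraryAddUsageDescription - saving to the photo library
func ensurePhotoLibraryAccess(_ completion: @escaping (Bool) -> Void) {
    // .readWrite because PHAssetCreationRequest creates a new asset.
    PHPhotoLibrary.requestAuthorization(for: .readWrite) { status in
        DispatchQueue.main.async {
            completion(status == .authorized || status == .limited)
        }
    }
}
```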
Here is the relevant code and the errors I am encountering:
Console prints:
Play button should be visible
Image URL fetched and set: Optional("https://firebasestorage.googleapis.com/…")
Video is ready to play
Video downloaded to: file:///var/mobile/Containers/Data/Application/…/tmp/CFNetworkDownload_7rW5ny.tmp
Failed to generate Live Photo
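A note on the log above: the downloaded video lands in a `CFNetworkDownload_….tmp` file. As a point of reference, here is a minimal sketch (not part of my current code; `copyDownloadedVideo` is a hypothetical helper) of moving that temporary file to a URL with a `.mov` extension, since `URLSession`'s temp file is deleted once the completion handler returns and AVFoundation infers the container format from the path extension:

```swift
import Foundation

// Hypothetical helper: move the URLSession temp file to a stable .mov URL.
private func copyDownloadedVideo(_ tempURL: URL) -> URL? {
    let destination = FileManager.default.temporaryDirectory
        .appendingPathComponent(UUID().uuidString)
        .appendingPathExtension("mov")
    do {
        try FileManager.default.moveItem(at: tempURL, to: destination)
        return destination
    } catch {
        print("Could not move downloaded video: \(error)")
        return nil
    }
}
```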
WallpaperDetailView.swift:
import SwiftUI
import Kingfisher
import PhotosUI
import FirebaseStorage
import AVKit
import UniformTypeIdentifiers
struct WallpaperDetailView: View {
var wallpaper: Wallpaper
@State private var isLoading = false
@State private var isImageSaved = false
@State private var imageURL: URL?
@State private var livePhotoVideoURL: URL?
@State private var player: AVPlayer?
@State private var playerViewController: AVPlayerViewController?
@State private var isVideoReady = false
@State private var showBuffering = false
var body: some View {
ZStack {
if let imageURL = imageURL {
GeometryReader { geometry in
KFImage(imageURL)
.resizable()
.scaledToFill()
.frame(width: geometry.size.width, height: geometry.size.height + geometry.safeAreaInsets.top + geometry.safeAreaInsets.bottom)
.clipped()
.edgesIgnoringSafeArea(.all)
.opacity(playerViewController != nil ? 0 : 1)
}
}
if let playerViewController = playerViewController {
VideoPlayerViewController(playerViewController: playerViewController)
.frame(maxWidth: .infinity, maxHeight: .infinity)
.clipped()
.edgesIgnoringSafeArea(.all)
}
}
.onAppear {
PHPhotoLibrary.requestAuthorization { status in
if status == .authorized {
loadImage()
} else {
print("User denied access to photo library")
}
}
}
private func loadImage() {
isLoading = true
if let imageURLString = wallpaper.imageURL, let imageURL = URL(string: imageURLString) {
self.imageURL = imageURL
if imageURL.scheme == "file" {
self.isLoading = false
print("Local image URL set: (imageURL)")
} else {
fetchDownloadURL(from: imageURLString) { url in
self.imageURL = url
self.isLoading = false
print("Image URL fetched and set: (String(describing: url))")
}
}
}
if let livePhotoVideoURLString = wallpaper.livePhotoVideoURL, let livePhotoVideoURL = URL(string: livePhotoVideoURLString) {
self.livePhotoVideoURL = livePhotoVideoURL
preloadAndPlayVideo(from: livePhotoVideoURL)
} else {
self.isLoading = false
print("No valid image or video URL")
}
}
private func preloadAndPlayVideo(from url: URL) {
self.player = AVPlayer(url: url)
let playerViewController = AVPlayerViewController()
playerViewController.player = self.player
self.playerViewController = playerViewController
let playerItem = AVPlayerItem(url: url)
playerItem.preferredForwardBufferDuration = 1.0
self.player?.replaceCurrentItem(with: playerItem)
...
print("Live Photo Video URL set: (url)")
}
private func saveWallpaperToPhotos() {
if let imageURL = imageURL, let livePhotoVideoURL = livePhotoVideoURL {
saveLivePhotoToPhotos(imageURL: imageURL, videoURL: livePhotoVideoURL)
} else if let imageURL = imageURL {
saveImageToPhotos(url: imageURL)
}
}
private func saveImageToPhotos(url: URL) {
...
}
private func saveLivePhotoToPhotos(imageURL: URL, videoURL: URL) {
isLoading = true
downloadVideo(from: videoURL) { localVideoURL in
guard let localVideoURL = localVideoURL else {
print("Failed to download video for Live Photo")
DispatchQueue.main.async {
self.isLoading = false
}
return
}
print("Video downloaded to: (localVideoURL)")
self.generateAndSaveLivePhoto(imageURL: imageURL, videoURL: localVideoURL)
}
}
private func generateAndSaveLivePhoto(imageURL: URL, videoURL: URL) {
LivePhoto.generate(from: imageURL, videoURL: videoURL, progress: { percent in
print("Progress: (percent)")
}, completion: { livePhoto, resources in
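// Note: `resources` comes back nil whenever generation fails inside the
// LivePhoto class (the paired files could not be written, or
// PHLivePhoto.request failed), so this guard is where the console error below originates.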
guard let resources = resources else {
print("Failed to generate Live Photo")
DispatchQueue.main.async {
self.isLoading = false
}
return
}
print("Live Photo generated with resources: (resources)")
self.saveLivePhotoToLibrary(resources: resources)
})
}
private func saveLivePhotoToLibrary(resources: LivePhoto.LivePhotoResources) {
LivePhoto.saveToLibrary(resources) { success in
DispatchQueue.main.async {
if success {
self.isImageSaved = true
print("Live Photo saved successfully")
} else {
print("Failed to save Live Photo")
}
self.isLoading = false
}
}
}
private func fetchDownloadURL(from gsURL: String, completion: @escaping (URL?) -> Void) {
let storageRef = Storage.storage().reference(forURL: gsURL)
storageRef.downloadURL { url, error in
if let error = error {
print("Failed to fetch image URL: (error)")
completion(nil)
} else {
completion(url)
}
}
}
private func downloadVideo(from url: URL, completion: @escaping (URL?) -> Void) {
let task = URLSession.shared.downloadTask(with: url) { localURL, response, error in
guard let localURL = localURL, error == nil else {
print("Failed to download video: (String(describing: error))")
completion(nil)
return
}
completion(localURL)
}
task.resume()
}
}
LivePhoto.swift:
import UIKit
import AVFoundation
import MobileCoreServices
import Photos
class LivePhoto {
// MARK: PUBLIC
typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)
/// Returns the paired image and video for the given PHLivePhoto
public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
queue.async {
shared.extractResources(from: livePhoto, completion: completion)
}
}
/// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
queue.async {
shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
}
}
/// Save a Live Photo to the Photo Library by passing the paired image and video.
public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
PHPhotoLibrary.shared().performChanges({
let creationRequest = PHAssetCreationRequest.forAsset()
let options = PHAssetResourceCreationOptions()
creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
}, completionHandler: { (success, error) in
if error != nil {
print(error as Any)
}
completion(success)
})
}
// MARK: PRIVATE
private static let shared = LivePhoto()
private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)
lazy private var cacheDirectory: URL? = {
if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
if !FileManager.default.fileExists(atPath: fullDirectory.path) {
try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
}
return fullDirectory
}
return nil
}()
deinit {
clearCache()
}
private func generateKeyPhoto(from videoURL: URL) -> URL? {
var percent:Float = 0.5
let videoAsset = AVURLAsset(url: videoURL)
if let stillImageTime = videoAsset.stillImageTime() {
percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
}
guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
guard let jpegData = imageFrame.jpegData(compressionQuality: 1.0) else { return nil }
guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
do {
try jpegData.write(to: url)
return url
} catch {
return nil
}
}
private func clearCache() {
if let cacheDirectory = cacheDirectory {
try? FileManager.default.removeItem(at: cacheDirectory)
}
}
private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
guard let cacheDirectory = cacheDirectory else {
DispatchQueue.main.async {
completion(nil, nil)
}
return
}
let assetIdentifier = UUID().uuidString
let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
DispatchQueue.main.async {
completion(nil, nil)
}
return
}
addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { (_videoURL) in
if let pairedVideoURL = _videoURL {
_ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable : Any]) -> Void in
if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
return
}
DispatchQueue.main.async {
completion(livePhoto, (pairedImageURL, pairedVideoURL))
}
})
} else {
DispatchQueue.main.async {
completion(nil, nil)
}
}
}
}
private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
let assetResources = PHAssetResource.assetResources(for: livePhoto)
let group = DispatchGroup()
var keyPhotoURL: URL?
var videoURL: URL?
for resource in assetResources {
let buffer = NSMutableData()
let options = PHAssetResourceRequestOptions()
options.isNetworkAccessAllowed = true
group.enter()
PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { (data) in
buffer.append(data)
}) { (error) in
if error == nil {
if resource.type == .pairedVideo {
videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
} else {
keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
}
} else {
print(error as Any)
}
group.leave()
}
}
group.notify(queue: DispatchQueue.main) {
guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
completion(nil)
return
}
completion((pairedPhotoURL, pairedVideoURL))
}
}
private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
if let cacheDirectory = cacheDirectory {
extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
}
}
private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString, kUTTagClassFilenameExtension)?.takeRetainedValue()
guard let ext = fileExtension else {
return nil
}
var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
fileUrl = fileUrl.appendingPathExtension(ext as String)
do {
try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
} catch {
print("Could not save resource (resource) to filepath (String(describing: fileUrl))")
return nil
}
return fileUrl
}
func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil),
var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else { return nil }
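// The Apple MakerNote dictionary key "17" is where Photos expects the
// content identifier that pairs the still image with its video.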
let assetIdentifierKey = "17"
let assetIdentifierInfo = [assetIdentifierKey : assetIdentifier]
imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
CGImageDestinationFinalize(imageDestination)
return destinationURL
}
var audioReader: AVAssetReader?
var videoReader: AVAssetReader?
var assetWriter: AVAssetWriter?
func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
var audioWriterInput: AVAssetWriterInput?
var audioReaderOutput: AVAssetReaderOutput?
let videoAsset = AVURLAsset(url: videoURL)
let frameCount = videoAsset.countFrames(exact: false)
guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
completion(nil)
return
}
do {
// Create the Asset Writer
assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
// Create Video Reader Output
videoReader = try AVAssetReader(asset: videoAsset)
let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
videoReader?.add(videoReaderOutput)
// Create Video Writer Input
let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : videoTrack.naturalSize.width, AVVideoHeightKey : videoTrack.naturalSize.height])
videoWriterInput.transform = videoTrack.preferredTransform
videoWriterInput.expectsMediaDataInRealTime = true
assetWriter?.add(videoWriterInput)
// Create Audio Reader Output & Writer Input
if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
do {
let _audioReader = try AVAssetReader(asset: videoAsset)
let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
_audioReader.add(_audioReaderOutput)
audioReader = _audioReader
audioReaderOutput = _audioReaderOutput
let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
_audioWriterInput.expectsMediaDataInRealTime = false
assetWriter?.add(_audioWriterInput)
audioWriterInput = _audioWriterInput
} catch {
print(error)
}
}
// Create necessary identifier metadata and still image time metadata
let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
assetWriter?.metadata = [assetIdentifierMetadata]
assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
// Start the Asset Writer
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: CMTime.zero)
// Add still image metadata
let _stillImagePercent: Float = 0.5
stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()],timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
// For end of writing / progress
var writingVideoFinished = false
var writingAudioFinished = false
var currentFrameCount = 0
func didCompleteWriting() {
guard writingAudioFinished && writingVideoFinished else { return }
assetWriter?.finishWriting {
if self.assetWriter?.status == .completed {
completion(destinationURL)
} else {
completion(nil)
}
}
}
// Start writing video
if videoReader?.startReading() ?? false {
videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
while videoWriterInput.isReadyForMoreMediaData {
if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
currentFrameCount += 1
let percent:CGFloat = CGFloat(currentFrameCount)/CGFloat(frameCount)
progress(percent)
if !videoWriterInput.append(sampleBuffer) {
print("Cannot write: (String(describing: self.assetWriter?.error?.localizedDescription))")
self.videoReader?.cancelReading()
}
} else {
videoWriterInput.markAsFinished()
writingVideoFinished = true
didCompleteWriting()
}
}
}
} else {
writingVideoFinished = true
didCompleteWriting()
}
// Start writing audio
if audioReader?.startReading() ?? false {
audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
while audioWriterInput?.isReadyForMoreMediaData ?? false {
guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
audioWriterInput?.markAsFinished()
writingAudioFinished = true
didCompleteWriting()
return
}
audioWriterInput?.append(sampleBuffer)
}
}
} else {
writingAudioFinished = true
didCompleteWriting()
}
} catch {
print(error)
completion(nil)
}
}
private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
let item = AVMutableMetadataItem()
let keyContentIdentifier = "com.apple.quicktime.content.identifier"
let keySpaceQuickTimeMetadata = "mdta"
item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
item.dataType = "com.apple.metadata.datatype.UTF-8"
return item
}
private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
let spec : NSDictionary = [
kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
"(keySpaceQuickTimeMetadata)/(keyStillImageTime)",
kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
"com.apple.metadata.datatype.int8" ]
var desc : CMFormatDescription? = nil
CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
let input = AVAssetWriterInput(mediaType: .metadata,
outputSettings: nil, sourceFormatHint: desc)
return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
}
private func metadataItemForStillImageTime() -> AVMetadataItem {
let item = AVMutableMetadataItem()
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
item.value = 0 as (NSCopying & NSObjectProtocol)?
item.dataType = "com.apple.metadata.datatype.int8"
return item
}
}
fileprivate extension AVAsset {
func countFrames(exact:Bool) -> Int {
var frameCount = 0
if let videoReader = try? AVAssetReader(asset: self) {
if let videoTrack = self.tracks(withMediaType: .video).first {
frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
if exact {
frameCount = 0
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
videoReader.add(videoReaderOutput)
videoReader.startReading()
// count frames
while true {
let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
if sampleBuffer == nil {
break
}
frameCount += 1
}
videoReader.cancelReading()
}
}
}
return frameCount
}
func stillImageTime() -> CMTime? {
var stillTime:CMTime? = nil
if let videoReader = try? AVAssetReader(asset: self) {
if let metadataTrack = self.tracks(withMediaType: .metadata).first {
let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
videoReader.add(videoReaderOutput)
videoReader.startReading()
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
var found = false
while found == false {
if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
for item in group?.items ?? [] {
if item.key as? String == keyStillImageTime && item.keySpace!.rawValue == keySpaceQuickTimeMetadata {
stillTime = group?.timeRange.start
//print("stillImageTime = (CMTimeGetSeconds(stillTime!))")
found = true
break
}
}
}
}
else {
break;
}
}
videoReader.cancelReading()
}
}
return stillTime
}
func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
var time = self.duration
var frameCount = inFrameCount
if frameCount == 0 {
frameCount = self.countFrames(exact: true)
}
let frameDuration = Int64(Float(time.value) / Float(frameCount))
time.value = Int64(Float(time.value) * percent)
return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
}
func getAssetFrame(percent:Float) -> UIImage?
{
let imageGenerator = AVAssetImageGenerator(asset: self)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1,timescale: 100)
imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1,timescale: 100)
var time = self.duration
time.value = Int64(Float(time.value) * percent)
do {
var actualTime = CMTime.zero
let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
let img = UIImage(cgImage: imageRef)
return img
}
catch let error as NSError
{
print("Image generation failed with error (error)")
return nil
}
}
}