I’m working on an app that creates a smaller-bitrate version of a video from a video PHAsset.
When I deploy the app from Xcode to a real iPhone / iPad test device, the app works well.
But when I distribute it through TestFlight or the App Store and then download and run it, the video can never be saved to the Photos library, and I get this error log: Error saving video to photo library: Optional(Error Domain=PHPhotosErrorDomain Code=3302 "(null)").
I googled this 3302 error code, and what I found says that "PHPhotosErrorDomain error 3302" when saving a video to the photo library on iOS means the operation failed because the file contains an audio codec that isn't supported by that iOS version.
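If that explanation is right, the first thing I would want to verify is which audio codec actually ends up in the exported file. Here is a minimal diagnostic sketch for that (not part of my app; outputURL is just a placeholder for the file my export code writes):

import AVFoundation

// Minimal diagnostic sketch: print the audio codec of an exported video file.
// Assumption: outputURL points at the .mp4 written by my export code.
func logAudioCodec(of outputURL: URL) {
    let asset = AVAsset(url: outputURL)
    guard let audioTrack = asset.tracks(withMediaType: .audio).first else {
        print("No audio track found.")
        return
    }
    for description in audioTrack.formatDescriptions {
        let formatDescription = description as! CMFormatDescription
        // FourCC such as 'aac ' (kAudioFormatMPEG4AAC) or 'lpcm' (kAudioFormatLinearPCM).
        let formatID = CMFormatDescriptionGetMediaSubType(formatDescription)
        let bytes: [UInt8] = [24, 16, 8, 0].map { UInt8((formatID >> $0) & 0xFF) }
        print("Audio codec FourCC:", String(bytes: bytes, encoding: .ascii) ?? "\(formatID)")
    }
}

Calling logAudioCodec(of:) on the output file right after writing finishes, in both a debug build and a TestFlight build, would show whether the audio codec really differs between the two.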
Here is my code:
//-------------------------------------------
import Foundation
//-------------------------------------------
#if useResizeVideoActivity
//-------------------------------------------
import UIKit
import AVKit
import Photos
//-------------------------------------------
var lastUserVideoBitRate: Int = 1200;
let tagLastUserVideoBitRate = "KAliwrk12P";
//-------------------------------------------
class ActivityResizeVideo: UIActivity
{
//-------------------------------------------
var sourceAsset: PHAsset?
var videoAsset: AVAsset?
var viewController: UIViewController?
//-------------------------------------------
fileprivate var avAssetVideoTrack: AVAssetTrack?
fileprivate var avAssetAudioTrack: AVAssetTrack?
//-------------------------------------------
override var activityType: UIActivity.ActivityType? { return UIActivity.ActivityType(strYesApp_Create); }
//-------------------------------------------
override var activityTitle: String? { return strCreateSmallFileVersion.local(); }
//-------------------------------------------
override var activityImage: UIImage? { return UIImage(systemName: strGoBackwardMinus); }
//-------------------------------------------
override var activityViewController: UIViewController? { return nil; }
//-------------------------------------------
override func canPerform(withActivityItems activityItems: [Any]) -> Bool
{
#if true
return (self.videoAsset != nil)&&(self.sourceAsset != nil);
#else
for item in activityItems{ if item is AVAsset { return true; } }
return false;
#endif
}
//-------------------------------------------
override func prepare(withActivityItems activityItems: [Any])
{
mmaLog("start ResizeVideoActivity.prepare() ...");
#if false
for item in activityItems{ if let asset = item as? AVAsset { videoAsset = asset; } }
#endif
self.avAssetVideoTrack = self.videoAsset?.tracks(withMediaType: .video).first;
self.avAssetAudioTrack = self.videoAsset?.tracks(withMediaType: .audio).first;
}
//-------------------------------------------
override func perform()
{
mmaLog("start ResizeVideoActivity.perform() ...");
presentBitRateSelection(){ success in self.activityDidFinish(success); }
}
//-------------------------------------------
private func presentBitRateSelection(completion: ((Bool) -> Void)? = nil)
{
guard let video_track = self.avAssetVideoTrack else { mmaLog("Error in video track."); completion?(false); return; }
guard let audio_track = self.avAssetAudioTrack else { mmaLog("Error in audio track."); completion?(false); return; }
guard let vc = self.viewController else { completion?(false); return; }
guard let a = self.videoAsset, let s = self.sourceAsset else { completion?(false); return; }
mmaLog("start presentBitRateSelection() ...");
let msg: String; var w: Int = lastUserVideoBitRate;
if let r = getCurrentBitRate()
{
msg = "(strCurrentBitRateKbps.local()):(r)nn(strPleaseSetANewBitRate.local()):";
if r < w { w = Int(CGFloat(r) * 0.75) / 100 * 100; if w < 100 { w = 100; } }
}
else{ msg = strCannotGetCurrentBitRate.local(); }
let alt = UIAlertController(title: nil, message: msg, preferredStyle: .alert);
alt.addTextField
{ text_field in
text_field.placeholder = ""; text_field.keyboardType = .numberPad; text_field.text = "\(w)";
};
alt.addAction(UIAlertAction(title: strCancel.local(), style: .cancel) { _ in completion?(false); return; });
let createAction = UIAlertAction(title: strConfirm.local(), style: .default)
{ [weak alt] _ in
if let m = alt?.textFields?.first?.text
{
if let t = Int(m)
{
lastUserVideoBitRate = t; mmaLog("User input bit rate: \(t)");
NotificationCenter.default.post(name: .someTaskStartProcessing, object: nil, userInfo: [tagTitle: strTaskInProcessing.local(), tagMessage: strPleaseKeepThisScreenOpenAndStorageSpaceNotTooLow.local()]);
DispatchQueue.global(qos: .userInitiated).async
{
createSmallerVideo(sourceAsset: s, avAsset: a, withBitRate: t * 1024, videoTrack: video_track, audioTrack: audio_track, in: vc)
{ success in
mmaLog("createSmallerVideo() completed with status: (success).");
DispatchQueue.main.asyncAfter(deadline: .now() + 1) // in case the interval with `someTaskStartProcessing` is too small.
{
NotificationCenter.default.post(name: .someTaskEndProcessing, object: nil, userInfo: [tagPlaySuccessSound : success, tagTitle: (success) ? strSmallFileVersionCreatedSuccessfully.local() : strCreationFailed.local(), tagMessage: (success) ? strPleaseSwipeToTheNextVideoToView.local() : strPleaseTryAgain.local()]);
#if useAdMob
if success
{
let s = self.sourceAsset?.assetFileSizeInByte ?? 0;
var w = s / 1024 / 1024;
if w > 2000 { w = 2000; }
else if w < 200 { w = 200; }
consumeUserAdPoints(w);
}
#endif
}
}
}
completion?(true); return;
}
else{ mmaLog("Error on user input: (w)"); completion?(false); return; }
}
else { mmaLog("Error on getting user input."); completion?(false); return; }
};
alt.addAction(createAction);
vc.present(alt, animated: true, completion: nil);
}
//-------------------------------------------
private func getCurrentBitRate() -> Int?
{
mmaLog("start getCurrentBitRate() ...");
#if false
guard let track = self.videoAsset?.tracks(withMediaType: .video).first else { return nil; }
let estimatedDataRate = track.estimatedDataRate;
mmaLog("estimatedDataRate: (estimatedDataRate)");
return Int(estimatedDataRate / 1024);
#else
guard let t = self.avAssetVideoTrack else { return nil; }
return Int(t.estimatedDataRate / 1024);
#endif
}
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
}
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
func createSmallerVideo(sourceAsset: PHAsset, avAsset: AVAsset, withBitRate bitRate: Int, videoTrack: AVAssetTrack, audioTrack: AVAssetTrack, in viewController: UIViewController, completion: ((Bool) -> Void)? = nil)
{
mmaLog("Start ResizeVideoActivity.createSmallerVideo() ...");
#if false
let out_path = NSTemporaryDirectory().appending("smallerVideo.mp4");
#else
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask);
let out_path = paths[0].appendingPathComponent("mmavideo.mp4").path;
#endif
if FileManager.default.fileExists(atPath: out_path) {
catch { mmaLog("Error remove exsiting file."); completion?(false); return; }
}
let video_track = videoTrack;
let audio_track = audioTrack;
let out_url = URL(fileURLWithPath: out_path);
do
{
let reader = try AVAssetReader(asset: avAsset);
let writer = try AVAssetWriter(outputURL: out_url, fileType: .mp4);
let video_reader_output_settings: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB
];
let video_reader_output = AVAssetReaderTrackOutput(track: video_track, outputSettings: video_reader_output_settings);
if reader.canAdd(video_reader_output){ reader.add(video_reader_output); }
else{ mmaLog("Failed to add video output to reader."); completion?(false); return; }
#if false
let audio_reader_track_output_settings: [String: Any] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVEncoderBitRateKey: 128000,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
];
#else
let audio_reader_track_output_settings: [String: Any] = [
AVFormatIDKey: kAudioFormatLinearPCM,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
];
#endif
let audio_reader_output = AVAssetReaderTrackOutput(track: audio_track, outputSettings: audio_reader_track_output_settings);
if reader.canAdd(audio_reader_output){ reader.add(audio_reader_output); }
else{ mmaLog("Failed to add audio output to reader."); completion?(false); return; }
let video_compression_properties: [String: Any] = [ AVVideoAverageBitRateKey: bitRate];
let video_writer_input_output_settings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: video_track.naturalSize.width,
AVVideoHeightKey: video_track.naturalSize.height,
AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey: video_compression_properties
];
let video_writer_input = AVAssetWriterInput(mediaType: .video, outputSettings: video_writer_input_output_settings);
//video_writer_input.expectsMediaDataInRealTime = false;
video_writer_input.transform = video_track.preferredTransform;
if writer.canAdd(video_writer_input){ writer.add(video_writer_input); }
else{ mmaLog("Failded to add video writer input to writer."); completion?(false); return; }
#if false
let audio_reader_input_output_settings: [String: Any] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
//AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue,
];
#else
let audio_reader_input_output_settings: [String: Any] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVEncoderBitRateKey: 128000,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
];
#endif
let audio_writer_input = AVAssetWriterInput(mediaType: .audio, outputSettings: audio_reader_input_output_settings);
//audio_writer_input.expectsMediaDataInRealTime = false;
if writer.canAdd(audio_writer_input){ writer.add(audio_writer_input); }
else{ mmaLog("Failed to add audio writer input to writer."); completion?(false); return; }
writer.startWriting();
reader.startReading();
writer.startSession(atSourceTime: CMTime.zero);
let video_queue = DispatchQueue(label: "video_queue");
var is_video_writer_input_finished = false;
video_writer_input.requestMediaDataWhenReady(on: video_queue)
{
while video_writer_input.isReadyForMoreMediaData
{
if let sample_buffer = video_reader_output.copyNextSampleBuffer() { video_writer_input.append(sample_buffer); }
else { video_writer_input.markAsFinished(); is_video_writer_input_finished = true; break; }
}
}
let audio_queue = DispatchQueue(label: "audio_queue");
var is_audio_writer_input_finished = false;
audio_writer_input.requestMediaDataWhenReady(on: audio_queue)
{
while audio_writer_input.isReadyForMoreMediaData
{
if let sample_buffer = audio_reader_output.copyNextSampleBuffer() { audio_writer_input.append(sample_buffer); }
else { audio_writer_input.markAsFinished(); is_audio_writer_input_finished = true; break; }
}
}
while true
{
if is_video_writer_input_finished && is_audio_writer_input_finished
{
writer.finishWriting
{
//DispatchQueue.main.async
DispatchQueue.global(qos: .userInitiated).async
{
if writer.status == .completed
{
saveVideoToPhotoLibrary(outputURL: out_url, sourceAsset: sourceAsset)
{ success in completion?(success); };
} else { mmaLog("Error on writer."); completion?(false); }
}
}
reader.cancelReading();
break;
}
}
} catch { mmaLog("Unkown error."); completion?(false); }
}
//-------------------------------------------
func saveVideoToPhotoLibrary(outputURL: URL, sourceAsset: PHAsset, completion: ((Bool) -> Void)? = nil)
{
mmaLog("start saveVideoToPhotoLibrary() ...");
PHPhotoLibrary.shared().performChanges({
let creationRequest = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL);
creationRequest?.creationDate = sourceAsset.creationDate ?? Date() + 0.1;
creationRequest?.location = sourceAsset.location;
creationRequest?.isFavorite = sourceAsset.isFavorite;
creationRequest?.isHidden = sourceAsset.isHidden;
let collections = PHAssetCollection.fetchAssetCollectionsContaining(sourceAsset, with: .album, options: nil);
collections.enumerateObjects { (collection, index, stop) in
let albumChangeRequest = PHAssetCollectionChangeRequest(for: collection)
albumChangeRequest?.addAssets([creationRequest!.placeholderForCreatedAsset!] as NSArray)
}
}) { success, error in
if success { mmaLog("saveVideoToPhotoLibrary() successed."); completion?(true); }
else { mmaLog("Error saving video to photo library: (String(describing: error))"); completion?(false); }
}
}
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
//-------------------------------------------
#endif
//-------------------------------------------
I have tried several things, but I cannot solve this.
I hope someone can help. Thanks in advance!