iOS Swift: convert mp3 to aac

I'm converting an mp3 to m4a in Swift, with code based on this.

It works when I generate a PCM file. When I change the export format to m4a, it generates a file, but it won't play. Why is the file corrupt?

Here is the code so far:

    import AVFoundation
    import UIKit

    class ViewController: UIViewController {

        var rwAudioSerializationQueue:dispatch_queue_t!
        var asset:AVAsset!
        var assetReader:AVAssetReader!
        var assetReaderAudioOutput:AVAssetReaderTrackOutput!
        var assetWriter:AVAssetWriter!
        var assetWriterAudioInput:AVAssetWriterInput!
        var outputURL:NSURL!

        override func viewDidLoad() {
            super.viewDidLoad()

            let rwAudioSerializationQueueDescription = String(self) + " rw audio serialization queue"

            // Create the serialization queue to use for reading and writing the audio data.
            self.rwAudioSerializationQueue = dispatch_queue_create(rwAudioSerializationQueueDescription, nil)

            let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
            let documentsPath = paths[0]

            print(NSBundle.mainBundle().pathForResource("input", ofType: "mp3"))

            self.asset = AVAsset(URL: NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("input", ofType: "mp3")!))
            self.outputURL = NSURL(fileURLWithPath: documentsPath + "/output.m4a")
            print(self.outputURL)

            // [self.asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
            self.asset.loadValuesAsynchronouslyForKeys(["tracks"], completionHandler: {
                print("loaded")
                var success = true
                var localError:NSError?
                success = (self.asset.statusOfValueForKey("tracks", error: &localError) == AVKeyValueStatus.Loaded)

                // Check for success of loading the assets tracks.
                //success = ([self.asset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
                if (success) {
                    // If the tracks loaded successfully, make sure that no file exists at the output path for the asset writer.
                    let fm = NSFileManager.defaultManager()
                    let localOutputPath = self.outputURL.path
                    if (fm.fileExistsAtPath(localOutputPath!)) {
                        do {
                            try fm.removeItemAtPath(localOutputPath!)
                            success = true
                        } catch {
                        }
                    }
                }
                if (success) {
                    success = self.setupAssetReaderAndAssetWriter()
                }
                if (success) {
                    success = self.startAssetReaderAndWriter()
                }
            })
        }

        func setupAssetReaderAndAssetWriter() -> Bool {
            do {
                try self.assetReader = AVAssetReader(asset: self.asset)
            } catch {
            }
            do {
                try self.assetWriter = AVAssetWriter(URL: self.outputURL, fileType: AVFileTypeCoreAudioFormat)
            } catch {
            }

            var assetAudioTrack:AVAssetTrack? = nil
            let audioTracks = self.asset.tracksWithMediaType(AVMediaTypeAudio)
            if (audioTracks.count > 0) {
                assetAudioTrack = audioTracks[0]
            }

            if (assetAudioTrack != nil) {
                let decompressionAudioSettings:[String : AnyObject] = [
                    AVFormatIDKey:Int(kAudioFormatLinearPCM)
                ]
                self.assetReaderAudioOutput = AVAssetReaderTrackOutput(track: assetAudioTrack!, outputSettings: decompressionAudioSettings)
                self.assetReader.addOutput(self.assetReaderAudioOutput)

                var channelLayout = AudioChannelLayout()
                memset(&channelLayout, 0, sizeof(AudioChannelLayout));
                channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

                /*let compressionAudioSettings:[String : AnyObject] = [
                    AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
                    AVEncoderBitRateKey:128000,
                    AVSampleRateKey:44100,
                    // AVEncoderBitRatePerChannelKey:16,
                    // AVEncoderAudioQualityKey:AVAudioQuality.High.rawValue,
                    AVNumberOfChannelsKey:2,
                    AVChannelLayoutKey: NSData(bytes:&channelLayout, length:sizeof(AudioChannelLayout))
                ]

                var outputSettings:[String : AnyObject] = [
                    AVFormatIDKey: Int(kAudioFormatLinearPCM),
                    AVSampleRateKey: 44100,
                    AVNumberOfChannelsKey: 2,
                    AVChannelLayoutKey: NSData(bytes:&channelLayout, length:sizeof(AudioChannelLayout)),
                    AVLinearPCMBitDepthKey: 16,
                    AVLinearPCMIsNonInterleaved: false,
                    AVLinearPCMIsFloatKey: false,
                    AVLinearPCMIsBigEndianKey: false
                ]*/

                let outputSettings:[String : AnyObject] = [
                    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                    AVSampleRateKey: 44100,
                    AVNumberOfChannelsKey: 2,
                    AVChannelLayoutKey: NSData(bytes:&channelLayout, length:sizeof(AudioChannelLayout))
                ]
                self.assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: outputSettings)
                self.assetWriter.addInput(self.assetWriterAudioInput)
            }
            return true
        }

        func startAssetReaderAndWriter() -> Bool {
            self.assetWriter.startWriting()
            self.assetReader.startReading()
            self.assetWriter.startSessionAtSourceTime(kCMTimeZero)

            self.assetWriterAudioInput.requestMediaDataWhenReadyOnQueue(self.rwAudioSerializationQueue, usingBlock: {
                while(self.assetWriterAudioInput.readyForMoreMediaData) {
                    var sampleBuffer = self.assetReaderAudioOutput.copyNextSampleBuffer()
                    if(sampleBuffer != nil) {
                        self.assetWriterAudioInput.appendSampleBuffer(sampleBuffer!)
                        sampleBuffer = nil
                    } else {
                        self.assetWriterAudioInput.markAsFinished()
                        self.assetReader.cancelReading()
                        print("done")
                        break
                    }
                }
            })
            return true
        }

        override func didReceiveMemoryWarning() {
            super.didReceiveMemoryWarning()
            // Dispose of any resources that can be recreated.
        }
    }

Update

It's creating a caf file instead of an m4a.

Replace AVFileTypeCoreAudioFormat with AVFileTypeAppleM4A in

    AVAssetWriter(URL: self.outputURL, fileType: AVFileTypeCoreAudioFormat)

Call self.assetWriter.finishWritingWithCompletionHandler() when you're done.
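Against the Swift 2 code in the question, the two changes would look roughly like this (a minimal sketch; only the file type and the finish call change, everything else stays as posted):

    // 1. Write an MPEG-4 audio (.m4a) container instead of a Core Audio (.caf) file.
    self.assetWriter = try AVAssetWriter(URL: self.outputURL, fileType: AVFileTypeAppleM4A)

    // 2. When the reader runs out of sample buffers, finish the writer as well as
    //    marking the input as finished; the file is only finalized in this handler.
    self.assetWriterAudioInput.markAsFinished()
    self.assetWriter.finishWritingWithCompletionHandler {
        print("finished writing m4a")
    }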

Updated the source code from the question to Swift 4 and wrapped it in a class. Credit goes to Castles and Rythmic Fistman for the original source code and the answer. I left the original author's comments and added some assert and print statements for debugging. Tested on iOS.

The bit rate for the output file is hard-coded at 96 kb/s; you can easily override this value. Most of the audio files I'm converting are 320 kb/s, so I'm using this class to compress files for offline storage. Compression results are at the bottom of this answer.
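If you want to override the bit rate without editing the class each time, one option is to build the writer's output settings from a parameter and substitute that dictionary in setupAssetReaderAndAssetWriter(). A minimal sketch; makeAACOutputSettings is a hypothetical helper (not part of the class below) and assumes the same 44.1 kHz stereo AAC target:

    import AVFoundation

    /// Hypothetical helper: the same AAC settings the class below hard-codes,
    /// but with the encoder bit rate passed in (defaulting to 96 kb/s).
    func makeAACOutputSettings(bitRate: Int = 96_000) -> [String: Any] {
        var channelLayout = AudioChannelLayout()
        memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo

        return [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100,
            AVEncoderBitRateKey: bitRate,
            AVNumberOfChannelsKey: 2,
            AVChannelLayoutKey: NSData(bytes: &channelLayout,
                                       length: MemoryLayout<AudioChannelLayout>.size)
        ]
    }

    // In setupAssetReaderAndAssetWriter(), replace the literal outputSettings with:
    // let outputSettings = makeAACOutputSettings(bitRate: 128_000)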

Usage:

    let inputFilePath = URL(fileURLWithPath: "/path/to/file.mp3")
    let outputFileURL = URL(fileURLWithPath: "/path/to/output/compressed.mp4")

    if let audioConverter = AVAudioFileConverter(inputFileURL: inputFilePath, outputFileURL: outputFileURL) {
        audioConverter.convert()
    }

Class

    import AVFoundation

    final class AVAudioFileConverter {

        var rwAudioSerializationQueue: DispatchQueue!
        var asset:AVAsset!
        var assetReader:AVAssetReader!
        var assetReaderAudioOutput:AVAssetReaderTrackOutput!
        var assetWriter:AVAssetWriter!
        var assetWriterAudioInput:AVAssetWriterInput!
        var outputURL:URL
        var inputURL:URL

        init?(inputFileURL: URL, outputFileURL: URL) {
            inputURL = inputFileURL
            outputURL = outputFileURL

            // Fail the initializer if there is nothing to convert at the input path.
            if !FileManager.default.fileExists(atPath: inputURL.path) {
                print("Input file does not exist at file path \(inputURL.path)")
                return nil
            }
        }

        func convert() {
            let rwAudioSerializationQueueDescription = " rw audio serialization queue"

            // Create the serialization queue to use for reading and writing the audio data.
            rwAudioSerializationQueue = DispatchQueue(label: rwAudioSerializationQueueDescription)
            assert(rwAudioSerializationQueue != nil, "Failed to initialize Dispatch Queue")

            asset = AVAsset(url: inputURL)
            assert(asset != nil, "Error creating AVAsset from input URL")

            print("Output file path -> ", outputURL.absoluteString)

            asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: {
                var success = true
                var localError:NSError?
                success = (self.asset.statusOfValue(forKey: "tracks", error: &localError) == AVKeyValueStatus.loaded)

                // Check for success of loading the assets tracks.
                if (success) {
                    // If the tracks loaded successfully, make sure that no file exists at the output path for the asset writer.
                    let fm = FileManager.default
                    let localOutputPath = self.outputURL.path
                    if (fm.fileExists(atPath: localOutputPath)) {
                        do {
                            try fm.removeItem(atPath: localOutputPath)
                            success = true
                        } catch {
                            print("Error trying to remove output file at path -> \(localOutputPath)")
                        }
                    }
                }

                if (success) {
                    success = self.setupAssetReaderAndAssetWriter()
                } else {
                    print("Failed setting up Asset Reader and Writer")
                }

                if (success) {
                    success = self.startAssetReaderAndWriter()
                    return
                } else {
                    print("Failed to start Asset Reader and Writer")
                }
            })
        }

        func setupAssetReaderAndAssetWriter() -> Bool {
            do {
                assetReader = try AVAssetReader(asset: asset)
            } catch {
                print("Error Creating AVAssetReader")
            }
            do {
                assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.m4a)
            } catch {
                print("Error Creating AVAssetWriter")
            }

            var assetAudioTrack:AVAssetTrack? = nil
            let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
            if (audioTracks.count > 0) {
                assetAudioTrack = audioTracks[0]
            }

            if (assetAudioTrack != nil) {
                let decompressionAudioSettings:[String : Any] = [
                    AVFormatIDKey:Int(kAudioFormatLinearPCM)
                ]
                assetReaderAudioOutput = AVAssetReaderTrackOutput(track: assetAudioTrack!, outputSettings: decompressionAudioSettings)
                assert(assetReaderAudioOutput != nil, "Failed to initialize AVAssetReaderTrackOutput")
                assetReader.add(assetReaderAudioOutput)

                var channelLayout = AudioChannelLayout()
                memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
                channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

                let outputSettings:[String : Any] = [
                    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                    AVSampleRateKey: 44100,
                    AVEncoderBitRateKey: 96000,
                    AVNumberOfChannelsKey: 2,
                    AVChannelLayoutKey: NSData(bytes:&channelLayout, length:MemoryLayout<AudioChannelLayout>.size)
                ]
                assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: outputSettings)
                assert(assetWriterAudioInput != nil, "Failed to initialize AVAssetWriterInput")
                assetWriter.add(assetWriterAudioInput)
            }

            print("Finished Setup of AVAssetReader and AVAssetWriter")
            return true
        }

        func startAssetReaderAndWriter() -> Bool {
            print("STARTING ASSET WRITER")

            assetWriter.startWriting()
            assetReader.startReading()
            assetWriter.startSession(atSourceTime: kCMTimeZero)

            assetWriterAudioInput.requestMediaDataWhenReady(on: rwAudioSerializationQueue, using: {
                while(self.assetWriterAudioInput.isReadyForMoreMediaData) {
                    var sampleBuffer = self.assetReaderAudioOutput.copyNextSampleBuffer()
                    if(sampleBuffer != nil) {
                        self.assetWriterAudioInput.append(sampleBuffer!)
                        sampleBuffer = nil
                    } else {
                        self.assetWriterAudioInput.markAsFinished()
                        self.assetReader.cancelReading()
                        self.assetWriter.finishWriting {
                            print("Asset Writer Finished Writing")
                        }
                        break
                    }
                }
            })
            return true
        }
    }
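convert() returns immediately and all of the work happens on the serialization queue, so the only completion signal is the print inside finishWriting. If a caller needs to know when the file is ready, one approach is to store a completion closure on the class and fire it from that handler. These are fragments to drop into the class above, not standalone code; the completionHandler property is my own addition:

    // Hypothetical addition to AVAudioFileConverter: notify the caller when writing ends.
    var completionHandler: ((Bool) -> Void)?

    // Inside startAssetReaderAndWriter(), replace the bare finishWriting call with:
    self.assetWriter.finishWriting {
        // .completed means the m4a was finalized; anything else indicates a failure.
        let succeeded = (self.assetWriter.status == .completed)
        DispatchQueue.main.async {
            self.completionHandler?(succeeded)
        }
    }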

Input file: 17.3 MB

    // generated with afinfo on Mac
    File:           D290A73C37B777F1.mp3
    File type ID:   MPG3
    Num Tracks:     1
    ----
    Data format:     2 ch,  44100 Hz, '.mp3' (0x00000000) 0 bits/channel, 0 bytes/packet, 1152 frames/packet, 0 bytes/frame
                    no channel layout.
    estimated duration: 424.542025 sec
    audio bytes: 16981681
    audio packets: 16252
    bit rate: 320000 bits per second
    packet size upper bound: 1052
    maximum packet size: 1045
    audio data file offset: 322431
    optimized
    audio 18720450 valid frames + 576 priming + 1278 remainder = 18722304
    ----

Output file: 5.1 MB

    // generated with afinfo on Mac
    File:           compressed.m4a
    File type ID:   m4af
    Num Tracks:     1
    ----
    Data format:     2 ch,  44100 Hz, 'aac ' (0x00000000) 0 bits/channel, 0 bytes/packet, 1024 frames/packet, 0 bytes/frame
    Channel layout: Stereo (LR)
    estimated duration: 424.542041 sec
    audio bytes: 5019294
    audio packets: 18286
    bit rate: 94569 bits per second
    packet size upper bound: 763
    maximum packet size: 763
    audio data file offset: 44
    not optimized
    audio 18722304 valid frames + 2112 priming + 448 remainder = 18724864
    format list:
    [ 0] format:     2 ch,  44100 Hz, 'aac ' (0x00000000) 0 bits/channel, 0 bytes/packet, 1024 frames/packet, 0 bytes/frame
    Channel layout: Stereo (LR)
    ----