Cómo leer y escribir archivos de audio utilizando NSInputStream y NSOutputStream

Grabo y guardo el archivo de audio .caf usando AVAudioRecorder. Cuando convierto el formato .caf (300 MB) a .wav, la aplicación se bloquea con un error (advertencia de memoria recibida, nivel = 1, y advertencia de memoria recibida, nivel = 2). ¿Cómo utilizar NSInputStream para leer archivos de audio y NSOutputStream para escribir archivos de audio?

/// Converts the recorded .caf session at the given index to a 16-bit,
/// interleaved, little-endian stereo PCM WAV file, streaming sample
/// buffers through an AVAssetReader/AVAssetWriter pair so the whole
/// file is never resident in memory at once.
///
/// @param numIndex Boxed index into delegate.arrSessionList identifying
///                 the session whose "path" entry is the source .caf.
- (void)convertToWav:(NSNumber *)numIndex {
    NSInteger index = [numIndex integerValue];

    // Resolve the display name from the side-info plist, falling back to
    // the first entry in the name list.
    // NOTE(review): strName is never read after this point in the visible
    // code — confirm whether this lookup is still needed.
    NSString *strName;
    NSString *strFilePath1 =
        [delegate.strCassettePathSide stringByAppendingPathComponent:@"audio_list.plist"];
    BOOL bTapeInfoFileExists =
        [[NSFileManager defaultManager] fileExistsAtPath:strFilePath1];
    if (bTapeInfoFileExists) {
        NSMutableDictionary *dictInfo =
            [[NSMutableDictionary alloc] initWithContentsOfFile:strFilePath1];
        if ([dictInfo valueForKey:@"lastName"]) {
            strName = [dictInfo valueForKey:@"lastName"];
        } else {
            strName = [delegate.arrNameList objectAtIndex:0];
        }
    } else {
        strName = [delegate.arrNameList objectAtIndex:0];
    }

    // Source asset: the .caf recorded by AVAudioRecorder.
    NSString *cafFilePath =
        [[delegate.arrSessionList objectAtIndex:index] valueForKey:@"path"];
    NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader =
        [AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
    // Check the return value, not the error pointer — the error object is
    // only meaningful when creation fails.
    if (!assetReader) {
        NSLog(@"error: %@", assetError);
        return;
    }

    AVAssetReaderOutput *assetReaderOutput =
        [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // Destination .wav path: same base name, replacing any stale file.
    NSString *strWavFileName =
        [NSString stringWithFormat:@"%@.wav",
            [[cafFilePath lastPathComponent] stringByDeletingPathExtension]];
    NSString *wavFilePath =
        [delegate.strCassettePathSide stringByAppendingPathComponent:strWavFileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];

    AVAssetWriter *assetWriter =
        [AVAssetWriter assetWriterWithURL:exportURL
                                 fileType:AVFileTypeWAVE
                                    error:&assetError];
    if (!assetWriter) {
        NSLog(@"error: %@", assetError);
        return;
    }

    // FIX: original said "shanetworkingApplication" — a translation-garbled
    // "sharedApplication"; it would not compile.
    AppDelegate *appDelegate = [[UIApplication sharedApplication] delegate];
    NSInteger nSampleRate =
        [[appDelegate.dictWAVQuality valueForKey:@"samplerate"] integerValue];

    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    // 16-bit, interleaved, little-endian, 2-channel linear PCM.
    NSDictionary *outputSettings = @{
        AVFormatIDKey : @(kAudioFormatLinearPCM),
        AVSampleRateKey : @((float)nSampleRate),
        AVNumberOfChannelsKey : @2,
        AVChannelLayoutKey : [NSData dataWithBytes:&channelLayout
                                            length:sizeof(AudioChannelLayout)],
        AVLinearPCMBitDepthKey : @16,
        AVLinearPCMIsNonInterleaved : @NO,
        AVLinearPCMIsFloatKey : @NO,
        AVLinearPCMIsBigEndianKey : @NO,
    };
    AVAssetWriterInput *assetWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    // Offline conversion: let the writer pull data as fast as it can.
    assetWriterInput.expectsMediaDataInRealTime = NO;

    [assetWriter startWriting];
    [assetReader startReading];

    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];

    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue =
        dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                [assetWriterInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                // FIX: copyNextSampleBuffer follows the Core Foundation
                // Create Rule — the caller owns the buffer and must release
                // it even under ARC. Without this every sample buffer leaks,
                // which is exactly the memory-warning crash reported above.
                CFRelease(nextBuffer);
            } else {
                [assetWriterInput markAsFinished];
                // FIX: finishWriting was commented out in the original;
                // without it the WAV header is never finalized and the
                // output file is left incomplete.
                [assetWriter finishWriting];
                [assetReader cancelReading];
                [dictTemp setValue:wavFilePath forKey:@"path"];
                [dictTemp setValue:nil forKey:@"progress"];
                [delegate.arrSessionList replaceObjectAtIndex:index withObject:dictTemp];
                // NOTE(review): this writes "audiolist.plist" while the name
                // lookup above reads "audio_list.plist" — confirm which
                // filename is intended.
                NSString *strListFilePath =
                    [delegate.strCassettePathSide stringByAppendingPathComponent:@"audiolist.plist"];
                [delegate.arrSessionList writeToFile:strListFilePath atomically:YES];
                break;
            }
        }
    }];
}

Aunque esté utilizando ARC, todavía necesita liberar el búfer.

Los objetos de Core Foundation no son liberados por ARC, de acuerdo con «The Create Rule» de la Guía de programación de gestión de memoria para Core Foundation. Tiene que liberar el CMSampleBufferRef obtenido de – (CMSampleBufferRef)copyNextSampleBuffer, o tendrá una pérdida de memoria.

 https://developer.apple.com/library/mac/documentation/CoreFoundation/Conceptual/CFMemoryMgmt/Concepts/Ownership.html#//apple_ref/doc/uid/20001148-103029 if (nextBuffer) { // append buffer [assetWriterInput appendSampleBuffer: nextBuffer]; convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer); CFRelease(nextBuffer); }