¿Cómo agregar ilustraciones en el archivo de audio para mostrarlas en la portada del álbum?

Tengo un archivo de audio y quiero cambiar las ilustraciones de la portada del álbum. Entonces, ¿es posible? Y, ¿cómo puedo configurar la ilustración en la portada del álbum para el archivo de audio en la programación de iOS?

En realidad, combiné dos archivos de audio y quiero agregar ilustraciones para la portada del álbum que se mostrarán en iTunes.

El código se proporciona a continuación:

/// Merges the two source audio files (audioFileURL1 / audioFileURL2) into a
/// single M4A file under Library/Caches, honoring the per-track start delays
/// and the preferred volumes held in NSTSharedData.
/// The export is asynchronous: on success -performAction is invoked on the
/// main thread; on failure the spinner is hidden and an error alert is shown.
/// @return NO if a URL is missing, a source file has no audio track, or the
///         export session cannot be created; YES once the export has started
///         (completion is reported asynchronously).
- (BOOL)combineVoices1 {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory =
        [[paths objectAtIndex:0] stringByAppendingPathComponent:@"Caches"];
    NSString *outputFilePath =
        [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a", textFieldMixFile.text];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *audioFileInput1 = audioFileURL1; // path of the first original audio file
    NSURL *audioFileInput2 = audioFileURL2; // path of the second original audio file
    if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) {
        return NO;
    }

    // The export session fails if the output file already exists, so remove any
    // stale result first (ignore the error when the file is simply absent).
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // --- First track -------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    // Point in the mix timeline (seconds) where the first clip starts playing.
    CMTime nextClipStartTimeMix1 =
        (playbackDelayAfterTimeMix1 > 0) ? CMTimeMake(playbackDelayAfterTimeMix1, 1) : kCMTimeZero;
    // Offset (seconds) into the source file where the inserted range begins.
    CMTime startTimeMix1 =
        (playbackDelayMix1 > 0) ? CMTimeMake(playbackDelayMix1, 1) : kCMTimeZero;
    [compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];

    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audioFileURL1 options:nil];
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if (tracks.count == 0) {
        return NO; // first source file contains no audio track
    }
    AVAssetTrack *clipAudioTrack = [tracks objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration)
                                   ofTrack:clipAudioTrack
                                    atTime:nextClipStartTimeMix1
                                     error:nil];

    // --- Second track ------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack1 =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTimeMix2 =
        (playbackDelayAfterTimeMix2 > 0) ? CMTimeMake(playbackDelayAfterTimeMix2, 1) : kCMTimeZero;
    CMTime startTimeMix2 =
        (playbackDelayMix2 > 0) ? CMTimeMake(playbackDelayMix2, 1) : kCMTimeZero;
    [compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];

    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audioFileURL2 options:nil];
    NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
    if (tracks1.count == 0) {
        return NO; // second source file contains no audio track
    }
    AVAssetTrack *clipAudioTrack1 = [tracks1 objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration)
                                    ofTrack:clipAudioTrack1
                                     atTime:nextClipStartTimeMix2
                                      error:nil];

    // --- Export ------------------------------------------------------------
    AVAssetExportSession *exportSession =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) {
        return NO;
    }
    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            [self performSelectorOnMainThread:@selector(performAction)
                                   withObject:nil
                                waitUntilDone:NO];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            [self performSelectorOnMainThread:@selector(hideSpinningWheel)
                                   withObject:nil
                                waitUntilDone:NO];
            [[NSTSharedData instance]
                showAlertForTitle:@"Error!"
                       andMessage:[NSString stringWithFormat:@"%@",
                                      [[exportSession error] localizedDescription]]];
        }
    }];
    return YES;
}

Resolví mi problema y ahora funciona bien: agregué el código cerca de "AVAssetExportSession" en el código anterior. Finalmente, el método queda así:

 - (BOOL) combineVoices1 { NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES); NSString *libraryCachesDirectory = [paths objectAtIndex:0]; libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"]; NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text]; NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath]; NSURL *audioFileInput1= audioFileURL1;//<Path of orignal audio file> NSURL *audioFileInput2= audioFileURL2;//<Path of orignal audio file> if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) { return NO; } [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL]; //CMTime nextClipStartTime = kCMTimeZero; AVMutableComposition *composition = [[AVMutableComposition alloc] init]; AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio prefernetworkingTrackID:kCMPersistentTrackID_Invalid]; CMTime nextClipStartTimeMix1; if (playbackDelayAfterTimeMix1 > 0) { nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1); }else{ nextClipStartTimeMix1 = kCMTimeZero; } CMTime startTimeMix1; if (playbackDelayMix1 > 0) { startTimeMix1 = CMTimeMake(playbackDelayMix1, 1); }else{ startTimeMix1 = kCMTimeZero; } [compositionAudioTrack setPrefernetworkingVolume:[NSTShanetworkingData instance].volumeOfMIX1]; NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne]; AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil]; NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio]; AVAssetTrack *clipAudioTrack; if (tracks.count > 0) { clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; }else{ return NO; } [compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil]; //avAsset.commonMetadata AVMutableCompositionTrack 
*compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio prefernetworkingTrackID:kCMPersistentTrackID_Invalid]; CMTime nextClipStartTimeMix2; if (playbackDelayAfterTimeMix2 > 0) { nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1); }else{ nextClipStartTimeMix2 = kCMTimeZero; } CMTime startTimeMix2; if (playbackDelayMix2 > 0) { startTimeMix2 = CMTimeMake(playbackDelayMix2, 1); }else{ startTimeMix2 = kCMTimeZero; } [compositionAudioTrack1 setPrefernetworkingVolume:[NSTShanetworkingData instance].volumeOfMIX2]; //NSString *soundOne1 =[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"]; NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1]; AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil]; NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio]; AVAssetTrack *clipAudioTrack1; if (tracks1.count > 0) { clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; }else{ return NO; } [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil]; /** added MetadataItem **/ AVMutableMetadataItem *artistMetadata = [[AVMutableMetadataItem alloc] init]; artistMetadata.key = AVMetadataiTunesMetadataKeyArtist; artistMetadata.keySpace = AVMetadataKeySpaceiTunes; artistMetadata.locale = [NSLocale currentLocale]; artistMetadata.value = uTakeTheMicArtist; AVMutableMetadataItem *albumMetadata = [[AVMutableMetadataItem alloc] init]; albumMetadata.key = AVMetadataiTunesMetadataKeyAlbum; albumMetadata.keySpace = AVMetadataKeySpaceiTunes; albumMetadata.locale = [NSLocale currentLocale]; albumMetadata.value = uTakeTheMicAlbum; AVMutableMetadataItem *songMetadata = [[AVMutableMetadataItem alloc] init]; songMetadata.key = AVMetadataiTunesMetadataKeySongName; songMetadata.keySpace = AVMetadataKeySpaceiTunes; songMetadata.locale = [NSLocale currentLocale]; songMetadata.value = 
textFieldMixFile.text; AVMutableMetadataItem *imageMetadata = [[AVMutableMetadataItem alloc] init]; imageMetadata.key = AVMetadataiTunesMetadataKeyCoverArt; imageMetadata.keySpace = AVMetadataKeySpaceiTunes; imageMetadata.locale = [NSLocale currentLocale]; imageMetadata.value = imageData; //imageData is NSData of UIImage. NSArray *metadata = [NSArray arrayWithObjects:artistMetadata, albumMetadata, songMetadata, imageMetadata, nil]; AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A]; if (nil == exportSession) return NO; exportSession.metadata = metadata; exportSession.outputURL = audioFileOutput; exportSession.outputFileType = AVFileTypeAppleM4A; [exportSession exportAsynchronouslyWithCompletionHandler:^ { if (AVAssetExportSessionStatusCompleted == exportSession.status) { [self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO]; } else if (AVAssetExportSessionStatusFailed == exportSession.status) { [self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO]; [[NSTShanetworkingData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@.",[[exportSession error] localizedDescription]]]; //NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]); } }]; return YES; }