iOS: usar vImage (framework Accelerate) para convertir YUV de QCAR a RGB

Estoy intentando medir el rendimiento de convertir las imágenes YUV producidas por Vuforia a UIImage utilizando las llamadas vImage del framework Accelerate de iOS. En el estado actual del código solo trato de que funcione. En este momento, la conversión produce una imagen oscura con rayas. ¿Hay algún detalle publicado acerca de cómo Vuforia organiza el formato YUV en su implementación? Mi suposición inicial era que usaban el formato biplanar de los dispositivos iOS con submuestreo 4:2:0. El código de prueba relevante sigue.

 UIImage *imageWithQCARCameraImage(const QCAR::Image *cameraImage) { UIImage *image = nil; if (cameraImage) { QCAR::PIXEL_FORMAT pixelFormat = cameraImage->getFormat(); CGColorSpaceRef colorSpace = NULL; switch (pixelFormat) { case QCAR::YUV: case QCAR::RGB888: colorSpace = CGColorSpaceCreateDeviceRGB(); break; case QCAR::GRAYSCALE: colorSpace = CGColorSpaceCreateDeviceGray(); break; case QCAR::RGB565: case QCAR::RGBA8888: case QCAR::INDEXED: std::cerr << "Image format conversion not implemented." << std::endl; break; case QCAR::UNKNOWN_FORMAT: std::cerr << "Image format unknown." << std::endl; break; } int bitsPerComponent = 8; int width = cameraImage->getWidth(); int height = cameraImage->getHeight(); const void *baseAddress = cameraImage->getPixels(); size_t totalBytes = QCAR::getBufferSize(width, height, pixelFormat); CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone; CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; CGImageRef imageRef = NULL; if (pixelFormat == QCAR::YUV) { int bytesPerPixel = 4; uint8_t *sourceDataAddress = (uint8_t *)baseAddress; static vImage_Buffer srcYp = { .width = static_cast<vImagePixelCount>(width), .height = static_cast<vImagePixelCount>(height), .data = const_cast<void *>(baseAddress) }; size_t lumaBytes = width * height; size_t chromianceBytes = totalBytes - lumaBytes; static vImage_Buffer srcCb = { .data = static_cast<void *>(sourceDataAddress + lumaBytes) }; static vImage_Buffer srcCr = { .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromianceBytes / 2)) }; static vImage_Buffer dest = { .width = static_cast<vImagePixelCount>(width), .height = static_cast<vImagePixelCount>(height), .data = imageData }; //uint8_t permuteMap[] = { 1, 2, 3, 0 }; vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 }; vImage_YpCbCrToARGB info; vImage_Error error; error = 
vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange, &info, kvImage420Yp8_Cb8_Cr8, kvImageARGB8888, kvImagePrintDiagnosticsToConsole); error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr, &dest, &info, NULL, 1, kvImageNoFlags); vImage_CGImageFormat format = { .bitsPerComponent = static_cast<uint32_t>(bitsPerComponent), .bitsPerPixel = static_cast<uint32_t>(3 * bitsPerComponent), .colorSpace = colorSpace, .bitmapInfo = bitmapInfo, .version = 0, .decode = NULL, .renderingIntent = renderingIntent }; imageRef = vImageCreateCGImageFromBuffer(&dest, &format, NULL, NULL, kvImageNoFlags, &error); if (error) { std::cerr << "Err." << std::endl; } } else { int bitsPerPixel = QCAR::getBitsPerPixel(pixelFormat); int bytesPerRow = cameraImage->getStride(); CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, totalBytes, NULL); imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpace, bitmapInfo, provider, NULL, false, renderingIntent); CGDataProviderRelease(provider); } if (imageRef != NULL) { image = [UIImage imageWithCGImage:imageRef]; CGImageRelease(imageRef); } if (colorSpace != NULL) { CGColorSpaceRelease(colorSpace); } } return image; }