kCVPixelFormatType_420YpCbCr8Planar = 'y420',
/* Planar Component Y'CbCr 8-bit 4:2:0. baseAddr points to a big-endian CVPlanarPixelBufferInfo_YCbCrPlanar struct */
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = '420v',
/* Bi-Planar Component Y'CbCr 8-bit 4:2:0, video-range (luma=[16,235] chroma=[16,240]). baseAddr points to a big-endian CVPlanarPixelBufferInfo_YCbCrBiPlanar struct */
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange = '420f',
/* Bi-Planar Component Y'CbCr 8-bit 4:2:0, full-range (luma=[0,255] chroma=[1,255]). baseAddr points to a big-endian CVPlanarPixelBufferInfo_YCbCrBiPlanar struct */
#YpCbCr
The Y component is Y (luma), the U component is Cb, and the V component is Cr; in other words, this is YUV-format data.
#8-bit
Each sample uses 8 bits to store one Y (luma) value.
#4:2:0
The chroma subsampling is 4:2:0: every 2×2 block of luma samples shares one Cb and one Cr sample, so each chroma plane is a quarter the size of the luma plane.
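A quick size calculation makes the layout concrete; a minimal sketch, where the 1920×1080 dimensions are just an assumed example:
// 8-bit 4:2:0 frame size, ignoring any row padding
size_t width = 1920, height = 1080;                   // assumed example dimensions
size_t lumaBytes = width * height;                    // one Y byte per pixel
size_t chromaBytes = (width / 2) * (height / 2) * 2;  // one Cb + one Cr per 2x2 block
size_t frameBytes = lumaBytes + chromaBytes;          // = width * height * 3 / 2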
# baseAddr points to a big-endian CVPlanarPixelBufferInfo_YCbCrPlanar struct
The base address points to a CVPlanarPixelBufferInfo_YCbCrPlanar struct, stored in big-endian form, which describes where the YUV plane data lives.
#Planar & Bi-Planar
The first constant uses the Planar layout, the other two use the BiPlanar layout.
Planar means each component sits in its own plane: Y, Cb and Cr are packed separately and stored one after another, i.e. YYYY...Cb...Cr..., which is I420.
BiPlanar means two planes: luma and chroma are split into a Y plane and an interleaved CbCr plane, i.e. YYYY...CbCrCbCr..., which is NV12.
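To check which layout a given buffer actually uses, CVPixelBuffer exposes the plane count directly; a minimal sketch, assuming pixelBuffer came from a capture callback:
// I420 ('y420') reports 3 planes, NV12 ('420v'/'420f') reports 2
Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer);
size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);
NSLog(@"planar=%d planes=%zu format=%c%c%c%c", isPlanar, planeCount,
      (char)(format >> 24), (char)(format >> 16), (char)(format >> 8), (char)format);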
#VideoRange & FullRange
Luma and chroma samples are 8 bits each, so in principle 2^8 = 256 values are available, i.e. [0, 255].
VideoRange restricts luma to [16, 235] and chroma to [16, 240].
FullRange uses the whole [0, 255] range.
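Which variant you receive is decided when the capture output is configured, via kCVPixelBufferPixelFormatTypeKey; a minimal sketch (session and queue setup omitted):
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
// Ask for bi-planar (NV12) full-range frames; use ...BiPlanarVideoRange for [16,235]/[16,240]
output.videoSettings = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};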
#Inspecting the capture format
To inspect the format of the captured data:
CMSampleBufferGetFormatDescription(sampleBuffer);
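The returned CMFormatDescriptionRef can be logged directly, or you can read the pixel format FourCC and the dimensions from it; a minimal sketch:
CMFormatDescriptionRef desc = CMSampleBufferGetFormatDescription(sampleBuffer);
FourCharCode subType = CMFormatDescriptionGetMediaSubType(desc);    // e.g. '420v', '420f', 'BGRA'
CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(desc);
NSLog(@"subtype=%c%c%c%c %dx%d", (char)(subType >> 24), (char)(subType >> 16),
      (char)(subType >> 8), (char)subType, dims.width, dims.height);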
#How to get the YUV data out of a captured CMSampleBufferRef
First convert it to a CVImageBufferRef:
CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
Get the width and height:
CVPixelBufferGetWidth(pixelBuffer);
CVPixelBufferGetHeight(pixelBuffer);
Get the address of the YUV data:
CVPixelBufferGetBaseAddressOfPlane(pixelBuffer,Plane_index);
//Plane_index here depends on the planar layout discussed above
For the Planar layout (I420) there are three planes: Plane_index 0, 1 and 2 return the Y, Cb and Cr plane addresses respectively.
For the BiPlanar layout (NV12) two calls are needed: Plane_index=0 returns the address of the Y plane and Plane_index=1 returns the address of the interleaved CbCr plane.
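Putting these calls together for the bi-planar (NV12) case, a minimal sketch (the lock/unlock pair is explained in the notes below):
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
// Plane 0: Y, one byte per pixel; plane 1: interleaved CbCr at half resolution
uint8_t *yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
uint8_t *uvPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
size_t uvStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
// ... copy or process yPlane / uvPlane here, row by row, honouring the strides ...
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);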
#Notes
Remember to lock the pixelBuffer while you work on it:
CVPixelBufferLockBaseAddress(pixelBuffer, lockFlag);
//do the work here
CVPixelBufferUnlockBaseAddress(pixelBuffer, lockFlag);
#Converting a CMSampleBufferRef to a UIImage
/**
 * Converts a CMSampleBufferRef into a UIImage, adapted from:
 * https://stackoverflow.com/questions/19310437/convert-cmsamplebufferref-to-uiimage-with-yuv-color-space
 * note1: the SDK requires colorSpace to be CGColorSpaceCreateDeviceRGB
 * note2: the SDK requires the image in ARGB format
 */
- (UIImage *)imageFromSamplePlanerPixelBuffer:(CMSampleBufferRef)sampleBuffer {
    @autoreleasepool {
        CMFormatDescriptionRef desc = CMSampleBufferGetFormatDescription(sampleBuffer);
        NSLog(@">>%@", desc);
        // Get the sample buffer's Core Video image buffer for the media data
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        // Get the base address of the first plane
        void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        // Get the number of bytes per row for that plane
        size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        // Get the pixel buffer width and height
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        // Demo: paint the first 100 rows red. The memory layout is BGRA (little-endian ARGB);
        // the 4th byte is ignored because of kCGImageAlphaNoneSkipFirst.
        uint8_t *rgbabuffer = baseAddress;
        for (int y = 0; y < 100 && y < (int)height; y++) {
            for (int x = 0; x < width; x++) {
                rgbabuffer[y*bytesPerRow+x*4+0] = 0;    // B
                rgbabuffer[y*bytesPerRow+x*4+1] = 0;    // G
                rgbabuffer[y*bytesPerRow+x*4+2] = 255;  // R
                rgbabuffer[y*bytesPerRow+x*4+3] = 1;    // skipped alpha byte
            }
        }
        // Create a device-dependent RGB color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        // Create a bitmap graphics context with the sample buffer data
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
            bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
        // Create a Quartz image from the pixel data in the bitmap graphics context
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        // Free up the context and color space
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);
        // Create an image object from the Quartz image
        UIImage *image = [UIImage imageWithCGImage:quartzImage];
        // Release the Quartz image
        CGImageRelease(quartzImage);
        return image;
    }
}
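For context, here is a sketch of how the method above might be wired into the capture delegate. It assumes the data output was configured for kCVPixelFormatType_32BGRA (so the buffer really is 4 bytes per pixel) and that previewImageView is a hypothetical UIImageView:
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    UIImage *image = [self imageFromSamplePlanerPixelBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.previewImageView.image = image;   // hypothetical preview view
    });
}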