IOS摄像头采集YUV420数据

    xiaoxiao2021-10-29  51

    这里简单说下IOS摄像头采集YUV420数据。

    步骤: 

    1.初始采集数据需要的对象

    2.设置采集回调的代理并开始

    3.在回调中获取到采集的数据(注意:这里采集到的数据是NV12格式——一种半平面(semi-planar)的YUV 4:2:0,UV交错存放,并不是我们需要的平面排列的YUV420P/I420)

    4.将NV12转成我们需要的平面格式YUV420P(即I420:Y平面、U平面、V平面依次排列)

    代码如下:

     //摄像头采集

        capturSession=[[AVCaptureSessionalloc]init];

        

        capturSession.sessionPreset=AVCaptureSessionPreset1920x1080;//设置采集的分辨率

        captureDevice=[AVCaptureDevicedefaultDeviceWithMediaType:AVMediaTypeVideo];

        NSError *error=nil;

        captureDeviceInput=[AVCaptureDeviceInputdeviceInputWithDevice:captureDeviceerror:&error];

        if (captureDeviceInput) {

            [capturSessionaddInput:captureDeviceInput];

        }else{

            NSLog(@"%@",error);

        }

        

        //初始化流输出对象

        captureVideoDataOutput=[[AVCaptureVideoDataOutputalloc]init];

        [captureVideoDataOutputsetAlwaysDiscardsLateVideoFrames:YES];

        //设置输出参数

        NSDictionary *settingsDic = [[NSDictionaryalloc] initWithObjectsAndKeys:

                                     [NSNumbernumberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],

                                     kCVPixelBufferPixelFormatTypeKey,

                                     nil];// X264_CSP_NV12

        captureVideoDataOutput.videoSettings = settingsDic;

        

        dispatch_queue_t queue =dispatch_queue_create("myQueue",NULL);

        

        [captureVideoDataOutputsetSampleBufferDelegate:selfqueue:queue];

        [capturSessionaddOutput:captureVideoDataOutput];

        

        

        /* ---------------------------------------------------------------------- */

        previewLayer = [[AVCaptureVideoPreviewLayeralloc] initWithSession:capturSession];

        //设置显示的视图位置

        previewLayer.frame =CGRectMake(50,330, 160,200);

        

        [self.view.layeraddSublayer:previewLayer];

        //开始采集

        [capturSessionstartRunning];

    //回调函数
    #pragma mark --  AVCaptureVideo(Audio)DataOutputSampleBufferDelegate method

    /// Delegate callback, invoked on the capture queue for every frame.
    /// Converts the NV12 (bi-planar 4:2:0) pixel buffer into a temporary
    /// planar I420 (YUV420P) buffer: Y plane, then U plane, then V plane.
    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        if (captureOutput == captureVideoDataOutput) {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

            // 获取采集的数据
            // Lock read-only; the buffer MUST stay locked for as long as the
            // plane base addresses below are dereferenced. (The original code
            // unlocked before copying — a use-after-unlock bug.)
            CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            CMTime duration = CMSampleBufferGetDuration(sampleBuffer);

            uint8_t *yPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); //YYYYYYYY
            size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

            uint8_t *uvPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1); //UVUVUVUV
            size_t uvStride = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

            size_t width = CVPixelBufferGetWidth(imageBuffer);
            size_t height = CVPixelBufferGetHeight(imageBuffer);

            size_t lumaSize = width * height;
            //开始将NV12转换成YUV420
            // I420 size = Y (w*h) + U (w*h/4) + V (w*h/4); integer math, no float.
            uint8_t *yuv420Data = (uint8_t *)malloc(lumaSize * 3 / 2);

            // Copy Y rows one by one: bytes-per-row may include padding
            // beyond `width`, so a single memcpy of the whole plane is unsafe.
            for (size_t i = 0; i < height; ++i) {
                memcpy(yuv420Data + i * width, yPlane + i * yStride, width);
            }

            // De-interleave UVUV... into separate U and V planes.
            uint8_t *U = yuv420Data + lumaSize;
            uint8_t *V = U + lumaSize / 4;
            for (size_t i = 0; i < height / 2; ++i) {
                const uint8_t *UV = uvPlane + i * uvStride;
                for (size_t j = 0; j < width / 2; ++j) {
                    *U++ = UV[j << 1];        // even bytes: U
                    *V++ = UV[(j << 1) + 1];  // odd bytes:  V
                }
            }

            // All pixel data has been copied out — now it is safe to unlock.
            CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

            //这里根据自己的情况对YUV420数据进行处理
            // pts / duration are available here for timestamping the frame.
            (void)pts;
            (void)duration;

    //...........
            //最后记得释放哦
            free(yuv420Data);

        }
    }

    转载请注明原文地址: https://ju.6miu.com/read-677860.html

    最新回复(0)