Qualcomm Camera Framework: Tracing the Path from App to Hardware

    xiaoxiao · 2025-05-30

               It has been a while since I last updated this blog, so today I am once again offering some of my modest notes for reference. Using a small camera feature as the example, this article explains in detail how a Java-layer interface call works its way, step by step, down to the hardware-layer interface, with brief introductions to the mechanisms involved along the way. I hope it serves as a useful reference and makes a small contribution to consolidating technical resources.

    Contents

    1. Introduction

    2. Feature Description

    2.1 Feature Overview

    2.2 Feature Analysis

    3. ObjectTracking Feature Framework Analysis

    3.1 Overall Block Diagram

    4. Feature Basic Flow

    4.1 Object Tracking Feature Function Execution Flow

    4.1.1 Initialization Call Flow

    4.1.2 Data Callback Flow

    5. Design idea

    5.1 Callback Design Mechanism

    5.2 Event Handler Design Mechanism

    5.3 JNI Calling Java Interface Functions

    5.4 Client-Side Data Handling

    5.5 Service-Side Data Handling

    2.1 Feature Overview

    1)   Feature name:

    ObjectTracking;

    2)   Feature overview: the user manually focuses on an object, and the device keeps tracking it automatically as the phone moves; the feature relies mainly on the ArcSoft algorithm, which returns coordinate information used to determine the object's position;

    2.2 Feature Analysis

    1)   Processing flow:

    Selecting the object to track:

    Application layer selects an object -> object coordinate information -> ArcSoft algorithm determines the object to be tracked

    Locating the object:

    ArcSoft algorithm obtains the object's position -> object coordinate information -> callback up to the Application layer

    2)   Application layer:

    a. Draw a rectangle on the detected and tracked area (a minimal sketch follows below);

    b. Add a touch focus mode with object tracking, or simply replace the current touch focus mode;

    This part will be fleshed out in a later revision.
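    As a rough illustration of item (a) above, here is a minimal sketch of how the application layer could render the tracked area: the tracking callback delivers a result carrying a Rect (mRectOfTrackedObject) and a lost flag (mIsLost), and an overlay view simply redraws the rectangle on every update. The class and method names (TrackingOverlayView, updateTrackedObject) are hypothetical, and the device-to-view coordinate conversion (done by PositionConverter in the real code) is omitted here.

    import android.content.Context;
    import android.graphics.Canvas;
    import android.graphics.Color;
    import android.graphics.Paint;
    import android.graphics.Rect;
    import android.view.View;

    // Hypothetical overlay view that draws the rectangle reported by the tracking callback.
    class TrackingOverlayView extends View {

        private final Paint mPaint = new Paint();
        private Rect mTrackedRect;       // last rectangle reported by the tracking callback
        private boolean mIsLost = true;  // true while the tracker reports the object as lost

        public TrackingOverlayView(Context context) {
            super(context);
            mPaint.setStyle(Paint.Style.STROKE);
            mPaint.setStrokeWidth(4.0f);
            mPaint.setColor(Color.GREEN);
        }

        // Call on the UI thread from onObjectTracked() with coordinates already mapped to view space.
        public void updateTrackedObject(Rect rectInViewCoords, boolean isLost) {
            mTrackedRect = rectInViewCoords;
            mIsLost = isLost;
            invalidate(); // schedule a redraw
        }

        @Override
        protected void onDraw(Canvas canvas) {
            super.onDraw(canvas);
            if (!mIsLost && mTrackedRect != null) {
                canvas.drawRect(mTrackedRect, mPaint);
            }
        }
    }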

    4.1.1 Initialization Call Flow

    Focus mode: object tracking

    vim ./vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/controller/EventDispatcher.java

    static class ObjectTrackingEvent implements ControllerEventSender {

        @Override

        public void send(EventAction action, ControllerEventSource source, Rect position) {

            switch(action){

                case DOWN:

                    break;

                case UP:

                    Executor.sendEvent(ControllerEvent.EV_OBJECT_TRACKING_START,source,

                           0, position);

                    break;

                case CANCEL:

                    Executor.sendEvent(ControllerEvent.EV_OBJECT_TRACKING_LOST,source);

                    break;

            }

        }

    }

    Next, ControllerMessageHandler.java dispatches the EV_OBJECT_TRACKING_START event:

    vim ./vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/controller/ControllerMessageHandler.java

    private synchronized void dispatch(ControllerMessage message) {

        ……

        switch(message.mEventId) {

            case EV_ABORT:

                mCurrentState.handleAbort(message);

                break;

            …...

            case EV_OBJECT_TRACKING_LOST:

                mCurrentState.handleObjectTrackingLost(message);

                break;

            case EV_OBJECT_TRACKING_START:

                mCurrentState.handleObjectTrackingStart(message);

                break;

            ……

       }

    }

    vim ./vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/controller/StateTransitionController.java

    public void handleObjectTrackingStart(ControllerMessage message) {

        mObjectTracking.start((Rect) message.mArg2);

    }

    vim ./vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/controller/ObjectTracking.java

    public void start(Rect position) {

        if(position != null) {

            mPosition=position;

            Rect rectIS = new Rect();

            if(mController.mCameraDevice.isObjectTrackingRunning() &&

                    !mController.getParams().getTouchCapture().getBooleanValue()&&

                    rectIS.intersects(mPosition,

                           PositionConverter.getInstance().

                           convertDeviceToFace(mTrackingPosition))){

                Executor.sendEmptyEvent(ControllerEvent.EV_OBJECT_TRACKING_LOST);

                return;

            }

            if(mIsAlreadyLost) {

                startTracking(position);

            }else {

                // Wait till previous object is lost when restart.

                stop(false);

                mShouldWaitForLost= true;

            }

        }

    }

    private void startTracking(Rect position) {

        mCallback=new ObjectTrackingCallback();

        mController.mCameraWindow.startObjectTrackingAnimation(position);

        mController.mCameraDevice.startObjectTracking(

                PositionConverter.getInstance().convertFaceToDevice(position),

                mCallback);

    }

    vim ./vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/device/CameraDevice.java

    public void startObjectTracking(Rect position, ObjectTrackingCallback cb) {

        ……

        mCameraExtension.setObjectTrackingCallback(cb, // register the callback used to receive position info from ArcSoft

                CameraDeviceUtil.OBJECT_TRACKING_LOW_PASS_FILTER_STRENGTH,

                CameraDeviceUtil.OBJECT_TRACKING_MINIMAL_INTERVAL_MS);

        mCameraExtension.startObjectTracking();  // select the tracking position and pass it to ArcSoft

        mCameraExtension.selectObject(position.centerX(), position.centerY());

        mIsObjectTrackingRunning= true;

        ……

        new EachCameraStatusPublisher(mCameraActivity, mCameraId)

                .put(new ObjectTracking(ObjectTracking.Value.ON))

                .publish();

    }

    vim vendor/semc/frameworks/base/libs/camera-extension/api/src/com/sonyericsson/cameraextension/CameraExtension.java

    public final void startObjectTracking() {

        ……

        if(mCamera != null) {

            mCamera.startObjectTracking();

        }

    }

    vim frameworks/base/core/java/android/hardware/Camera.java

    private native final void _startObjectTracking();

    public void startObjectTracking() {

        _startObjectTracking();

    }

    vim frameworks/base/core/jni/android_hardware_Camera.cpp

    static void android_hardware_Camera_startObjectTracking(JNIEnv *env, jobject thiz)

    {

        JNICameraContext* context;

        sp<Camera> camera = get_native_camera(env, thiz, &context);

        if (camera == 0) return;

        bool isSuccess = context->setUpObjectTracking(env);

        ……

        if(camera->sendCommand(CAMERA_CMD_START_OBJECT_TRACKING,0, 0) != NO_ERROR) {

            jniThrowRuntimeException(env, "start object tracking failed");

        }

    }

     vim frameworks/av/camera/Camera.cpp

    status_t Camera::sendCommand(int32_t cmd,int32_t arg1,int32_t arg2)

    {

        sp <ICamera> c = mCamera;

        if(c == 0)return NO_INIT;

        return c->sendCommand(cmd, arg1, arg2);

    }

    vim frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp

         status_t CameraClient::sendCommand(int32_t cmd,int32_t arg1,int32_t arg2) {

             …...

       else if (cmd == CAMERA_CMD_START_OBJECT_TRACKING) {

           enableMsgType(CAMERA_MSG_OBJECT_TRACKING);

           mLowPassFilterObjectTracking->isStartObjectTracking =true;

          }

           …...

          /* MM-MC-SomcAddForSoMCAP-00+} */

          return mHardware->sendCommand(cmd,arg1, arg2);

    }

    void CameraClient::enableMsgType(int32_t msgType) {

        android_atomic_or(msgType,&mMsgEnabled);

        mHardware->enableMsgType(msgType);

    }

    vim frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h

    void enableMsgType(int32_t msgType)

    {

        if(mDevice->ops->enable_msg_type)

            mDevice->ops->enable_msg_type(mDevice,msgType);

    }

    status_t sendCommand(int32_t cmd,int32_t arg1,int32_t arg2)

    {

        if(mDevice->ops->send_command)

            return mDevice->ops->send_command(mDevice, cmd, arg1, arg2);

        return INVALID_OPERATION;

    }

    vim hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

    camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {

        set_preview_window:        QCamera2HardwareInterface::set_preview_window,

        set_callbacks:             QCamera2HardwareInterface::set_CallBacks,

        enable_msg_type:           QCamera2HardwareInterface::enable_msg_type,

        disable_msg_type:          QCamera2HardwareInterface::disable_msg_type,

        ……

        get_parameters:            QCamera2HardwareInterface::get_parameters,

        put_parameters:            QCamera2HardwareInterface::put_parameters,

        send_command:            QCamera2HardwareInterface::send_command,

        release:                   QCamera2HardwareInterface::release,

        dump:                    QCamera2HardwareInterface::dump,

    };

    4.1.2 Data Callback Flow

       The data callback path is analyzed below, starting from the HAL-layer interface and working upward:

        1) Callback functions provided by CameraHardwareInterface:

    vim frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h

    The callback functions:

    static void __notify_cb(int32_t msg_type, int32_t ext1,

                            int32_t ext2,void*user)

    {

        CameraHardwareInterface *__this =

                static_cast<CameraHardwareInterface *>(user);

        __this->mNotifyCb(msg_type,ext1, ext2, __this->mCbUser);

    }

    static void __data_cb(int32_t msg_type,

                         const camera_memory_t *data, unsigned int index,

                         camera_frame_metadata_t *metadata,

                         void*user)

    {

        CameraHardwareInterface *__this =

                static_cast<CameraHardwareInterface *>(user);

        sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));

        if(index >= mem->mNumBufs) {

            return;

        }

        __this->mDataCb(msg_type,mem->mBuffers[index],metadata,__this->mCbUser);

    }

       The set-callbacks function:

    void setCallbacks(notify_callback notify_cb,

                     data_callback data_cb,

                     data_callback_timestamp data_cb_timestamp,

                     void* user)

    {

        mNotifyCb = notify_cb;

        mDataCb = data_cb;

        mDataCbTimestamp = data_cb_timestamp;

        mCbUser = user;

        if(mDevice->ops->set_callbacks) {

        mDevice->ops->set_callbacks(mDevice, // call the HAL's set_callbacks function

                                  __notify_cb,

                                  __data_cb,

                                  __data_cb_timestamp,

                                  __get_memory,

                                  this);

        }

    }

          The setCallbacks function above ultimately calls the HAL-layer function QCamera2HardwareInterface::setCallBacks. A brief look at the HAL-side callback handling:

           vim hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

    int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,

                                                camera_data_callback data_cb,

                                                camera_data_timestamp_callback data_cb_timestamp,

                                                camera_request_memory get_memory,

                                                void *user)

    {

        mNotifyCb        = notify_cb;

        mDataCb          = data_cb;

        mDataCbTimestamp =data_cb_timestamp;

        mGetMemory       = get_memory;

        mCallbackCookie  = user;

        m_cbNotifier.setCallbacks(notify_cb,data_cb, data_cb_timestamp,user);

        return NO_ERROR;

    }

         vim hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp

    void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,

                                        camera_data_callback dataCb,

                                         camera_data_timestamp_callback dataCbTimestamp,

                                        void *callbackCookie)

    {

        if( (NULL == mNotifyCb ) &&

             ( NULL ==mDataCb ) &&

             ( NULL ==mDataCbTimestamp ) &&

             ( NULL == mCallbackCookie) ) {

            mNotifyCb =notifyCb;

            mDataCb =dataCb;

            mDataCbTimestamp= dataCbTimestamp;

            mCallbackCookie= callbackCookie;

            mActive =true;

            mProcTh.launch(cbNotifyRoutine, this); // launch a thread to process the callback messages

        }else {

            ALOGE("%s: Camera callback notifier already initialized!",

                  __func__);

        }

    }

    void *QCameraCbNotifier::cbNotifyRoutine(void* data)

    {

        int running = 1;

        int ret;

        QCameraCbNotifier *pme =(QCameraCbNotifier *)data;

        QCameraCmdThread *cmdThread =&pme->mProcTh;

        cmdThread->setName("CAM_cbNotify");

        uint8_t isSnapshotActive =FALSE;

        bool longShotEnabled = false;

        uint32_t numOfSnapshotExpected=0;

        uint32_t numOfSnapshotRcvd =0;

        int32_t cbStatus = NO_ERROR;

        CDBG("%s:E",__func__);

        do{

            do{

                ret =cam_sem_wait(&cmdThread->cmd_sem);

            ……

            }while (ret !=0);

        camera_cmd_type_t cmd = cmdThread->getCmd();

            switch(cmd) {

        case CAMERA_CMD_TYPE_START_DATA_PROC:

                {

                    isSnapshotActive= TRUE;

                    numOfSnapshotExpected= pme->mParent->numOfSnapshotsExpected();

    /*MM-YW-Integrate Arcsoft Snapshot Fature-00+{*/               

    #ifdef USE_ARCSOFT_FEATURE

                    if(NULL !=pme->mParent->mArcSoft_Feature)

                       numOfSnapshotExpected+= pme->mParent->mArcSoft_Feature->mSnapshotInfo.extra_burst_cnt;

    #endif

    /*MM-YW-Integrate Arcsoft Snapshot Fature-00+}*/

                    longShotEnabled= pme->mParent->isLongshotEnabled();

                    numOfSnapshotRcvd= 0;

                }

                break;

        case CAMERA_CMD_TYPE_STOP_DATA_PROC:

                {

                    pme->mDataQ.flushNodes(matchSnapshotNotifications);

                    isSnapshotActive= FALSE;

                    numOfSnapshotExpected= 0;

                    numOfSnapshotRcvd= 0;

                }

                break;

        case CAMERA_CMD_TYPE_DO_NEXT_JOB:

                {

                    qcamera_callback_argm_t *cb = // dequeue a callback message from the queue

                        (qcamera_callback_argm_t*)pme->mDataQ.dequeue();

                    cbStatus =NO_ERROR;

                    if (NULL != cb) {

                       CDBG("%s:cb type %d received",

                             __func__,

                             cb->cb_type);

                       if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)){

                           switch (cb->cb_type){

                           case QCAMERA_NOTIFY_CALLBACK:

                               {

                                   if (cb->msg_type== CAMERA_MSG_FOCUS) {

                                       ATRACE_INT("Camera:AutoFocus",0);

                                       CDBG_HIGH("[KPIPerf] %s : PROFILE_SENDING_FOCUS_EVT_TO APP",

                                               __func__);

                                    }

                                   if (pme->mNotifyCb){

                                       pme->mNotifyCb(cb->msg_type,

                                                     cb->ext1,

                                                     cb->ext2,

                                                     pme->mCallbackCookie);

                                   } else{

                                       ALOGE("%s: notify callback not set!",

                                             __func__);

                                    }

                               }

                               break;

                           case QCAMERA_DATA_CALLBACK:

                               {

                                   if (pme->mDataCb){

                                        pme->mDataCb(cb->msg_type,

                                                    cb->data,

                                                    cb->index,

                                                    cb->metadata,

                                                     pme->mCallbackCookie);

                                   } else{

                                       ALOGE("%s: data callback not set!",

                                             __func__);

                                   }

                                }

                               break;

                           case QCAMERA_DATA_TIMESTAMP_CALLBACK:

                               {

                                   if(pme->mDataCbTimestamp){

                                       pme->mDataCbTimestamp(cb->timestamp,

                                                             cb->msg_type,

                                                             cb->data,

                                                             cb->index,

                                                              pme->mCallbackCookie);

                                   } else{

                                       ALOGE("%s:datacb with tmp not set!",

                                             __func__);

                                   }

                                }

                               break;

                           case QCAMERA_DATA_SNAPSHOT_CALLBACK:

                               {

                                   if (TRUE ==isSnapshotActive && pme->mDataCb ) {

                                        if(!longShotEnabled) {

                                           numOfSnapshotRcvd++;

                                           /*MM-YW-IntegrateArcsoft Snapshot Fature-01+{*/

                                           #ifdefUSE_ARCSOFT_FEATURE

                                            if((NULL !=pme->mParent->mArcSoft_Feature) &&pme->mParent->mArcSoft_Feature->mSnapshotInfo.is_snapshot_done)

                                           {

                                               pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,NULL);

                                               pme->mParent->mArcSoft_Feature->ArcSoft_SendSnapshotEvt(ARCSOFT_S_EVT_DONE,FALSE,NULL);         

                                           }else

                                            #endif

                                           /*MM-YW-IntegrateArcsoft Snapshot Fature-01+}*/

                                           if (numOfSnapshotExpected>0 &&

                                               numOfSnapshotExpected== numOfSnapshotRcvd) {

                                               //notify HWI that snapshot is done

                                               pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,

                                                                            NULL);

                                           }

                                       }

                                       pme->mDataCb(cb->msg_type,

                                                    cb->data,

                                                     cb->index,

                                                    cb->metadata,

                                                    pme->mCallbackCookie);

                                   }

                               }

                                break;

         ……

        }while (running);

        CDBG("%s:X",__func__);

    return NULL;

    }

         2) How CameraService handles the HAL messages:

        vim frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp

        Set callbacks:

    status_t CameraClient::initialize(CameraModule *module) {

        int callingPid = getCallingPid();

        status_t res;

        // Verify ops permissions

        res = startCameraOps();

        if(res != OK) {

            returnres;

        }

        char camera_device_name[10];

        snprintf(camera_device_name,sizeof(camera_device_name),"%d", mCameraId);

        mHardware =new CameraHardwareInterface(camera_device_name);

        res = mHardware->initialize(module);

        ……

        mHardware->setCallbacks(notifyCallback,

                dataCallback,

                dataCallbackTimestamp,

                (void *)(uintptr_t)mCameraId);

        // Enable zoom, error, focus, and metadata messages by default

        enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM |CAMERA_MSG_FOCUS |

                     CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);

        returnOK;

    }

             The callback functions:

    void CameraClient::notifyCallback(int32_t msgType,int32_t ext1,

            int32_t ext2,void* user) {

        sp<CameraClient> client =static_cast<CameraClient*>(getClientFromCookie(user).get());

        if(client.get() == nullptr)return;

        if(!client->lockIfMessageWanted(msgType))return;

        switch(msgType) {

            case CAMERA_MSG_SHUTTER:

                // ext1 is the dimension of the yuv picture.

                client->handleShutter();

                break;

            default:

                client->handleGenericNotify(msgType,ext1, ext2);

                break;

        }

    }

    void CameraClient::dataCallback(int32_t msgType,

            const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {

        sp<CameraClient> client =static_cast<CameraClient*>(getClientFromCookie(user).get());

        if(client.get() == nullptr)return;

        if(!client->lockIfMessageWanted(msgType))return;

        ……

        switch(msgType & ~CAMERA_MSG_PREVIEW_METADATA) {

            case CAMERA_MSG_PREVIEW_FRAME:

                client->handlePreviewData(msgType,dataPtr, metadata);

                break;

            case CAMERA_MSG_POSTVIEW_FRAME:

                client->handlePostview(dataPtr);

                break;

            case CAMERA_MSG_RAW_IMAGE:

                client->handleRawPicture(dataPtr);

                break;

            case CAMERA_MSG_COMPRESSED_IMAGE:

                client->handleCompressedPicture(dataPtr);

                break;

            /* MM-MC-SomcAddForSoMCAP-00+{ */

            case CAMERA_MSG_OBJECT_TRACKING:

                client->handleObjectTracking(dataPtr);

                break;

            /* MM-MC-SomcAddForSoMCAP-00+} */

            default:

                client->handleGenericData(msgType,dataPtr, metadata);

                break;

        }

    }

    // handleObjectTracking

    void CameraClient::handleObjectTracking(const sp<IMemory>& mem) {

        LOG2("%s:",__FUNCTION__);

        sp<ICameraClient> c = mRemoteCallback;

        mLock.unlock();

        clock_t nowMilliSec =1000 * clock() /CLOCKS_PER_SEC;

        ……

            // reset isStartObjectTracking flag

            mLowPassFilterObjectTracking->isStartObjectTracking =false;

            // return callback

            if (c != NULL) { // invoke the client-side callback

                c->dataCallback(CAMERA_MSG_OBJECT_TRACKING,mem, NULL);

            }

            return;

        }

       ……

        }

        3) Client-side handling:

    vim frameworks/av/camera/Camera.cpp

    void Camera::notifyCallback(int32_t msgType,int32_t ext1,int32_t ext2)

    {

        return CameraBaseT::notifyCallback(msgType, ext1, ext2);

    }

    // callback from cameraservice when frame or image is ready

    void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,

                             camera_frame_metadata_t *metadata)

    {

        sp<CameraListener> listener;

        {

            Mutex::Autolock _l(mLock);

            listener = mListener;

        }

        if(listener != NULL) {

            listener->postData(msgType,dataPtr, metadata);

        }

    }

        4) JNI: android_hardware_Camera.cpp

            vim frameworks/base/core/jni/android_hardware_Camera.cpp

    void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,

                                   camera_frame_metadata_t *metadata)

    {

        ……

        int32_t dataMsgType = msgType & ~CAMERA_MSG_PREVIEW_METADATA;

        // return data based on callback type

        switch(dataMsgType) {

            case CAMERA_MSG_VIDEO_FRAME:

                // should never happen

                break;

            // For backward-compatibility purpose, if there is no callback

            // buffer for raw image, the callback returns null.

            case CAMERA_MSG_RAW_IMAGE:

                ALOGV("rawCallback");

                if (mRawImageCallbackBuffers.isEmpty()) {

                    env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                           mCameraJObjectWeak,dataMsgType,0, 0,NULL);

                } else {

                    copyAndPost(env,dataPtr,dataMsgType);

                }

                break;

            /* MM-MC-SomcAddForSoMCAP-00+{ */

            case CAMERA_MSG_OBJECT_TRACKING:

            {

                ……

                ssize_t offset;

                size_t size;

                sp<IMemoryHeap> heap;

                heap = dataPtr->getMemory(&offset,&size);

                ALOGV("objecttracking callback:mem off=%d, size=%d",(int) offset,(int) size);

                camera_ex_msg_object_tracking_t *cb =(camera_ex_msg_object_tracking_t *) heap->base();

                jobject object_tracking_result;

       

                if (cb != NULL) {

                    object_tracking_result = convertObjectTrackingResult(env,cb);

                } else {

                    ALOGE("objecttracking callback: heap is null");

                    env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                           mCameraJObjectWeak,CAMERA_MSG_OBJECT_TRACKING,0, 0,NULL);

                    return;

                } // the data is then passed from the JNI layer up to the Java layer

                env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                       mCameraJObjectWeak,CAMERA_MSG_OBJECT_TRACKING,0, 0,               object_tracking_result);

            }

            break;

           /* MM-MC-SomcAddForSoMCAP-00+} */

            // There is no data.

            case 0:

                break;

            default:

                ALOGV("dataCallback(%d,%p)",dataMsgType,dataPtr.get());

                copyAndPost(env,dataPtr,dataMsgType);

                break;

        }

        // post frame metadata to Java

        if(metadata && (msgType &CAMERA_MSG_PREVIEW_METADATA)) {

            postMetadata(env,CAMERA_MSG_PREVIEW_METADATA,metadata);

        }

    }

         5) Java: Camera.java

          vim frameworks/base/core/java/android/hardware/Camera.java

    // Receives the data posted via sendMessage and forwards it to the extension layer (CameraExtension.java)

    private class EventHandler extends Handler

    {

        private final Camera mCamera;

        public EventHandler(Camera c, Looper looper) {

            super(looper);

            mCamera=c;

        }

        @Override

        public void handleMessage(Message msg) {

            switch(msg.what) {

            case CAMERA_MSG_SHUTTER:

                if (mShutterCallback!= null) {

                    mShutterCallback.onShutter();

                }

                return;

            case CAMERA_MSG_RAW_IMAGE:

                if (mRawImageCallback!= null) {

                    mRawImageCallback.onPictureTaken((byte[])msg.obj,mCamera);

                }

                return;

            ……

            case CAMERA_MSG_OBJECT_TRACKING:

                if (mObjectTrackingFWCallback!= null) {

                    Log.e(TAG,"jay test call back");

                   mObjectTrackingFWCallback.onObjectTrackingFWCallback((ObjectTrackingResult)msg.obj, mCamera);

                }

                return;

            /* MM-MC-SomcAddForSoMCAP-00+} */

            default:

                Log.e(TAG,"Unknown message type " + msg.what);

                return;

            }

        }

    } // The method below receives the data from the JNI layer and posts it with sendMessage

    private static void postEventFromNative(Object camera_ref,

                                           int what, int arg1, int arg2, Object obj)

    {

        Camera c = (Camera)((WeakReference)camera_ref).get();

        if(c == null)

            return;

        if(c.mEventHandler!= null) {

            Message m = c.mEventHandler.obtainMessage(what,arg1, arg2, obj);

            c.mEventHandler.sendMessage(m);

        }

    }

    6) The callback in the extension frameworks:

    vim vendor/semc/frameworks/base/libs/camera-extension/api/src/com/sonyericsson/cameraextension/CameraExtension.java

    public interface ObjectTrackingCallback {

        void onObjectTracked(ObjectTrackingResult objectTrackingResult);

    }

    public final void setObjectTrackingCallback(

            final ObjectTrackingCallback cb,

            int lowPassFilterStrength,

            int minimumIntervalMilliSec) {

        if(mIsReleased){

            return;

        }

        mObjectTrackingCallback = cb;

        if(Integer.MAX_VALUE< minimumIntervalMilliSec) {

            minimumIntervalMilliSec = minimumIntervalMilliSec;

        }

        /* ++ Somc-integrate-CameraExtension-01 */

        //setObjectTrackingLowPassFilterPrameters(lowPassFilterStrength,minimumIntervalMilliSec);

        if(mCamera!= null) {

            if(mObjectTrackingFWCallback == null) {

                mObjectTrackingFWCallback = new OTCallback();

            }

            // mObjectTrackingFWCallback is passed down to the lower layer

               mCamera.setObjectTrackingLowPassFilterPrameters(mObjectTrackingFWCallback,

                    lowPassFilterStrength,minimumIntervalMilliSec);

        }

        /* -- Somc-integrate-CameraExtension-01 */

    }

    /* ++ Somc-integrate-CameraExtension-01*/

    class OTCallback implements Camera.ObjectTrackingFWCallback {

        public void onObjectTrackingFWCallback(Camera.ObjectTrackingResult objectTrackingResult,

                Camera camera) {

            if(mObjectTrackingCallback != null&& objectTrackingResult != null) {

            ……

                if(mObjectTrackingResult == null)

                    mObjectTrackingResult = new ObjectTrackingResult();

                mObjectTrackingResult.mRectOfTrackedObject =new android.graphics.Rect(

                        objectTrackingResult.mRectOfTrackedObject.left,objectTrackingResult.mRectOfTrackedObject.top,

                        objectTrackingResult.mRectOfTrackedObject.right,objectTrackingResult.mRectOfTrackedObject.bottom);

                mObjectTrackingResult.mIsLost =objectTrackingResult.mIsLost;

                mObjectTrackingCallback.onObjectTracked(mObjectTrackingResult); // pass the result up to the UI layer

            }

        }

    }

     

     

    5.      Design idea

    5.1 Callback Design Mechanism

       1) The application layer defines the callback function:

        Define the interface:

        vim vendor/semc/frameworks/base/libs/camera-extension/api/src/com/sonyericsson/cameraextension/CameraExtension.java

    public interface ObjectTrackingCallback {

        void onObjectTracked(ObjectTrackingResult objectTrackingResult);

    }

         Define the callback function, which is what actually processes the data coming from the lower layers:

    vim vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/controller/ObjectTracking.java

    private class ObjectTrackingCallback implements CameraExtension.ObjectTrackingCallback {

        @Override

        public void onObjectTracked(ObjectTrackingResult result) {

           ……

            if(mShouldWaitForLost) {

                if(!result.mIsLost) {

                    // Ignore detect object event for wait next lost event.

                    if(CameraLogger.DEBUG) CameraLogger.d(TAG,"onObjectTracked: ignoredetect.");

                    return;

                }else {

                    // Restart object tracking after lost event.

                    if(CameraLogger.DEBUG) CameraLogger.d(TAG,"onObjectTracked: restart.");

                    mController.mCameraDevice.stopObjectTrackingCallback();

                    startTracking(mPosition);

                    mShouldWaitForLost =false;

                }

            }

            // Ignorecontinuous lost event.

            if(mIsAlreadyLost&& result.mIsLost) {

                if(CameraLogger.DEBUG) CameraLogger.d(TAG,"onObjectTracked: ignore lost");

                return;

            }

            mIsAlreadyLost= result.mIsLost;

            if(result.mIsLost) {

                mHandler.startTimeoutCount();

            }else {

                mHandler.stopTimeoutCount();

                Executor.postEvent(ControllerEvent.EV_OBJECT_TRACKING,0,

                       result.mRectOfTrackedObject);

            }

        }

    }

        Call the Java interface function to register the callback:

    private void startTracking(Rect position) {

        if(CameraLogger.DEBUG) CameraLogger.d(TAG,"startTracking: " + position);

        mCallback = new ObjectTrackingCallback(); // the callback instance

        mController.mCameraWindow.startObjectTrackingAnimation(position);

        mController.mCameraDevice.startObjectTracking(

                PositionConverter.getInstance().convertFaceToDevice(position),

                mCallback); // pass the callback down

    }

    vim vendor/semc/packages/apps/camera-addons/CameraUI/src/com/sonyericsson/android/camera/device/CameraDevice.java

    public void startObjectTracking(Rect position, ObjectTrackingCallback cb) {

        …… // interface function provided by the frameworks layer

        mCameraExtension.setObjectTrackingCallback(cb, // cb is the callback passed down from above

                CameraDeviceUtil.OBJECT_TRACKING_LOW_PASS_FILTER_STRENGTH,

                CameraDeviceUtil.OBJECT_TRACKING_MINIMAL_INTERVAL_MS);

        mCameraExtension.startObjectTracking();

        mCameraExtension.selectObject(position.centerX(),position.centerY());

        mIsObjectTrackingRunning= true;

        ……

        new EachCameraStatusPublisher(mCameraActivity, mCameraId)

                .put(new ObjectTracking(ObjectTracking.Value.ON))

                .publish();

    }

          Implement the Java interface function used to register the callback:

    vim vendor/semc/frameworks/base/libs/camera-extension/api/src/com/sonyericsson/cameraextension/CameraExtension.java

    public final void setObjectTrackingCallback(

            finalObjectTrackingCallback cb,

            intlowPassFilterStrength,

            intminimumIntervalMilliSec) {

        if(mIsReleased) {

            return;

        }

        mObjectTrackingCallback = cb; // keep the callback reference passed down from the application layer

        if(Integer.MAX_VALUE <minimumIntervalMilliSec) {

            minimumIntervalMilliSec = minimumIntervalMilliSec;

        }

        /* ++Somc-integrate-CameraExtension-01 */

        //setObjectTrackingLowPassFilterPrameters(lowPassFilterStrength,minimumIntervalMilliSec);

        if(mCamera != null) {

            if(mObjectTrackingFWCallback == null) {

                mObjectTrackingFWCallback = new OTCallback(); // a second callback

            } // call another registration function to register this second callback (a two-level callback chain)

            mCamera.setObjectTrackingLowPassFilterPrameters(mObjectTrackingFWCallback,

                    lowPassFilterStrength,minimumIntervalMilliSec);

        }

        /* -- Somc-integrate-CameraExtension-01*/

    }

        2) The interface layer defines the callback function:

                  Define the interface:

    vim frameworks/base/core/java/android/hardware/Camera.java

    public interface ObjectTrackingFWCallback {

        void onObjectTrackingFWCallback(ObjectTrackingResult objectTrackingResult, Camera camera);

    };

         Define the callback function, which is what actually processes the data coming from the lower layers:

    vim vendor/semc/frameworks/base/libs/camera-extension/api/src/com/sonyericsson/cameraextension/CameraExtension.java

    class OTCallback implements Camera.ObjectTrackingFWCallback {

        public void onObjectTrackingFWCallback(Camera.ObjectTrackingResult objectTrackingResult,

                Camera camera) {

            if(mObjectTrackingCallback != null&& objectTrackingResult != null) {

            ……

                if(mObjectTrackingResult == null)

                    mObjectTrackingResult = new ObjectTrackingResult();

                mObjectTrackingResult.mRectOfTrackedObject =new android.graphics.Rect(

                       objectTrackingResult.mRectOfTrackedObject.left,objectTrackingResult.mRectOfTrackedObject.top,

                       objectTrackingResult.mRectOfTrackedObject.right,objectTrackingResult.mRectOfTrackedObject.bottom);

                mObjectTrackingResult.mIsLost = objectTrackingResult.mIsLost;

                mObjectTrackingCallback.onObjectTracked(mObjectTrackingResult);

            } // as shown above, this second callback forwards the data it receives to the first callback, which passes it up to the application layer

        }

    }

          Implement the interface function used to register the callback:

      vim frameworks/base/core/java/android/hardware/Camera.java

    public void setObjectTrackingLowPassFilterPrameters(ObjectTrackingFWCallback cb, int lowPassFilterStrength, int minimumIntervalMilliSec) {

        mObjectTrackingFWCallback =cb;

        _setObjectTrackingLowPassFilterPrameters(lowPassFilterStrength,minimumIntervalMilliSec);

    }

      Next, a handler mechanism delivers the data received on the client side to the current callback (mObjectTrackingFWCallback). A minimal sketch of the pattern follows; the real Camera.java code is analyzed in 5.2.
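    The sketch below is a self-contained illustration of that Handler pattern, under the assumption of hypothetical names (TrackingDispatcher, MSG_OBJECT_TRACKED, onTracked): a producer thread (in Camera.java this role is played by postEventFromNative, invoked from JNI) posts a Message, and the Handler delivers it on the thread that owns the Looper, where the registered callback runs.

    import android.os.Handler;
    import android.os.Looper;
    import android.os.Message;

    // Hypothetical dispatcher illustrating the Handler/Looper pattern used by Camera.java.
    class TrackingDispatcher {

        interface Callback { void onTracked(Object result); }

        private static final int MSG_OBJECT_TRACKED = 1;
        private final Callback mCallback;
        private final Handler mHandler;

        TrackingDispatcher(Callback cb, Looper looper) {
            mCallback = cb;
            mHandler = new Handler(looper) {
                @Override
                public void handleMessage(Message msg) {
                    // Runs on the thread that owns 'looper'.
                    if (msg.what == MSG_OBJECT_TRACKED && mCallback != null) {
                        mCallback.onTracked(msg.obj);
                    }
                }
            };
        }

        // May be called from any thread; the result is queued and handled on the Looper thread.
        void postResult(Object result) {
            Message m = mHandler.obtainMessage(MSG_OBJECT_TRACKED, result);
            mHandler.sendMessage(m);
        }
    }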

    5.2 Event Handler Design Mechanism

      1) Event handler initialization:

    vim frameworks/base/core/java/android/hardware/Camera.java

    private int cameraInitVersion(int cameraId, int halVersion) {

        mShutterCallback= null;

        mRawImageCallback= null;

        mJpegCallback= null;

        mPreviewCallback= null;

        mPostviewCallback= null;

        mUsingPreviewAllocation=false;

        mZoomListener= null;

        /* ### QC ADD-ONS: START */

        mCameraDataCallback= null;

        mCameraMetaDataCallback=null;

        /* ### QC ADD-ONS: END */

        Looper looper;

        if ((looper = Looper.myLooper()) != null) { // get the current thread's Looper

            mEventHandler = new EventHandler(this, looper); // mEventHandler interacts with this Looper

        }else if ((looper= Looper.getMainLooper()) !=null) {

            mEventHandler= new EventHandler(this,looper);

        }else {

            mEventHandler= null;

        }

        return native_setup(new WeakReference<Camera>(this), cameraId, halVersion,

                ActivityThread.currentOpPackageName());

    }

      2) Posting data to the thread's message queue

    private static void postEventFromNative(Object camera_ref, // invoked from JNI (C++ calling a Java method)

                                            int what, int arg1, int arg2, Object obj)

    {

        Camera c = (Camera)((WeakReference)camera_ref).get();

        if(c == null)

            return;

        if(c.mEventHandler!=null) {

            Message m = c.mEventHandler.obtainMessage(what, arg1, arg2, obj); // build the message

            c.mEventHandler.sendMessage(m); // post it to the queue

        }

    }

    3) Receiving data from the queue

    private class EventHandler extends Handler

    {

        private final Camera mCamera;

        public EventHandler(Camera c, Looper looper) {

            super(looper);

            mCamera = c;

        }

        @Override

        public void handleMessage(Message msg) {

            switch(msg.what) {

            …...

            case CAMERA_MSG_OBJECT_TRACKING:

                if(mObjectTrackingFWCallback != null) {

                    Log.e(TAG, "jay test call back");  //将数据传递给callback函数

                    mObjectTrackingFWCallback.onObjectTrackingFWCallback((ObjectTrackingResult)msg.obj,mCamera);

                }

                return;

             ……

            }

        }

    }

    5.3 JNI Calling Java Interface Functions

    vim frameworks/base/core/jni/android_hardware_Camera.cpp

    fields.post_event = GetStaticMethodIDOrDie(env, clazz, "postEventFromNative", // name of the Java-layer method

                                              "(Ljava/lang/Object;IIILjava/lang/Object;)V");

    The following function is invoked from the client side; it calls the Java-layer method and passes the data up to the Java interface:

    void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,

                                   camera_frame_metadata_t *metadata)

    {

        // VM pointer will be NULL if object is released

        Mutex::Autolock _l(mLock);

        JNIEnv *env =AndroidRuntime::getJNIEnv();

        if(mCameraJObjectWeak == NULL) {

            ALOGW("callback on dead camera object");

            return;

        }

        int32_t dataMsgType = msgType& ~CAMERA_MSG_PREVIEW_METADATA;

        // return data based on callback type

        switch(dataMsgType) {

            case CAMERA_MSG_VIDEO_FRAME:

                // should never happen

                break;

            // For backward-compatibility purpose, if there is no callback

            // buffer for raw image, the callback returns null.

            case CAMERA_MSG_RAW_IMAGE:

                ALOGV("rawCallback");

                if(mRawImageCallbackBuffers.isEmpty()) {

                    env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                           mCameraJObjectWeak,dataMsgType,0, 0,NULL);

                }else {

                    copyAndPost(env, dataPtr, dataMsgType);

                }

                break;

            /* MM-MC-SomcAddForSoMCAP-00+{ */

            case CAMERA_MSG_OBJECT_TRACKING:

            {

                ALOGV("object tracking callback");

                if(dataPtr == NULL) {

                    ALOGE("%s: mem is null",__FUNCTION__);

                    env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                           mCameraJObjectWeak,dataMsgType,0, 0,NULL);

                    return;

                }

                ssize_t offset;

                size_t size;

                sp<IMemoryHeap> heap;

                heap =dataPtr->getMemory(&offset,&size);

                ALOGV("object tracking callback:mem off=%d,size=%d",(int) offset,(int) size);

                camera_ex_msg_object_tracking_t*cb = (camera_ex_msg_object_tracking_t *) heap->base();

                jobject object_tracking_result;

       

                if(cb != NULL) {

                    object_tracking_result = convertObjectTrackingResult(env,cb);

                }else {

                    ALOGE("object tracking callback: heap isnull");

                    env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                           mCameraJObjectWeak,CAMERA_MSG_OBJECT_TRACKING,0, 0,NULL);

                    return;

                }

                env->CallStaticVoidMethod(mCameraJClass,fields.post_event,

                       mCameraJObjectWeak, CAMERA_MSG_OBJECT_TRACKING, 0, 0, object_tracking_result);  // use the Android VM's JNI interface to invoke the Java method

            }

            break;

           /* MM-MC-SomcAddForSoMCAP-00+} */

            // There is no data.

            case 0:

                break;

            default:

                ALOGV("dataCallback(%d, %p)",dataMsgType,dataPtr.get());

                copyAndPost(env,dataPtr, dataMsgType);

                break;

        }

        // post frame metadata to Java

        if(metadata && (msgType &CAMERA_MSG_PREVIEW_METADATA)) {

            postMetadata(env,CAMERA_MSG_PREVIEW_METADATA,metadata);

        }

    }

       A brief look at convertObjectTrackingResult gives a better picture of how JNI calls Java classes and methods:

        The Java-layer classes and methods that convertObjectTrackingResult needs to call:

           vim frameworks/base/core/java/android/hardware/Camera.java

    public static class ObjectTrackingResult {

        public Rect mRectOfTrackedObject;

        public boolean mIsLost;

    }

        vim frameworks/base/graphics/java/android/graphics/Rect.java

    public final class Rect implements Parcelable {

        public int left;

        public int top;

        public int right;

        public int bottom;

        …...

      public void set(int left, int top, int right, int bottom) {

          this.left= left;

          this.top= top;

          this.right= right;

          this.bottom= bottom;

      }

       …...

    }

       JNI obtains the Java jclass ID (class ID), jmethod ID (method ID), and jfield ID (field ID):

       vim frameworks/base/core/jni/android_hardware_Camera.cpp

    bool JNICameraContext::setUpObjectTracking(JNIEnv* env)

    {

        Mutex::Autolock _l(mLock);

        objecttracking_callback_cookie *c = &objectTrackingCookie;

        ……

        // Get jclass ID.

        jclass class_results = env->FindClass( // look up the Java class ObjectTrackingResult

               "android/hardware/Camera$ObjectTrackingResult");

        jclass class_rect = env->FindClass( // look up the Java class Rect

               "android/graphics/Rect");

        c->results_clazz = (jclass)env->NewGlobalRef(class_results);

        c->rect_clazz = (jclass)env->NewGlobalRef(class_rect);

        // Get jmethod ID.

        c->rect_set_mid = env->GetMethodID(c->rect_clazz, "set", "(IIII)V"); // get the set(int, int, int, int) method

        // Get jfield ID: the mRectOfTrackedObject field

        c->mRectOfTrackedObject_fid = env->GetFieldID(c->results_clazz, "mRectOfTrackedObject",

               "Landroid/graphics/Rect;"); // next, get the mIsLost field

        c->mIsLost_fid =env->GetFieldID(c->results_clazz,"mIsLost","Z");

        env->DeleteLocalRef(class_results);

        env->DeleteLocalRef(class_rect);

        return true;

    }

       Using the Java-layer classes and methods, i.e. convertObjectTrackingResult:

    jobject JNICameraContext::convertObjectTrackingResult(JNIEnv *env, camera_ex_msg_object_tracking_t* cb)

    {

        ……

        objecttracking_callback_cookie *c = &objectTrackingCookie;

        if(NULL == c->results_clazz) {

            ALOGD("%s:c->results_clazz is NULL;",__FUNCTION__ );

            return NULL;

        } // now work with the ObjectTrackingResult class

        jobject callbackObject = env->AllocObject(c->results_clazz); // create an ObjectTrackingResult object

        if(NULL == callbackObject) {

            ALOGW("%s: object isNULL;",__FUNCTION__);

            returnNULL;

        }

        // Create an android.graphics.Rect object.

        jobject rect_obj = env->AllocObject(c->rect_clazz);

        if(NULL == rect_obj) {

            ALOGW("%s Errorrect_obj = %p",__FUNCTION__,rect_obj);

            returnNULL;

        }

        // Set rect data to android.graphics.Rect object.

        env->CallVoidMethod(rect_obj, c->rect_set_mid, // call Rect's set method

               cb->rect[0], cb->rect[1], cb->rect[2],cb->rect[3]);

        // Set android.graphics.Rect object to ObjectTrackingResult.Rect.

        env->SetObjectField(callbackObject,c->mRectOfTrackedObject_fid,rect_obj);

        env->DeleteLocalRef(rect_obj); // the rect data has been stored in the Java-layer mRectOfTrackedObject field

        // Set isLost boolean to ObjectTrackingResult.boolean.

        env->SetBooleanField(callbackObject, c->mIsLost_fid,cb->isLost);

        if (mObjectObjectTrackingResult != NULL) { // the isLost flag has been stored in the Java-layer mIsLost field

            env->DeleteGlobalRef(mObjectObjectTrackingResult);

            mObjectObjectTrackingResult = NULL;

        }

        mObjectObjectTrackingResult = env->NewGlobalRef(callbackObject); // keep a global reference to return

        env->DeleteLocalRef(callbackObject);

        returnmObjectObjectTrackingResult;

    }

    5.4 Client-Side Data Handling

        The following function is invoked by the service:

        vim frameworks/av/camera/Camera.cpp

    void Camera::dataCallback(int32_t msgType,const sp<IMemory>&dataPtr,

                             camera_frame_metadata_t *metadata)

    {

        sp<CameraListener> listener;

        {

            Mutex::Autolock_l(mLock);

            listener = mListener; // the listener is registered from JNI, analyzed below

        }

        if (listener != NULL) { // call the JNI postData function to pass the data to the JNI layer

            listener->postData(msgType,dataPtr,metadata);

        }

    }

        The JNI layer calls the client interface to set the listener:

            The client-side interface function that sets the listener:

    void Camera::setListener(const sp<CameraListener>& listener) // called from JNI

    {

        Mutex::Autolock _l(mLock);

        mListener =listener;

    }

            Calling the interface function to set the listener:

        vim frameworks/base/core/jni/android_hardware_Camera.cpp

    static jint android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,

        jobject weak_this, jint cameraId, jint halVersion, jstring clientPackageName)

    {

        ……

        // We use a weak reference so the Camera object can be garbage collected.

        // The reference is only used as a proxy for callbacks.

        sp<JNICameraContext>context = new JNICameraContext(env,weak_this, clazz, camera);

        context->incStrong((void*)android_hardware_Camera_native_setup);

        camera->setListener(context);

        // save context in opaque field

        env->SetLongField(thiz,fields.context,(jlong)context.get());

        returnNO_ERROR;

    }

    5.5 Service-Side Data Handling

        vim ./frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp

    void CameraClient::handleObjectTracking(const sp<IMemory>& mem) {

        ……

            if (c != NULL) { // call the client-side dataCallback function to deliver the data

                c->dataCallback(CAMERA_MSG_OBJECT_TRACKING,mem,NULL);

                LOG2("dataCallback left.top.right.bottom : %4d.%4d.%4d.%4d",

                        orgCb->rect[0], orgCb->rect[1],

                        orgCb->rect[2],orgCb->rect[3]);

            }

        }

    }

        The callback mechanism is used once again:

           Declare the callback function:

    void CameraClient::dataCallback(int32_t msgType,

            const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {

        LOG2("dataCallback(%d)",msgType);

        sp<CameraClient> client =static_cast<CameraClient*>(getClientFromCookie(user).get());

        if(client.get() == nullptr)return;

        if(!client->lockIfMessageWanted(msgType))return;

        if(dataPtr == 0&& metadata == NULL) {

            ALOGE("Null datareturned in data callback");

            client->handleGenericNotify(CAMERA_MSG_ERROR,UNKNOWN_ERROR,0);

            return;

        }

        switch(msgType & ~CAMERA_MSG_PREVIEW_METADATA) {

            ……

            case CAMERA_MSG_OBJECT_TRACKING:

                client->handleObjectTracking(dataPtr);

                break;

            /* MM-MC-SomcAddForSoMCAP-00+} */

            default:

                client->handleGenericData(msgType,dataPtr,metadata);

                break;

          }

    }

        Call the HAL interface function to register the callbacks:

    status_t CameraClient::initialize(CameraModule *module) {

        int callingPid = getCallingPid();

        status_t res;

        ……

        char camera_device_name[10];

        snprintf(camera_device_name,sizeof(camera_device_name),"%d",mCameraId);

        mHardware =new CameraHardwareInterface(camera_device_name);

        res = mHardware->initialize(module);

        if(res != OK) {

            ALOGE("%s: Camera %d:unable to initialize device: %s (%d)",

                   __FUNCTION__,mCameraId,strerror(-res),res);

            mHardware.clear();

            returnres;

        }

        mHardware->setCallbacks(notifyCallback,

               dataCallback,

               dataCallbackTimestamp,

               (void*)(uintptr_t)mCameraId);

        // Enable zoom, error, focus, and metadata messages by default

        enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM |CAMERA_MSG_FOCUS |

                      CAMERA_MSG_PREVIEW_METADATA| CAMERA_MSG_FOCUS_MOVE);

        LOG1("CameraClient::initialize X (pid%d, id %d)",callingPid,mCameraId);

        returnOK;

    }

       Define the interface function that registers the callbacks:

       vim frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h

    void setCallbacks(notify_callback notify_cb,

                      data_callback data_cb,

                      data_callback_timestamp data_cb_timestamp,

                      void* user)

    {

        mNotifyCb = notify_cb;

        mDataCb = data_cb;

        mDataCbTimestamp = data_cb_timestamp;

        mCbUser = user;

        ALOGV("%s(%s)",__FUNCTION__,mName.string());

        if (mDevice->ops->set_callbacks) { // call the hardware layer to set the callbacks; similar to before, not detailed further

            mDevice->ops->set_callbacks(mDevice,

                                  __notify_cb,

                                  __data_cb, // __data_cb will invoke mDataCb to deliver the data

                                  __data_cb_timestamp,

                                  __get_memory,

                                  this);

        }

    }

     
