This analysis is based on Android P.
While walking through the openCamera flow we reached the openSession() function in CameraProviderManager, which calls deviceInfo3->mInterface->open(). That call goes straight into the HAL layer, so continuing from CameraServer's openCamera flow, let's analyze how the call transitions into the HAL.
cpp
status_t CameraProviderManager::openSession(const std::string &id,
const sp<hardware::camera::device::V3_2::ICameraDeviceCallback>& callback,
/*out*/
sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
    // This step already touches the HAL: findDeviceInfoLocked() looks up the HAL-side camera device info
// std::vector<std::unique_ptr<DeviceInfo>> mDevices;
auto deviceInfo = findDeviceInfoLocked(id,
/*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
Status status;
hardware::Return<void> ret;
    // This step crosses into the HAL: ICameraDeviceSession is implemented on the HAL side;
    // it establishes the session between the camera provider and the camera service
ret = deviceInfo3->mInterface->open(callback, [&status, &session]
(Status s, const sp<device::V3_2::ICameraDeviceSession>& cameraSession) {
status = s;
if (status == Status::OK) {
*session = cameraSession;
}
});
if (!ret.isOk()) {
ALOGE("%s: Transaction error opening a session for camera device %s: %s",
__FUNCTION__, id.c_str(), ret.description().c_str());
return DEAD_OBJECT;
}
return mapToStatusT(status);
}
deviceInfo3->mInterface->open
Let's first look at how the deviceInfo object is obtained:
cpp
CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
const std::string& id,
hardware::hidl_version minVersion, hardware::hidl_version maxVersion) const {
for (auto& provider : mProviders) {
for (auto& deviceInfo : provider->mDevices) {
if (deviceInfo->mId == id &&
minVersion <= deviceInfo->mVersion && maxVersion >= deviceInfo->mVersion) {
return deviceInfo.get();
}
}
}
return nullptr;
}
From findDeviceInfoLocked() we can see that the matching deviceInfo is looked up in mProviders by cameraId and by the supported HIDL version range.
mProviders is defined in CameraProviderManager.h:
cpp
std::vector<sp<ProviderInfo>> mProviders;
mProviders is populated when CameraServer starts: CameraService::onFirstRef() calls enumerateProviders() --> mCameraProviderManager->initialize() --> CameraProviderManager::addProviderLocked(), which fills in the provider and device information.
In short, by the time CameraServer has finished starting up, the deviceInfo entries are already stored in mProviders.
Next, let's look at the deviceInfo3->mInterface->open logic:
cpp
auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
Status status;
hardware::Return<void> ret;
    // This step crosses into the HAL: ICameraDeviceSession is implemented on the HAL side;
    // it establishes the session between the camera provider and the camera service
ret = deviceInfo3->mInterface->open(callback, [&status, &session]
(Status s, const sp<device::V3_2::ICameraDeviceSession>& cameraSession) {
status = s;
if (status == Status::OK) {
*session = cameraSession;
}
});
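The open() call above follows the HIDL generates (...) convention: the generated C++ proxy delivers its output values through a synchronous callback lambda (the _hidl_cb) rather than as return values, which is why openSession() captures status and session by reference. Below is a minimal, standalone illustration of this calling pattern in plain C++ (no HIDL dependency; all names are invented for the example):
cpp
#include <functional>
#include <iostream>
#include <memory>
#include <string>

// Stand-ins for the HIDL-generated types; purely illustrative.
enum class Status { OK, INTERNAL_ERROR };
struct FakeSession { std::string name; };

// The "proxy" side: invokes the callback exactly once with the output values.
void openFake(bool succeed,
              const std::function<void(Status, std::shared_ptr<FakeSession>)>& cb) {
    if (succeed) {
        cb(Status::OK, std::make_shared<FakeSession>(FakeSession{"session0"}));
    } else {
        cb(Status::INTERNAL_ERROR, nullptr);
    }
}

int main() {
    Status status = Status::INTERNAL_ERROR;
    std::shared_ptr<FakeSession> session;
    // The caller captures locals by reference, just like openSession() does above.
    openFake(true, [&status, &session](Status s, std::shared_ptr<FakeSession> sess) {
        status = s;
        if (s == Status::OK) {
            session = std::move(sess);
        }
    });
    std::cout << (status == Status::OK ? session->name : "open failed") << std::endl;
    return 0;
}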
From the cast above we know that deviceInfo3 is of type ProviderInfo::DeviceInfo3, so let's look at how DeviceInfo3 is defined:
cpp
CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
const metadata_vendor_id_t tagId, const std::string &id,
uint16_t minorVersion,
const CameraResourceCost& resourceCost,
sp<InterfaceT> interface) :
DeviceInfo(name, tagId, id, hardware::hidl_version{3, minorVersion},
resourceCost),
mInterface(interface) {
// Get camera characteristics and initialize flash unit availability
Status status;
hardware::Return<void> ret;
ret = mInterface->getCameraCharacteristics([&status, this](Status s,
device::V3_2::CameraMetadata metadata) {
status = s;
if (s == Status::OK) {
camera_metadata_t *buffer =
reinterpret_cast<camera_metadata_t*>(metadata.data());
size_t expectedSize = metadata.size();
int res = validate_camera_metadata_structure(buffer, &expectedSize);
if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
set_camera_metadata_vendor_id(buffer, mProviderTagid);
mCameraCharacteristics = buffer;
} else {
ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
status = Status::INTERNAL_ERROR;
}
}
});
if (!ret.isOk()) {
ALOGE("%s: Transaction error getting camera characteristics for device %s"
" to check for a flash unit: %s", __FUNCTION__, mId.c_str(),
ret.description().c_str());
return;
}
if (status != Status::OK) {
ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
__FUNCTION__, mId.c_str(), CameraProviderManager::statusToString(status), status);
return;
}
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
mHasFlashUnit = true;
} else {
mHasFlashUnit = false;
}
}
This is the DeviceInfo3 constructor. The member we care about here is mInterface, whose type is sp<InterfaceT>; looking at the class declaration:
cpp
// HALv3-specific camera fields, including the actual device interface
struct DeviceInfo3 : public DeviceInfo {
typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
const sp<InterfaceT> mInterface;
virtual status_t setTorchMode(bool enabled) override;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
virtual bool isAPI1Compatible() const override;
virtual status_t dumpState(int fd) const override;
virtual status_t getCameraCharacteristics(
CameraMetadata *characteristics) const override;
DeviceInfo3(const std::string& name, const metadata_vendor_id_t tagId,
const std::string &id, uint16_t minorVersion,
const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
sp<InterfaceT> interface);
virtual ~DeviceInfo3();
private:
CameraMetadata mCameraCharacteristics;
};
As we can see, InterfaceT is the hardware::camera::device::V3_2::ICameraDevice type:
/hardware/interfaces/camera/device/3.2/ICameraDevice.hal
hal
package android.hardware.camera.device@3.2;
import android.hardware.camera.common@1.0::types;
import ICameraDeviceSession;
import ICameraDeviceCallback;
/**
* Camera device HAL, first modern version
*
* Supports the android.hardware.Camera API, and the android.hardware.camera2
* API at LIMITED or better hardware level.
*
*/
interface ICameraDevice {
getResourceCost() generates (Status status, CameraResourceCost resourceCost);
getCameraCharacteristics() generates
(Status status, CameraMetadata cameraCharacteristics);
setTorchMode(TorchMode mode) generates (Status status);
open(ICameraDeviceCallback callback) generates
(Status status, ICameraDeviceSession session);
dumpState(handle fd);
};
The interface declares the methods shown above, including open(); the interface is implemented by CameraDevice:
/hardware/interfaces/camera/device/3.2/default/CameraDevice_3_2.h
cpp
#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE_H
#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE_H
#include "utils/Mutex.h"
#include "CameraModule.h"
#include "CameraMetadata.h"
#include "CameraDeviceSession.h"
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <hidl/Status.h>
#include <hidl/MQDescriptor.h>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_2 {
namespace implementation {
using ::android::hardware::camera::device::V3_2::RequestTemplate;
using ::android::hardware::camera::device::V3_2::ICameraDevice;
using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback;
using ::android::hardware::camera::device::V3_2::ICameraDeviceSession;
using ::android::hardware::camera::common::V1_0::CameraResourceCost;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::common::V1_0::TorchMode;
using ::android::hardware::camera::common::V1_0::helper::CameraModule;
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::hidl_vec;
using ::android::hardware::hidl_string;
using ::android::sp;
using ::android::Mutex;
/*
* The camera device HAL implementation is opened lazily (via the open call)
*/
struct CameraDevice : public ICameraDevice {
// Called by provider HAL. Provider HAL must ensure the uniqueness of
// CameraDevice object per cameraId, or there could be multiple CameraDevice
// trying to access the same physical camera.
// Also, provider will have to keep track of all CameraDevice objects in
// order to notify CameraDevice when the underlying camera is detached
CameraDevice(sp<CameraModule> module,
const std::string& cameraId,
const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames);
~CameraDevice();
// Caller must use this method to check if CameraDevice ctor failed
bool isInitFailed() { return mInitFail; }
// Used by provider HAL to signal external camera disconnected
void setConnectionStatus(bool connected);
/* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. */
// The following method can be called without opening the actual camera device
Return<void> getResourceCost(getResourceCost_cb _hidl_cb) override;
Return<void> getCameraCharacteristics(getCameraCharacteristics_cb _hidl_cb) override;
Return<Status> setTorchMode(TorchMode mode) override;
// Open the device HAL and also return a default capture session
Return<void> open(const sp<ICameraDeviceCallback>& callback, open_cb _hidl_cb) override;
// Forward the dump call to the opened session, or do nothing
Return<void> dumpState(const ::android::hardware::hidl_handle& fd) override;
/* End of Methods from ::android::hardware::camera::device::V3_2::ICameraDevice */
CameraDevice_3_2.h declares the CameraDevice class that implements the ICameraDevice interface.
/hardware/interfaces/camera/device/3.2/default/CameraDevice.cpp
cpp
#define LOG_TAG "CamDev@3.2-impl"
#include <log/log.h>
#include <utils/Vector.h>
#include <utils/Trace.h>
#include "CameraDevice_3_2.h"
#include <include/convert.h>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_2 {
namespace implementation {
using ::android::hardware::camera::common::V1_0::Status;
CameraDevice::CameraDevice(
sp<CameraModule> module, const std::string& cameraId,
const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames) :
mModule(module),
mCameraId(cameraId),
mDisconnected(false),
mCameraDeviceNames(cameraDeviceNames) {
mCameraIdInt = atoi(mCameraId.c_str());
// Should not reach here as provider also validate ID
if (mCameraIdInt < 0) {
ALOGE("%s: Invalid camera id: %s", __FUNCTION__, mCameraId.c_str());
mInitFail = true;
} else if (mCameraIdInt >= mModule->getNumberOfCameras()) {
ALOGI("%s: Adding a new camera id: %s", __FUNCTION__, mCameraId.c_str());
}
mDeviceVersion = mModule->getDeviceVersion(mCameraIdInt);
if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
ALOGE("%s: Camera id %s does not support HAL3.2+",
__FUNCTION__, mCameraId.c_str());
mInitFail = true;
}
}
CameraDevice::~CameraDevice() {}
........................
CameraDevice.cpp includes CameraDevice_3_2.h and provides the implementation of the open() function:
cpp
Return<void> CameraDevice::open(const sp<ICameraDeviceCallback>& callback, open_cb _hidl_cb) {
Status status = initStatus();
sp<CameraDeviceSession> session = nullptr;
if (callback == nullptr) {
ALOGE("%s: cannot open camera %s. callback is null!",
__FUNCTION__, mCameraId.c_str());
_hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
return Void();
}
if (status != Status::OK) {
// Provider will never pass initFailed device to client, so
// this must be a disconnected camera
ALOGE("%s: cannot open camera %s. camera is disconnected!",
__FUNCTION__, mCameraId.c_str());
_hidl_cb(Status::CAMERA_DISCONNECTED, nullptr);
return Void();
} else {
mLock.lock();
ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mCameraIdInt);
session = mSession.promote();
if (session != nullptr && !session->isClosed()) {
ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
mLock.unlock();
_hidl_cb(Status::CAMERA_IN_USE, nullptr);
return Void();
}
/** Open HAL device */
status_t res;
camera3_device_t *device;
ATRACE_BEGIN("camera3->open");
res = mModule->open(mCameraId.c_str(),
reinterpret_cast<hw_device_t**>(&device));
ATRACE_END();
if (res != OK) {
ALOGE("%s: cannot open camera %s!", __FUNCTION__, mCameraId.c_str());
mLock.unlock();
_hidl_cb(getHidlStatus(res), nullptr);
return Void();
}
/** Cross-check device version */
if (device->common.version < CAMERA_DEVICE_API_VERSION_3_2) {
ALOGE("%s: Could not open camera: "
"Camera device should be at least %x, reports %x instead",
__FUNCTION__,
CAMERA_DEVICE_API_VERSION_3_2,
device->common.version);
device->common.close(&device->common);
mLock.unlock();
_hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
return Void();
}
struct camera_info info;
res = mModule->getCameraInfo(mCameraIdInt, &info);
if (res != OK) {
ALOGE("%s: Could not open camera: getCameraInfo failed", __FUNCTION__);
device->common.close(&device->common);
mLock.unlock();
_hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
return Void();
}
session = createSession(
device, info.static_camera_characteristics, callback);
if (session == nullptr) {
ALOGE("%s: camera device session allocation failed", __FUNCTION__);
mLock.unlock();
_hidl_cb(Status::INTERNAL_ERROR, nullptr);
return Void();
}
if (session->isInitFailed()) {
ALOGE("%s: camera device session init failed", __FUNCTION__);
session = nullptr;
mLock.unlock();
_hidl_cb(Status::INTERNAL_ERROR, nullptr);
return Void();
}
mSession = session;
IF_ALOGV() {
session->getInterface()->interfaceChain([](
::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) {
ALOGV("Session interface chain:");
for (auto iface : interfaceChain) {
ALOGV(" %s", iface.c_str());
}
});
}
mLock.unlock();
}
_hidl_cb(status, session->getInterface());
return Void();
}
This function does two main things:
- it calls the legacy Camera HAL's open entry point through mModule;
- it creates a CameraDeviceSession and stores it in mSession.
(The original post includes a sequence diagram of the Camera HAL open flow here; we follow the code below.)
mModule->open
Inside open(), the first call is initStatus():
cpp
Status CameraDevice::initStatus() const {
Mutex::Autolock _l(mLock);
Status status = Status::OK;
if (mInitFail) {
status = Status::INTERNAL_ERROR;
} else if (mDisconnected) {
status = Status::CAMERA_DISCONNECTED;
}
return status;
}
This function simply checks the two state flags mInitFail and mDisconnected.
Both flags are assigned in the CameraDevice constructor:
cpp
CameraDevice::CameraDevice(
sp<CameraModule> module, const std::string& cameraId,
const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames) :
mModule(module),
mCameraId(cameraId),
mDisconnected(false),
mCameraDeviceNames(cameraDeviceNames) {
mCameraIdInt = atoi(mCameraId.c_str());
// Should not reach here as provider also validate ID
if (mCameraIdInt < 0) {
ALOGE("%s: Invalid camera id: %s", __FUNCTION__, mCameraId.c_str());
mInitFail = true;
} else if (mCameraIdInt >= mModule->getNumberOfCameras()) {
ALOGI("%s: Adding a new camera id: %s", __FUNCTION__, mCameraId.c_str());
}
mDeviceVersion = mModule->getDeviceVersion(mCameraIdInt);
if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
ALOGE("%s: Camera id %s does not support HAL3.2+",
__FUNCTION__, mCameraId.c_str());
mInitFail = true;
}
}
In the constructor, mDisconnected is initialized to false, and mInitFail is only set to true if the passed-in cameraId or the device version fails validation.
In the normal case, both mInitFail and mDisconnected therefore remain false,
so CameraDevice::initStatus() returns Status::OK.
Next, CameraDevice::open() calls the legacy Camera HAL's open entry point through mModule; mModule (a CameraModule) is a thin wrapper around the Camera HAL:
cpp
/** Open HAL device */
status_t res;
camera3_device_t *device;
ATRACE_BEGIN("camera3->open");
res = mModule->open(mCameraId.c_str(),
reinterpret_cast<hw_device_t**>(&device));
ATRACE_END();
mModule is of type CameraModule and is assigned in the CameraDevice constructor.
Let's look at CameraModule:
/hardware/interfaces/camera/common/1.0/default/CameraModule.cpp
cpp
int CameraModule::open(const char* id, struct hw_device_t** device) {
int res;
ATRACE_BEGIN("camera_module->open");
res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
ATRACE_END();
return res;
}
The result is wrapped in filterOpenErrorCode(), which simply normalizes the returned error code (see the sketch below); the real work is done by its argument: mModule->common.methods->open(&mModule->common, id, device).
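For reference, filterOpenErrorCode() only lets a small set of well-known error codes pass through and maps everything else to -ENODEV. A paraphrased sketch based on the default AOSP CameraModule (verify against your tree):
cpp
// Paraphrased from the default AOSP CameraModule: legacy HALs may return
// arbitrary negative values, so only a known set of codes is passed through
// and anything else is reported as -ENODEV.
int CameraModule::filterOpenErrorCode(int errorCode) {
    switch (errorCode) {
        case NO_ERROR:
        case -EBUSY:   // camera busy
        case -EINVAL:  // bad argument
        case -EUSERS:  // too many cameras open
            return errorCode;
        default:
            break;
    }
    return -ENODEV;
}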
Here mModule is assigned in the CameraModule constructor:
cpp
CameraModule::CameraModule(camera_module_t *module) {
if (module == NULL) {
ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
assert(0);
}
mModule = module;
}
So mModule is of type camera_module_t:
/hardware/libhardware/include/hardware/camera_common.h
c
typedef struct camera_module {
hw_module_t common;
int (*get_number_of_cameras)(void);
int (*get_camera_info)(int camera_id, struct camera_info *info);
int (*set_callbacks)(const camera_module_callbacks_t *callbacks);
void (*get_vendor_tag_ops)(vendor_tag_ops_t* ops);
int (*open_legacy)(const struct hw_module_t* module, const char* id,
uint32_t halVersion, struct hw_device_t** device);
int (*set_torch_mode)(const char* camera_id, bool enabled);
int (*init)();
/* reserved for future use */
void* reserved[5];
} camera_module_t;
The common member is of type hw_module_t:
/hardware/libhardware/include/hardware/hardware.h
c
typedef struct hw_module_t {
/** tag must be initialized to HARDWARE_MODULE_TAG */
uint32_t tag;
uint16_t module_api_version;
#define version_major module_api_version
uint16_t hal_api_version;
#define version_minor hal_api_version
/** Identifier of module */
const char *id;
/** Name of this module */
const char *name;
/** Author/owner/implementor of the module */
const char *author;
/** Modules methods */
struct hw_module_methods_t* methods;
/** module's dso */
void* dso;
#ifdef __LP64__
uint64_t reserved[32-7];
#else
/** padding to 128 bytes, reserved for future use */
uint32_t reserved[32-7];
#endif
} hw_module_t;
and its methods member is of type hw_module_methods_t:
c
typedef struct hw_module_methods_t {
/** Open a specific device */
int (*open)(const struct hw_module_t* module, const char* id,
struct hw_device_t** device);
} hw_module_methods_t;
So open is ultimately a function pointer; in the Qualcomm HAL it points to QCamera2Factory::camera_device_open:
cpp
int QCamera2Factory::camera_device_open(
const struct hw_module_t *module, const char *id,
struct hw_device_t **hw_device)
{
int rc = NO_ERROR;
if (module != &HAL_MODULE_INFO_SYM.common) {
LOGE("Invalid module. Trying to open %p, expect %p",
module, &HAL_MODULE_INFO_SYM.common);
return INVALID_OPERATION;
}
if (!id) {
LOGE("Invalid camera id");
return BAD_VALUE;
}
#ifdef QCAMERA_HAL1_SUPPORT
if(gQCameraMuxer)
rc = gQCameraMuxer->camera_device_open(module, id, hw_device);
else
#endif
rc = gQCamera2Factory->cameraDeviceOpen(atoi(id), hw_device);
return rc;
}
struct hw_module_methods_t QCamera2Factory::mModuleMethods = {
.open = QCamera2Factory::camera_device_open,
};
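How does mModule->common.methods->open end up at this function? The HAL .so exports a camera_module_t symbol named HAL_MODULE_INFO_SYM whose methods field points at QCamera2Factory::mModuleMethods, and hw_get_module() loads the library and looks this symbol up. A paraphrased sketch of that export (abridged from QCamera2Hal.cpp; exact field values may differ between trees):
cpp
// Paraphrased sketch of the module export in QCamera2Hal.cpp (abridged).
static hw_module_t camera_common = {
    .tag                = HARDWARE_MODULE_TAG,
    .module_api_version = CAMERA_MODULE_API_VERSION_2_4,
    .hal_api_version    = HARDWARE_HAL_API_VERSION,
    .id                 = CAMERA_HARDWARE_MODULE_ID,
    .name               = "QCamera Module",
    .author             = "Qualcomm Innovation Center Inc",
    .methods            = &qcamera::QCamera2Factory::mModuleMethods, // -> camera_device_open
    .dso                = NULL,
    .reserved           = {0}
};

camera_module_t HAL_MODULE_INFO_SYM = {
    .common                = camera_common,
    .get_number_of_cameras = qcamera::QCamera2Factory::get_number_of_cameras,
    .get_camera_info       = qcamera::QCamera2Factory::get_camera_info,
    .set_callbacks         = qcamera::QCamera2Factory::set_callbacks,
    .get_vendor_tag_ops    = qcamera::QCamera2Factory::get_vendor_tag_ops,
    .open_legacy           = qcamera::QCamera2Factory::open_legacy,
    .set_torch_mode        = qcamera::QCamera2Factory::set_torch_mode,
    // remaining fields omitted
};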
Since this analysis is based on HAL3, let's go straight to QCamera2Factory::cameraDeviceOpen():
cpp
int QCamera2Factory::cameraDeviceOpen(int camera_id,
struct hw_device_t **hw_device)
{
int rc = NO_ERROR;
if (camera_id < 0 || camera_id >= mNumOfCameras)
return -ENODEV;
if ( NULL == mHalDescriptors ) {
LOGE("Hal descriptor table is not initialized!");
return NO_INIT;
}
LOGI("Open camera id %d API version %d",
camera_id, mHalDescriptors[camera_id].device_version);
if ( mHalDescriptors[camera_id].device_version == CAMERA_DEVICE_API_VERSION_3_0 ) {
CAMSCOPE_INIT(CAMSCOPE_SECTION_HAL);
QCamera3HardwareInterface *hw = new QCamera3HardwareInterface(mHalDescriptors[camera_id].cameraId,
mCallbacks);
if (!hw) {
LOGE("Allocation of hardware interface failed");
return NO_MEMORY;
}
rc = hw->openCamera(hw_device);
if (rc != 0) {
delete hw;
}
}
#ifdef QCAMERA_HAL1_SUPPORT
else if (mHalDescriptors[camera_id].device_version == CAMERA_DEVICE_API_VERSION_1_0) {
QCamera2HardwareInterface *hw = new QCamera2HardwareInterface((uint32_t)camera_id);
if (!hw) {
LOGE("Allocation of hardware interface failed");
return NO_MEMORY;
}
rc = hw->openCamera(hw_device);
if (rc != NO_ERROR) {
delete hw;
}
}
#endif
else {
LOGE("Device version for camera id %d invalid %d",
camera_id,
mHalDescriptors[camera_id].device_version);
return BAD_VALUE;
}
return rc;
}
The core of this function is the hw->openCamera(hw_device) call:
cpp
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
int rc = 0;
if (mState != CLOSED) {
*hw_device = NULL;
return PERMISSION_DENIED;
}
logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
mCameraId);
if (mCameraHandle) {
LOGE("Failure: Camera already opened");
return ALREADY_EXISTS;
}
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
if (gActiveEaselClient == 0) {
rc = gEaselManagerClient->resume(this);
if (rc != 0) {
ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
return rc;
}
mEaselFwUpdated = false;
}
gActiveEaselClient++;
mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
if (rc != OK) {
ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
strerror(-rc), rc);
return rc;
}
}
}
rc = openCamera();
if (rc == 0) {
*hw_device = &mCameraDevice.common;
} else {
*hw_device = NULL;
// Suspend Easel because opening camera failed.
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
if (gActiveEaselClient == 1) {
status_t suspendErr = gEaselManagerClient->suspend();
if (suspendErr != 0) {
ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
strerror(-suspendErr), suspendErr);
}
}
gActiveEaselClient--;
}
if (mQCamera3HdrPlusListenerThread != nullptr) {
mQCamera3HdrPlusListenerThread->requestExit();
mQCamera3HdrPlusListenerThread->join();
mQCamera3HdrPlusListenerThread = nullptr;
}
}
}
LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
mCameraId, rc);
if (rc == NO_ERROR) {
mState = OPENED;
}
return rc;
}
int QCamera3HardwareInterface::openCamera()
{
int rc = 0;
char value[PROPERTY_VALUE_MAX];
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
if (rc < 0) {
LOGE("Failed to reserve flash for camera id: %d",
mCameraId);
return UNKNOWN_ERROR;
}
rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
if (rc) {
LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
return rc;
}
if (!mCameraHandle) {
LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
return -ENODEV;
}
rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle, (void *)this);
if (rc < 0) {
LOGE("Error, failed to register event callback");
/* Not closing camera here since it is already handled in destructor */
return FAILED_TRANSACTION;
}
mExifParams.debug_params =
(mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
if (mExifParams.debug_params) {
memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
} else {
LOGE("Out of Memory. Allocation failed for 3A debug exif params");
return NO_MEMORY;
}
mFirstConfiguration = true;
//Notify display HAL that a camera session is active.
//But avoid calling the same during bootup because camera service might open/close
//cameras at boot time during its initialization and display service will also internally
//wait for camera service to initialize first while calling this display API, resulting in a
//deadlock situation. Since boot time camera open/close calls are made only to fetch
//capabilities, no need of this display bw optimization.
//Use "service.bootanim.exit" property to know boot status.
property_get("service.bootanim.exit", value, "0");
if (atoi(value) == 1) {
pthread_mutex_lock(&gCamLock);
if (gNumCameraSessions++ == 0) {
setCameraLaunchStatus(true);
}
pthread_mutex_unlock(&gCamLock);
}
//fill the session id needed while linking dual cam
pthread_mutex_lock(&gCamLock);
rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
&sessionId[mCameraId]);
pthread_mutex_unlock(&gCamLock);
if (rc < 0) {
LOGE("Error, failed to get sessiion id");
return UNKNOWN_ERROR;
} else {
//Allocate related cam sync buffer
//this is needed for the payload that goes along with bundling cmd for related
//camera use cases
m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
if(rc != OK) {
rc = NO_MEMORY;
LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
return NO_MEMORY;
}
//Map memory for related cam sync buffer
rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
m_pDualCamCmdHeap->getFd(0),
sizeof(cam_dual_camera_cmd_info_t),
m_pDualCamCmdHeap->getPtr(0));
if(rc < 0) {
LOGE("Dualcam: failed to map Related cam sync buffer");
rc = FAILED_TRANSACTION;
return NO_MEMORY;
}
m_pDualCamCmdPtr =
(cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
}
LOGH("mCameraId=%d",mCameraId);
return NO_ERROR;
}
openCamera() in turn calls camera_open(), which takes us into the mm-camera interface layer's open logic proper:
c
int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
{
int32_t rc = 0;
mm_camera_obj_t *cam_obj = NULL;
uint32_t cam_idx = camera_idx;
uint32_t aux_idx = 0;
uint8_t is_multi_camera = 0;
........................
cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
if(NULL == cam_obj) {
pthread_mutex_unlock(&g_intf_lock);
LOGE("no mem");
return -EINVAL;
}
/* initialize camera obj */
    // initialize the mm_camera_obj_t structure
memset(cam_obj, 0, sizeof(mm_camera_obj_t));
cam_obj->ctrl_fd = -1;
cam_obj->ds_fd = -1;
cam_obj->ref_count++;
cam_obj->my_num = 0;
cam_obj->my_hdl = mm_camera_util_generate_handler(cam_idx);
cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
cam_obj->vtbl.ops = &mm_camera_ops;
pthread_mutex_init(&cam_obj->cam_lock, NULL);
pthread_mutex_init(&cam_obj->muxer_lock, NULL);
/* unlock global interface lock, if not, in dual camera use case,
* current open will block operation of another opened camera obj*/
pthread_mutex_lock(&cam_obj->cam_lock);
pthread_mutex_unlock(&g_intf_lock);
rc = mm_camera_open(cam_obj);
if (rc != 0) {
LOGE("mm_camera_open err = %d", rc);
pthread_mutex_destroy(&cam_obj->cam_lock);
pthread_mutex_lock(&g_intf_lock);
g_cam_ctrl.cam_obj[cam_idx] = NULL;
free(cam_obj);
cam_obj = NULL;
pthread_mutex_unlock(&g_intf_lock);
*camera_vtbl = NULL;
return rc;
}
........................
}
camera_open() then calls mm_camera_open(), which is defined in mm_camera.c.
mm_camera.c is mainly responsible for creating and managing channels, configuring and managing streams, and defining event callbacks; mm_camera communicates with mm_channel by invoking mm_channel's state-machine functions.
/hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
c
int32_t mm_camera_open(mm_camera_obj_t *my_obj)
{
........................
mm_camera_cmd_thread_launch(&my_obj->evt_thread,
mm_camera_dispatch_app_event,
(void *)my_obj);
/* launch event poll thread
* we will add evt fd into event poll thread upon user first register for evt */
LOGD("Launch evt Poll Thread in Cam Open");
snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
MM_CAMERA_POLL_TYPE_EVT);
mm_camera_evt_sub(my_obj, TRUE);
........................
}
During open, two threads defined in mm_camera_thread.c are launched: a cmd thread and a poll thread. The poll thread is the key one, since frame data is eventually delivered through this mechanism as well; however, the poll thread started here is not used for data, but for handling events returned by the kernel.
mm_camera_cmd_thread_launch
The thread started by mm_camera_cmd_thread_launch dispatches commands; the possible command types are:
c
typedef enum
{
MM_CAMERA_CMD_TYPE_DATA_CB, /* dataB CMD */
MM_CAMERA_CMD_TYPE_EVT_CB, /* evtCB CMD */
MM_CAMERA_CMD_TYPE_EXIT, /* EXIT */
MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB, /* superbuf dataB CMD */
MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY, /* configure notify mode */
MM_CAMERA_CMD_TYPE_START_ZSL, /* start zsl snapshot for channel */
MM_CAMERA_CMD_TYPE_STOP_ZSL, /* stop zsl snapshot for channel */
MM_CAMERA_CMD_TYPE_FLUSH_QUEUE, /* flush queue */
MM_CAMERA_CMD_TYPE_GENERAL, /* general cmd */
MM_CAMERA_CMD_TYPE_MAX
} mm_camera_cmdcb_type_t;
Callback chain:
mm_camera_cmd_thread
-> mm_camera_dispatch_app_event
-> my_obj->evt.evt[i].evt_cb(...)
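Schematically, the command thread blocks on a semaphore, drains its command queue, and hands each node to the registered callback, which for the event thread launched above is mm_camera_dispatch_app_event(). A simplified sketch of that loop (structure and member names follow the mm_camera convention but are abridged; see mm_camera_thread.c for the real code):
c
/* Simplified sketch of the command-thread body (not the verbatim source).
 * Producers enqueue an mm_camera_cmdcb_t and post the semaphore; the thread
 * dequeues each node and dispatches it to the registered callback. */
static void *cmd_thread_sketch(void *data)
{
    mm_camera_cmd_thread_t *cmd_thread = (mm_camera_cmd_thread_t *)data;
    int running = 1;

    while (running) {
        cam_sem_wait(&cmd_thread->cmd_sem);               /* wait for work */
        mm_camera_cmdcb_t *node =
            (mm_camera_cmdcb_t *)cam_queue_deq(&cmd_thread->cmd_queue);
        while (node != NULL) {
            if (node->cmd_type == MM_CAMERA_CMD_TYPE_EXIT) {
                running = 0;
            } else if (cmd_thread->cb != NULL) {
                cmd_thread->cb(node, cmd_thread->user_data); /* dispatch */
            }
            free(node);
            node = (mm_camera_cmdcb_t *)cam_queue_deq(&cmd_thread->cmd_queue);
        }
    }
    return NULL;
}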
mm_camera_poll_thread_launch
The thread started by mm_camera_poll_thread_launch polls for EVT and DATA events, most importantly events arriving on "/dev/videoX":
c
typedef enum {
MM_CAMERA_POLL_TYPE_EVT,
MM_CAMERA_POLL_TYPE_DATA,
MM_CAMERA_POLL_TYPE_MAX
} mm_camera_poll_thread_type_t;
The poll thread created during open uses the EVT type.
mm_camera_evt_sub
Let's look at how mm_camera_evt_sub subscribes to events:
c
int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
uint8_t reg_flag)
{
int32_t rc = 0;
struct v4l2_event_subscription sub;
memset(&sub, 0, sizeof(sub));
sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
sub.id = MSM_CAMERA_MSM_NOTIFY;
if(FALSE == reg_flag) {
/* unsubscribe */
rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
if (rc < 0) {
LOGE("unsubscribe event rc = %d, errno %d",
rc, errno);
return rc;
}
/* remove evt fd from the polling thraed when unreg the last event */
rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
0, my_obj->my_hdl, mm_camera_sync_call);
} else {
rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
if (rc < 0) {
LOGE("subscribe event rc = %d, errno %d",
rc, errno);
return rc;
}
/* add evt fd to polling thread when subscribe the first event */
rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
0, my_obj->my_hdl, my_obj->ctrl_fd, mm_camera_event_notify,
(void*)my_obj, mm_camera_sync_call);
}
return rc;
}
This adds the /dev/videoX control fd to the event poll thread.
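Under the hood this is the standard V4L2 event mechanism: subscribe on the fd, poll() for POLLPRI, then dequeue the event with VIDIOC_DQEVENT. A generic, self-contained illustration (the device path and event type here are placeholders; mm_camera uses MSM-specific event types on its ctrl_fd):
c
#include <fcntl.h>
#include <poll.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);    /* placeholder node */
    if (fd < 0) { perror("open"); return 1; }

    struct v4l2_event_subscription sub;
    memset(&sub, 0, sizeof(sub));
    sub.type = V4L2_EVENT_EOS;               /* placeholder event type */
    if (ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) {
        perror("VIDIOC_SUBSCRIBE_EVENT");
        close(fd);
        return 1;
    }

    struct pollfd pfd = { .fd = fd, .events = POLLPRI };
    if (poll(&pfd, 1, 5000) > 0 && (pfd.revents & POLLPRI)) {
        struct v4l2_event ev;
        if (ioctl(fd, VIDIOC_DQEVENT, &ev) == 0) {
            printf("got v4l2 event type %u\n", ev.type);
        }
    }

    ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
    close(fd);
    return 0;
}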
createSession
After the device node has been opened through the kernel and the event threads are running, the next step is to create the corresponding session object that will be handed back to the caller:
cpp
session = createSession(
device, info.static_camera_characteristics, callback);
Let's look at the createSession() logic:
cpp
sp<CameraDeviceSession> CameraDevice::createSession(camera3_device_t* device,
const camera_metadata_t* deviceInfo,
const sp<ICameraDeviceCallback>& callback) {
return new CameraDeviceSession(device, deviceInfo, callback);
}
It simply returns a new CameraDeviceSession object:
cpp
CameraDeviceSession::CameraDeviceSession(
camera3_device_t* device,
const camera_metadata_t* deviceInfo,
const sp<ICameraDeviceCallback>& callback) :
camera3_callback_ops({&sProcessCaptureResult, &sNotify}),
mDevice(device),
mDeviceVersion(device->common.version),
mFreeBufEarly(shouldFreeBufEarly()),
mIsAELockAvailable(false),
mDerivePostRawSensKey(false),
mNumPartialResults(1),
mResultBatcher(callback) {
mDeviceInfo = deviceInfo;
camera_metadata_entry partialResultsCount =
mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (partialResultsCount.count > 0) {
mNumPartialResults = partialResultsCount.data.i32[0];
}
mResultBatcher.setNumPartialResults(mNumPartialResults);
camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find(
ANDROID_CONTROL_AE_LOCK_AVAILABLE);
if (aeLockAvailableEntry.count > 0) {
mIsAELockAvailable = (aeLockAvailableEntry.data.u8[0] ==
ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
}
// Determine whether we need to derive sensitivity boost values for older devices.
// If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control
// be listed (as the default value 100)
if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
mDerivePostRawSensKey = true;
}
mInitFail = initialize();
}
The constructor ends by calling initialize():
cpp
bool CameraDeviceSession::initialize() {
/** Initialize device with callback functions */
ATRACE_BEGIN("camera3->initialize");
status_t res = mDevice->ops->initialize(mDevice, this);
ATRACE_END();
if (res != OK) {
ALOGE("%s: Unable to initialize HAL device: %s (%d)",
__FUNCTION__, strerror(-res), res);
mDevice->common.close(&mDevice->common);
mClosed = true;
return true;
}
int32_t reqFMQSize = property_get_int32("ro.camera.req.fmq.size", /*default*/-1);
if (reqFMQSize < 0) {
reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE;
} else {
ALOGV("%s: request FMQ size overridden to %d", __FUNCTION__, reqFMQSize);
}
mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
static_cast<size_t>(reqFMQSize),
false /* non blocking */);
if (!mRequestMetadataQueue->isValid()) {
ALOGE("%s: invalid request fmq", __FUNCTION__);
return true;
}
int32_t resFMQSize = property_get_int32("ro.camera.res.fmq.size", /*default*/-1);
if (resFMQSize < 0) {
resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE;
} else {
ALOGV("%s: result FMQ size overridden to %d", __FUNCTION__, resFMQSize);
}
mResultMetadataQueue = std::make_shared<RequestMetadataQueue>(
static_cast<size_t>(resFMQSize),
false /* non blocking */);
if (!mResultMetadataQueue->isValid()) {
ALOGE("%s: invalid result fmq", __FUNCTION__);
return true;
}
mResultBatcher.setResultMetadataQueue(mResultMetadataQueue);
return false;
}
This function does three things:
- calls mDevice->ops->initialize() to initialize the HAL device;
- creates the RequestMetadataQueue (a fast message queue);
- creates the ResultMetadataQueue.
The last two correspond to the session->getCaptureRequestMetadataQueue() and session->getCaptureResultMetadataQueue() calls in Camera3Device::initialize(), sketched below.
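On the framework side, Camera3Device::initialize() fetches the descriptors of these two queues over HIDL and wraps them in local MessageQueue objects, so per-capture metadata can bypass binder copies. Roughly like this (abridged and paraphrased from the Android P sources):
cpp
// Abridged sketch of the framework-side counterpart in Camera3Device::initialize().
std::shared_ptr<RequestMetadataQueue> queue;
auto requestQueueRet = session->getCaptureRequestMetadataQueue(
        [&queue](const auto& descriptor) {
            queue = std::make_shared<RequestMetadataQueue>(descriptor);
            if (!queue->isValid() || queue->availableToWrite() <= 0) {
                ALOGE("HAL returns empty request metadata fmq, not use it");
                queue = nullptr;  // fall back to carrying metadata in the request itself
            }
        });
if (!requestQueueRet.isOk()) {
    ALOGE("Transaction error when getting request metadata fmq: %s",
            requestQueueRet.description().c_str());
    return DEAD_OBJECT;
}
// The result queue is fetched the same way via getCaptureResultMetadataQueue().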
Here we focus on the mDevice->ops->initialize() call:
c
typedef struct camera3_device_ops {
int (*initialize)(const struct camera3_device *,
const camera3_callback_ops_t *callback_ops);
int (*configure_streams)(const struct camera3_device *,
camera3_stream_configuration_t *stream_list);
int (*register_stream_buffers)(const struct camera3_device *,
const camera3_stream_buffer_set_t *buffer_set);
const camera_metadata_t* (*construct_default_request_settings)(
const struct camera3_device *,
int type);
int (*process_capture_request)(const struct camera3_device *,
camera3_capture_request_t *request);
void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
vendor_tag_query_ops_t* ops);
void (*dump)(const struct camera3_device *, int fd);
int (*flush)(const struct camera3_device *);
/* reserved for future use */
void *reserved[8];
} camera3_device_ops_t;
typedef struct camera3_device {
hw_device_t common;
camera3_device_ops_t *ops;
void *priv;
} camera3_device_t;
From these structs we can see that initialize is a function pointer; its actual implementation lives in QCamera3HardwareInterface, wired up as sketched below.
Note that hardware/qcom/camera is the AOSP reference HAL; vendors typically replace it with their own HAL implementation, so for a real project you should follow the HAL that the project actually ships.
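In the Qualcomm HAL this wiring lives in QCamera3HWI.cpp: mCameraDevice.ops points at a static camera3_device_ops_t table whose entries are static trampoline functions that recover the C++ object from device->priv. A paraphrased sketch (entries abridged; consult your tree):
cpp
// Paraphrased from QCamera3HWI.cpp: the ops table that device->ops points at.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize              = QCamera3HardwareInterface::initialize,
    .configure_streams       = QCamera3HardwareInterface::configure_streams,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .dump                    = QCamera3HardwareInterface::dump,
    .flush                   = QCamera3HardwareInterface::flush,
    // remaining entries omitted
};

// Static trampoline: recovers the C++ instance stashed in device->priv and
// forwards to the member function analyzed below.
int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
        const camera3_callback_ops_t *callback_ops)
{
    QCamera3HardwareInterface *hw =
            reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -ENODEV;
    }
    return hw->initialize(callback_ops);
}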
/hardware/qcom/camera/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
cpp
int QCamera3HardwareInterface::initialize(
const struct camera3_callback_ops *callback_ops)
{
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
int rc;
LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
pthread_mutex_lock(&mMutex);
// Validate current state
switch (mState) {
case OPENED:
/* valid state */
break;
default:
LOGE("Invalid state %d", mState);
rc = -ENODEV;
goto err1;
}
rc = initParameters();
if (rc < 0) {
LOGE("initParamters failed %d", rc);
goto err1;
}
mCallbackOps = callback_ops;
    // mCameraHandle->camera_handle: the camera handle
    // add the first channel
mChannelHandle = mCameraHandle->ops->add_channel(
mCameraHandle->camera_handle, NULL, NULL, this);
if (mChannelHandle == 0) {
LOGE("add_channel failed");
rc = -ENOMEM;
pthread_mutex_unlock(&mMutex);
return rc;
}
pthread_mutex_unlock(&mMutex);
mCameraInitialized = true;
mState = INITIALIZED;
LOGI("X");
return 0;
err1:
pthread_mutex_unlock(&mMutex);
return rc;
}
This function must be called after camera_module_t's open() and before any other camera3_device_ops entry point. Its main job is to register the callbacks implemented by the upper layer with the HAL, and vendors can add their own initialization here as needed.
A channel is used to manage streams; one channel can operate on multiple streams.
The first channel is added to mm_camera via add_channel in QCamera3HardwareInterface::initialize().
The mCameraHandle->ops->add_channel call ultimately ends up in mm_camera_add_channel() in mm_camera.c:
c
uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t channel_cb,
void *userdata)
{
mm_channel_t *ch_obj = NULL;
uint8_t ch_idx = 0;
uint32_t ch_hdl = 0;
    // find an unused slot in the mm_camera_obj_t channel array (up to MM_CAMERA_CHANNEL_MAX channels)
for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
ch_obj = &my_obj->ch[ch_idx];
break;
}
}
    // found a free slot: initialize this channel
if (NULL != ch_obj) {
/* initialize channel obj */
        // zero out the channel structure
memset(ch_obj, 0, sizeof(mm_channel_t));
ch_hdl = mm_camera_util_generate_handler_by_num(my_obj->my_num, ch_idx);
ch_obj->my_hdl = ch_hdl;
ch_obj->state = MM_CHANNEL_STATE_STOPPED;
ch_obj->cam_obj = my_obj;
pthread_mutex_init(&ch_obj->ch_lock, NULL);
ch_obj->sessionid = my_obj->sessionid;
mm_channel_init(ch_obj, attr, channel_cb, userdata);
}
pthread_mutex_unlock(&my_obj->cam_lock);
return ch_hdl;
}
It then calls mm_channel_init():
c
int32_t mm_channel_init(mm_channel_t *my_obj,
mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t channel_cb,
void *userdata)
{
int32_t rc = 0;
my_obj->bundle.super_buf_notify_cb = channel_cb;
my_obj->bundle.user_data = userdata;
if (NULL != attr) {
my_obj->bundle.superbuf_queue.attr = *attr;
}
my_obj->num_s_cnt = 0;
memset(&my_obj->frame_sync, 0, sizeof(my_obj->frame_sync));
pthread_mutex_init(&my_obj->frame_sync.sync_lock, NULL);
mm_muxer_frame_sync_queue_init(&my_obj->frame_sync.superbuf_queue);
my_obj->bundle.is_cb_active = 1;
LOGD("Launch data poll thread in channel open");
snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
MM_CAMERA_POLL_TYPE_DATA);
/* change state to stopped state */
my_obj->state = MM_CHANNEL_STATE_STOPPED;
return rc;
}
mm_channel_init() calls mm_camera_poll_thread_launch() with MM_CAMERA_POLL_TYPE_DATA, which starts the data-polling thread (CAM_dataPoll) for the newly opened channel.
Apart from QCamera3ReprocessChannel::initialize(), no other HAL-layer channel calls add_channel again; in other words, the many channels at the HAL level map down to a single mm-camera channel that manages all the streams.
Also note that channel_cb is null here, as can be seen from QCamera3HardwareInterface::initialize(), which passes NULL for it when calling add_channel.
c
int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
mm_camera_poll_thread_type_t poll_type)
{
int32_t rc = 0;
size_t i = 0, cnt = 0;
poll_cb->poll_type = poll_type;
//Initialize poll_fds
cnt = sizeof(poll_cb->poll_fds) / sizeof(poll_cb->poll_fds[0]);
for (i = 0; i < cnt; i++) {
poll_cb->poll_fds[i].fd = -1;
}
//Initialize poll_entries
cnt = sizeof(poll_cb->poll_entries) / sizeof(poll_cb->poll_entries[0]);
for (i = 0; i < cnt; i++) {
poll_cb->poll_entries[i].fd = -1;
}
//Initialize pipe fds
poll_cb->pfds[0] = -1;
poll_cb->pfds[1] = -1;
    // create the command pipe from the fds (not yet part of the poll set at this point)
rc = pipe(poll_cb->pfds);
if(rc < 0) {
LOGE("pipe open rc=%d\n", rc);
return -1;
}
poll_cb->timeoutms = -1; /* Infinite seconds */
LOGD("poll_type = %d, read fd = %d, write fd = %d timeout = %d",
poll_cb->poll_type,
poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
pthread_mutex_init(&poll_cb->mutex, NULL);
pthread_cond_init(&poll_cb->cond_v, NULL);
/* launch the thread */
pthread_mutex_lock(&poll_cb->mutex);
poll_cb->status = 0;
pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
if(!poll_cb->status) {
pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
}
pthread_mutex_unlock(&poll_cb->mutex);
LOGD("End");
return rc;
}
This function creates a pipe and then spawns a thread that runs mm_camera_poll_thread():
c
static void *mm_camera_poll_thread(void *data)
{
mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
mm_camera_cmd_thread_name(poll_cb->threadName);
/* add pipe read fd into poll first */
poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
mm_camera_poll_sig_done(poll_cb);
mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
return mm_camera_poll_fn(poll_cb);
}
That thread function tail-calls mm_camera_poll_fn(), which is the actual thread loop:
c
static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
{
int rc = 0, i;
if (NULL == poll_cb) {
LOGE("poll_cb is NULL!\n");
return NULL;
}
LOGD("poll type = %d, num_fd = %d poll_cb = %p\n",
poll_cb->poll_type, poll_cb->num_fds,poll_cb);
    // main polling loop
do {
for(i = 0; i < poll_cb->num_fds; i++) {
poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
}
        // block in poll() on the pipe fd and any registered stream/event fds
rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
if(rc > 0) {
if ((poll_cb->poll_fds[0].revents & POLLIN) &&
(poll_cb->poll_fds[0].revents & POLLRDNORM)) {
/* if we have data on pipe, we only process pipe in this iteration */
LOGD("cmd received on pipe\n");
mm_camera_poll_proc_pipe(poll_cb);
} else {
            // otherwise check the remaining fds for ctrl events / stream data
for(i=1; i<poll_cb->num_fds; i++) {
/* Checking for ctrl events */
if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
(poll_cb->poll_fds[i].revents & POLLPRI)) {
LOGD("mm_camera_evt_notify\n");
if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
}
}
if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
(poll_cb->poll_fds[i].revents & POLLIN) &&
(poll_cb->poll_fds[i].revents & POLLRDNORM)) {
LOGD("mm_stream_data_notify\n");
if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
                        // invoke notify_cb to signal that a stream buffer has been filled
poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
}
}
}
}
} else {
/* in error case sleep 10 us and then continue. hard coded here */
usleep(10);
continue;
}
} while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
return NULL;
}
When a command arrives on the pipe, the loop calls mm_camera_poll_proc_pipe():
c
static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
{
ssize_t read_len;
int i;
mm_camera_sig_evt_t cmd_evt;
    // read the cmd_evt from the pipe (written e.g. by mm_camera_poll_thread_add_poll_fd)
read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
LOGD("read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
switch (cmd_evt.cmd) {
case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
/* we always have index 0 for pipe read */
poll_cb->num_fds = 0;
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type &&
poll_cb->num_fds < MAX_STREAM_NUM_IN_BUNDLE) {
if (poll_cb->poll_entries[0].fd >= 0) {
/* fd is valid, we update poll_fds */
                // the fd is valid: copy the registered fd into poll_fds
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
}
} else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type &&
poll_cb->num_fds <= MAX_STREAM_NUM_IN_BUNDLE) {
for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
if(poll_cb->poll_entries[i].fd >= 0) {
/* fd is valid, we update poll_fds to this fd */
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
} else {
/* fd is invalid, we set the entry to -1 to prevent polling.
* According to spec, polling will not poll on entry with fd=-1.
* If this is not the case, we need to skip these invalid fds
* when updating this array.
* We still keep fd=-1 in this array because this makes easier to
* map cb associated with this fd once incoming data avail by directly
* using the index-1(0 is reserved for pipe read, so need to reduce index by 1) */
poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
poll_cb->poll_fds[poll_cb->num_fds].events = 0;
poll_cb->num_fds++;
}
}
}
if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
mm_camera_poll_sig_done(poll_cb);
break;
case MM_CAMERA_PIPE_CMD_COMMIT:
mm_camera_poll_sig_done(poll_cb);
break;
case MM_CAMERA_PIPE_CMD_EXIT:
default:
mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
mm_camera_poll_sig_done(poll_cb);
break;
}
}
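Taken together, mm_camera_poll_thread_launch / mm_camera_poll_fn / mm_camera_poll_proc_pipe implement the classic self-pipe wake-up pattern: the poll thread always watches the pipe's read end alongside the registered stream/event fds, and other threads write a small command into the pipe to wake it up and make it rebuild its fd set. A minimal standalone illustration of the pattern (generic C, unrelated to the QCamera sources):
c
#include <poll.h>
#include <pthread.h>
#include <stdio.h>
#include <unistd.h>

static int pfds[2];                  /* pfds[0] = read end, pfds[1] = write end */

static void *poll_loop(void *arg)
{
    (void)arg;
    struct pollfd fds[1] = { { .fd = pfds[0], .events = POLLIN } };
    for (;;) {
        if (poll(fds, 1, -1) > 0 && (fds[0].revents & POLLIN)) {
            char cmd;
            read(pfds[0], &cmd, 1);
            if (cmd == 'x')          /* exit command */
                break;
            printf("woken up, would rebuild the poll fd set here (cmd=%c)\n", cmd);
        }
    }
    return NULL;
}

int main(void)
{
    pthread_t tid;
    pipe(pfds);
    pthread_create(&tid, NULL, poll_loop, NULL);

    write(pfds[1], "u", 1);          /* ask the loop to update its fd set */
    write(pfds[1], "x", 1);          /* ask the loop to exit */
    pthread_join(tid, NULL);
    close(pfds[0]);
    close(pfds[1]);
    return 0;
}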
At this point the add_channel flow is complete and the camera's polling pipeline has been created; the next step is to wait for streams to be added.