camera provider
hardware\interfaces\camera\provider\2.4\ICameraProvider.hal
//定义了cameraservice访问provider的接口
hardware\interfaces\camera\provider\2.4\ICameraProviderCallback.hal
//定义了provider访问cameraservice的回调接口
目录
APP
Libui
gralloc
cameraservice
HAL
驱动
点亮Camera
高通平台硬件模块
V4L2
media子系统
内核内存管理
分页管理
伙伴系统
内核申请内存函数
DMA-BUFF
ION内存
相机算法
DisplayManagerService
CameraX
// Runtime permission gate: if CAMERA has not been granted yet, ask the user
// for it and abort this code path until the grant result arrives.
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
Log.i(TAG,"checkSelfPermission");
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, CAMERA_REQUEST_CODE);
return;
} // acquire the camera permission
mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); //获取cameramanager
mCameraManager.openCamera("0", mCameraDeviceStateCallback, mhandler); //打开相机
mBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); //mCameraDeviceStateCallback 回调里面创建CaptureRequest.Builder
mCameraDevice.createCaptureSession(outputs,mCameraCaptureSessionStateCallback, mhandler);
//创建capturesession
session.setRepeatingRequest(mBuilder.build(),mCameraCaptureSessionCaptureCallback, mhandler); // mCameraCaptureSessionStateCallback里面开始request
mImageReader = ImageReader.newInstance(4000, 3000, ImageFormat.JPEG, 5);//构建imagereader
mImageReader.getSurface();//imageReader的surface,用来获取拍照数据;
//imagereader获取数据的回调
// Callback for retrieving captured data from the ImageReader: invoked on
// mhandler's thread each time a new image lands in the reader's queue.
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
ImageReaderCount++; // running count of frames delivered so far
Log.i(TAG, "onImageAvailable"+ImageReaderCount);
// Pull the next image out of the reader's queue ...
Image image = reader.acquireNextImage();
//Log.i(TAG,""+image.toString()+ "fromat:"+image.getFormat());
// ... and close it promptly so the reader's buffer queue does not fill up.
image.close();
}
}, mhandler);
http://www.voidcn.com/article/p-ybtqqlea-tu.html
libui库提供了Surface图形界面框架,包括上层应用的调用接口和与SurfaceFlinger库的通信接口,图形界面的具体实现由SurfaceFlinger库完成。Android上层应用的调用接口主要包含SurfaceComposerClient、
SurfaceControl和Surface三个主要数据结构。
https://www.jianshu.com/p/eb19ef10d8d9
/* buffer will be used as an OpenGL ES texture */
GRALLOC_USAGE_HW_TEXTURE = 0x00000100,
/* buffer will be used as an OpenGL ES render target */
GRALLOC_USAGE_HW_RENDER = 0x00000200,
/* buffer will be used by the 2D hardware blitter */
GRALLOC_USAGE_HW_2D = 0x00000400,
/* buffer will be used by the HWComposer HAL module */
GRALLOC_USAGE_HW_COMPOSER = 0x00000800,
/* buffer will be used with the framebuffer device */
GRALLOC_USAGE_HW_FB = 0x00001000,
/* buffer will be used with the HW video encoder */
GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000,
/* buffer will be written by the HW camera pipeline */
GRALLOC_USAGE_HW_CAMERA_WRITE = 0x00020000,
/* buffer will be read by the HW camera pipeline */
GRALLOC_USAGE_HW_CAMERA_READ = 0x00040000,
/* buffer will be used as part of zero-shutter-lag queue */
GRALLOC_USAGE_HW_CAMERA_ZSL = 0x00060000,
/* mask for the camera access values */
GRALLOC_USAGE_HW_CAMERA_MASK = 0x00060000,
/* mask for the software usage bit-mask */
GRALLOC_USAGE_HW_MASK = 0x00071F00,
Gralloc的含义是Graphics Alloc(图形分配)。它对上为libui提供服务,为其分配显存、刷新显示等;对下对framebuffer进行管理。
framebuffer是内核中驱动程序提供的接口。
它把显示设备抽象地映射到帧缓冲区——可以认为是显存!
Camera3stream:
status_t Camera3Device::configureStreamsLocked(int operatingMode,
const CameraMetadata& sessionParams, bool notifyRequestThread)
Camera3device
Camera3Device::requestStreamBuffers
从摄像头传输框架请求缓冲区
Camera3Device::returnStreamBuffers
将额外的缓冲区返回到摄像头传输框架
Camera3BufferManager
Camera3Stream::prepareNextBuffer
Camera3OutputStream::getBufferLocked
FenceFD
上面参数usage用来描述要分配的图形缓冲区的用途。
GRALLOC_USAGE_HW_FB,那么就必须要系统帧缓冲区中分配,即framebuffer中
目前大多数平台采用Android的ION机制分配共享的graphicbuffer。
status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd)
ANativeWindowBuffer ANativeWindow Surface
https://blog.csdn.net/sunnytina/article/details/52370837
CameraDeviceClient
CameraOfflineSessionClient
binder::Status H2BCameraDeviceCallbacks::onResultReceived(
}
H2BCameraDeviceCallbacks::H2BCameraDeviceCallbacks(const sp& base) : CBase(base) { }
/frameworks/av/services/camera/libcameraservice/hidl/HidlCameraService.cpp
Return HidlCameraService::connectDevice(const sp& hCallback,
const hidl_string& cameraId,
connectDevice_cb _hidl_cb) {
sp hybridCallbacks = new H2BCameraDeviceCallbacks(hCallback);
}
/frameworks/hardware/interfaces/cameraservice/service/2.0/ICameraService.hal
interface ICameraService {
connectDevice(ICameraDeviceCallback callback, string cameraId)
generates (Status status, ICameraDeviceUser device)
}
//libcamera/moudle 下的例子
v4l2_camera_hal.cpp
/* HAL module methods table: the framework opens this camera module through
 * the registered open_dev entry point. */
static hw_module_methods_t v4l2_module_methods = {
.open = v4l2_camera_hal::open_dev};
//inflight队列,APPrequest的放入队列中,然后有一个enqueue线程不停的从该队列中获取request后,用自己申请的内存调用IOCTL QBUF到V4L2框架中,然后另外一个dequeue线程从V4L2中 DQBUF后,将获取的buff数据拷贝到graphicbuff中,调用process_capture_result返回到系统中;
request携带的buffer来源实际上是“通过Surface向display service(进程名:vendor.qti.hardware.display.allocator-service)申请的GraphicBuffer”。
ImageBufferManager
Buffermanager
CSL设备的CSLBuffer CSL是Hw camera module的CSL内存
Gralloc的GrallocBuffer Gralloc是Hw Gralloc module的内存
Gralloc分配的内存需要映射到CSL设备后,供Camera使用。
metadata创建
std::unique_ptr metadata;
int res = GetV4L2Metadata(v4l2_wrapper, &metadata);//构建meta
PartialMetadataSet components;
V4L2Camera(id, std::move(v4l2_wrapper), std::move(metadata));//构建每个V4L2的metadata;
HAL统计了所有meta,
metadata的填充
metadata_->FillResultMetadata(&request->settings);
metadata->append(additional_metadata);
component->PopulateDynamicFields(&additional_metadata);
UpdateMetadata(metadata, delegate_->tag(), value);
metadata数据流
返回到framework
//=======================================================
/frameworks/av/camera/CameraMetadata.cpp
86 const camera_metadata_t* CameraMetadata::getAndLock() const {
87 mLocked = true;
88 return mBuffer;
89 }
//=======================================================
/system/media/camera/src/camera_metadata.c
update_camera_metadata_entry
Bring Up Actuator:https://blog.csdn.net/Mr_ZJC/article/details/105736925
csid
csiphy
vfe
ret = devm_request_irq(dev, vfe->irq, vfe->ops->isr,IRQF_TRIGGER_RISING, vfe->irq_name, vfe); //申请中断函数
/*
 * Composite-done interrupt handler: locate the first write-master slot that
 * is mapped to the pixel line and forward completion to its WM-done handler.
 * (comp identifies the composite group; unused in this lookup.)
 */
static void vfe_isr_comp_done(struct vfe_device *vfe, u8 comp)
{
	unsigned int wm = 0;

	while (wm < ARRAY_SIZE(vfe->wm_output_map)) {
		if (vfe->wm_output_map[wm] == VFE_LINE_PIX) {
			vfe_isr_wm_done(vfe, wm);
			return;
		}
		wm++;
	}
}
/* Sketch: write-master done handler hands the finished buffer to vb2. */
vfe_isr_wm_done()
{
vb2_buffer_done(); // marks buffer processing as complete
}
/*
 * struct video_device - kernel object behind a V4L2 device node.
 * Ties together the character device, the owning v4l2_device, the
 * driver-supplied file/ioctl op tables, the control handler and the
 * videobuf2 queue used for streaming I/O.
 */
struct video_device
{
#if defined(CONFIG_MEDIA_CONTROLLER)
struct media_entity entity; /* this node as an entity in the media graph */
struct media_intf_devnode *intf_devnode;
struct media_pipeline pipe;
#endif
const struct v4l2_file_operations *fops; /* driver-filled fops; v4l2_open() dispatches here */
u32 device_caps;
/* sysfs */
struct device dev; /* embedded core device (sysfs entry) */
struct cdev *cdev; /* chardev; its ops are set to v4l2_fops at registration */
struct v4l2_device *v4l2_dev; /* owning top-level V4L2 device */
struct device *dev_parent;
struct v4l2_ctrl_handler *ctrl_handler; /* controls (exposure, gain, ...) */
struct vb2_queue *queue; /* vb2 buffer queue; used by vb2_ioctl_qbuf() and friends */
struct v4l2_prio_state *prio;
/* device info */
char name[32];
enum vfl_devnode_type vfl_type;
enum vfl_devnode_direction vfl_dir;
int minor;
u16 num;
unsigned long flags; /* V4L2_FL_* bits, e.g. V4L2_FL_REGISTERED */
int index;
/* V4L2 file handles */
spinlock_t fh_lock; /* protects fh_list */
struct list_head fh_list; /* open v4l2_fh file handles on this node */
int dev_debug;
v4l2_std_id tvnorms;
/* callbacks */
void (*release)(struct video_device *vdev);
const struct v4l2_ioctl_ops *ioctl_ops; /* driver ioctl table; reached via ops->vidioc_* */
DECLARE_BITMAP(valid_ioctls, BASE_VIDIOC_PRIVATE);
struct mutex *lock;
};
/*
 * File operations installed on every video_device's cdev at registration
 * time (video_register_device sets vdev->cdev->ops = &v4l2_fops); each
 * entry forwards into the driver's v4l2_file_operations.
 */
static const struct file_operations v4l2_fops = {
.owner = THIS_MODULE,
.read = v4l2_read,
.write = v4l2_write,
.open = v4l2_open,
.get_unmapped_area = v4l2_get_unmapped_area,
.mmap = v4l2_mmap,
.unlocked_ioctl = v4l2_ioctl,
#ifdef CONFIG_COMPAT
.compat_ioctl = v4l2_compat_ioctl32,
#endif
.release = v4l2_release,
.poll = v4l2_poll,
.llseek = no_llseek,
};
/* Sketch: the core open() merely dispatches to the driver-registered fops. */
static int v4l2_open(struct inode *inode, struct file *filp)
{
ret = vdev->fops->open(filp); // invoke the fops filled in by the platform driver
}
/* Sketch: per-open bookkeeping of a V4L2 file handle plus pipeline power-up. */
video_open()
{
v4l2_fh_init(vfh, vdev);
v4l2_fh_add(vfh); // registers a V4L2 file handle for this open file
ret = v4l2_pipeline_pm_get(&vdev->entity); // power up the pipeline
}
/* Sketch: pipeline power management. */
v4l2_pipeline_pm_use()
{
pipeline_pm_power();// walks the media graph and powers up every node
}
pipeline_pm_power_one();
IOCTL
v4l2-ioctl.c
/* Dispatch table mapping each V4L2 ioctl command to its core handler and
 * debug-print helper; consulted when dispatching ioctls. */
static const struct v4l2_ioctl_info v4l2_ioctls[] = {
IOCTL_INFO(VIDIOC_QUERYCAP, v4l_querycap, v4l_print_querycap, 0),
IOCTL_INFO(VIDIOC_ENUM_FMT, v4l_enum_fmt, v4l_print_fmtdesc, 0),
IOCTL_INFO(VIDIOC_G_FMT, v4l_g_fmt, v4l_print_format, 0),
...
};// table of the handler functions for every ioctl command
static long __video_do_ioctl(struct file *file,
unsigned int cmd, void *arg)()
{
}
//分析VIDIOC_QBUF
v4l_dqbuf(){ops->vidioc_dqbuf} //调用到注册video_device时候填充的ioctl_ops;
vdev->ioctl_ops = &msm_vid_ioctl_ops;
/* Driver ioctl ops: the installed qbuf handler is a framework (vb2) helper,
 * so control bounces back into the framework layer. */
static const struct v4l2_ioctl_ops msm_vid_ioctl_ops = {
.vidioc_qbuf = vb2_ioctl_qbuf,// the filled-in function is a framework helper, calling back into the framework layer
};
//videobuf2-v4l2.c
vb2_ioctl_qbuf()
{
return vb2_qbuf(vdev->queue, vdev->v4l2_dev->mdev, p); //传入vb2_queue,mediadevice
}
/*
 * Queue one v4l2_buffer: validate it (resolving an optional media request),
 * hand it to the vb2 core, then drop the request reference.
 * (Excerpt - local declarations and returns omitted in these notes.)
 */
int vb2_qbuf(struct vb2_queue *q, struct media_device *mdev,
struct v4l2_buffer *b)
{
ret = vb2_queue_or_prepare_buf(q, mdev, b, false, &req); // validate buffer, look up request
ret = vb2_core_qbuf(q, b->index, b, req); // enqueue into the vb2 core
if (req)
media_request_put(req); // release the ref taken during validation
}
/* Sketch: pre-queue validation of the buffer. */
vb2_queue_or_prepare_buf()
{
// verify the buffer is legal; check its various parameters
req = media_request_get_by_fd(mdev, b->request_fd);// request_fd leads to the file's private data, which holds the media request
}
/* Sketch: core enqueue path. With a media request the buffer is bound to
 * the request and returns early; otherwise it goes straight onto the
 * queued list and streaming is started. */
vb2_core_qbuf()
{
if (req) {
q->uses_requests = 1;
media_request_object_bind
media_request_get();
__fill_v4l2_buffer();// fill the vb2_queue buffer state back into the v4l2_buffer
return ;
}
list_add_tail(&vb->queued_entry, &q->queued_list); // append to queued_list
vb2_start_streaming(q);
}
vb2_queue_init 填充buf_ops
/* Sketch: push all already-queued buffers to the driver, then start streaming. */
vb2_start_streaming()
{
list_for_each_entry(vb, &q->queued_list, queued_entry) // process every buffer on queued_list
__enqueue_in_driver(vb);
q->start_streaming_called = 1;
ret = call_qop(q, start_streaming, q,
atomic_read(&q->owned_by_drv_count));// invoke the driver's start_streaming op
}
/* Sketch: hand one buffer down to the driver. */
__enqueue_in_driver()
{
call_void_vb_qop(vb, buf_queue, vb); // calls back into the driver-filled buf_queue op
}
//=================vfe============================//
video_out->ops = &camss_vfe_video_ops; //驱动填充
/* Sketch: driver-side buf_queue implementation for the VFE. */
static int vfe_queue_buffer(struct camss_video *vid,
struct camss_buffer *buf)
{
vfe_buf_update_wm_on_new(vfe, output, buf);// writes the buffer address into the hardware registers
}
/* Sketch: start the media pipeline and stream-on every connected subdev. */
static int video_start_streaming(struct vb2_queue *q, unsigned int count)
{
media_pipeline_start();
ret = v4l2_subdev_call(subdev, video, s_stream, 1); // every subdev linked together gets s_stream
}
//分析VIDIOC_DQBUF
//分析VIDIOC_QUERYBUF
v4l2_buffer device_buffer;
// Use QUERYBUF to ensure our buffer/device is in good shape,
// and fill out remaining fields.
if (IoctlLocked(VIDIOC_QUERYBUF, &device_buffer) < 0)
//VFE硬件处理完成后产生中断
/* Sketch: IRQ completion - swap in the next pending buffer, complete the old one. */
vfe_isr_wm_done()
{
ready_buf = output->buf[!active_index]; // fetch the completed buffer
output->buf[!active_index] = vfe_buf_get_pending(output);// program the next buffer into the hardware
vb2_buffer_done(&ready_buf->vb.vb2_buf, VB2_BUF_STATE_DONE);// mark this buffer as done
}
/* Sketch: completion path - publish the buffer and wake waiters. */
vb2_buffer_done()
{
list_add_tail(&vb->done_entry, &q->done_list);// add to done_list
wake_up(&q->done_wq);// notify threads waiting for a buffer
}
configureStreams
setupStreams
SetFormat
RequestBuffers
VIDIOC_REQBUFS
VIDIOC_REQBUFS:分配内存
VIDIOC_QUERYBUF:查询VIDIOC_REQBUFS中分配的缓冲区信息(长度、mmap偏移量等),供用户空间mmap映射使用
VIDIOC_QUERYCAP:查询驱动功能
VIDIOC_ENUM_FMT:获取当前驱动支持的视频格式
VIDIOC_S_FMT:设置当前驱动的视频捕获格式
VIDIOC_G_FMT:读取当前驱动的视频捕获格式
VIDIOC_TRY_FMT:验证当前驱动的显示格式
VIDIOC_CROPCAP:查询驱动的修剪能力
VIDIOC_S_CROP:设置视频信号的边框
VIDIOC_G_CROP:读取视频信号的边框
VIDIOC_QBUF:把一个缓冲区放入驱动的待处理队列(入队)
VIDIOC_DQBUF:从驱动队列中取出一个已处理完成的缓冲区(出队)
VIDIOC_STREAMON:开始视频显示函数
VIDIOC_STREAMOFF:结束视频显示函数
VIDIOC_QUERYSTD:检查当前视频设备支持的标准,例如PAL或NTSC。
REQUEST_ALLOC
media_device_request_alloc
media_request_alloc
vb2_core_dqbuf
media_request_put
media_request_clean
media_create_pad_link
media_pipeline_start
__media_pipeline_start
media_graph_walk_start
media_graph_walk_next
VIDIOC_ENUM_FMT
VIDIOC_QBUF:和驱动交换一个BUFFER;
VIDIOC_DQBUF
luvcview -L
v4l2_device_register
v4l2_ctrl_handler_init
v4l2_ctrl_new_std
v4l2_ctrl_handler_setup
videobuf_queue_vmalloc_init
video_register_device
Videobuf-core.c
videobuf_read_one
struct videobuf_queue{
struct videobuf_qtype_ops
}
videobuf_alloc_vb: kzalloc 申请videobuff
videobuf_dqbuf
struct videobuf_queue_ops
struct video_device
VIDIOC_REQBUFS:V4L2_MEMORY_USERPTR
struct videobuf_queue
struct videobuf_buffer
struct videobuf_queue_ops
V4L2_MEMORY_USERPTR
v4l2_device_register_subdev
media_create_pad_link
MEDIA_IOC_ENUM_ENTITIES
vb2_buffer_done:
处理完之后放入done_list,
list_add_tail(&vb->done_entry, &q->done_list);
vb2_core_streamon
vb2_start_streaming
ret = call_qop(q, start_streaming, q,atomic_read(&q->owned_by_drv_count));
vb2_core_dqbuf
__vb2_get_done_vb
__vb2_wait_for_done_vb
/* Sketch: registration of a video device node. */
video_register_device()
{
vdev->cdev = cdev_alloc(); // allocate a cdev
vdev->cdev->ops = &v4l2_fops; // install the core fops
ret = cdev_add(vdev->cdev, MKDEV(VIDEO_MAJOR, vdev->minor), 1); // add the chardev under the video major
ret = device_register(&vdev->dev); // register the device
v4l2_device_get(vdev->v4l2_dev); // bump the V4L2 device's refcount
ret = video_register_media_controller(vdev); // register the media entity
set_bit(V4L2_FL_REGISTERED, &vdev->flags); // activate the node
}
v4l2_device:这个是整个输入设备的总结构体,可以认为它是整个 V4L2 框架的入口,充当驱动的管理者以及入口监护人。由该结构体引申出来 v4l2_subdev。用于视频输入设备整体的管理,有多少输入设备就有多少个v4l2_device抽象(比如一个USB摄像头整体就可以看作是一个 V4L2 device)。再往下分是输入子设备,对应的是例如 ISP、CSI、MIPI 等设备,它们是从属于一个 V4L2 device 之下的。
media_device:用于运行时数据流的管理,嵌入在 V4L2 device 内部,运行时的意思就是:一个 V4L2 device 下属可能有非常多同类型的子设备(两个或者多个 sensor、ISP 等),那么在设备运行的时候我怎么知道我的数据流需要用到哪一个类型的哪一个子设备呢。这个时候就轮到 media_device 出手了,它为这一坨的子设备建立一条虚拟的连线,建立起来一个运行时的 pipeline(管道),并且可以在运行时动态改变、管理接入的设备。
v4l2_ctrl_handler:控制模块,提供子设备(主要是 video 和 ISP 设备)在用户空间的特效操作接口,比如你想改变下输出图像的亮度、对比度、饱和度等等,都可以通过这个来完成。
vb2_queue:提供内核与用户空间的 buffer 流转接口,输入设备产生了一坨图像数据,在内核里面应该放在哪里呢?能放几个呢?是整段连续的还是还是分段连续的又或者是物理不连续的?用户怎么去取用呢?都是它在管理。
Mm_struct
内存划分模型
https://www.cnblogs.com/walter-huang/p/4275723.html
>=0xc000 0000 内核虚拟存储器
<0xc000 0000 Stack(用户栈)
>=0x4000 0000 文件映射区
<0x4000 0000 Heap(运行时堆) 通过brk/sbrk系统调用扩大堆,向上增长。
.data、.bss(读写段) 从可执行文件中加载
>=0x0804 8000 .init、.text、.rodata(只读段) 从可执行文件中加载
<0x0804 8000 保留区域
DMA
内核管理内存方式
页
cat /proc/vmallocinfo
cat /proc/meminfo
cat /proc/slabinfo
struct kmem_cache
匿名页
匿名页的反向映射
一个PTE(Page Table Entry)对应一个物理页
每个进程 PT
kmem_cache_alloc_node cgroup,进程调度,内存管理,cpu抢占等细节
slab_alloc_node
cgroup空间
刷cache
为了避免内存碎片化,或者为一些有着特殊内存需求的硬件,比如GPUs、display controller以及camera等,在系统启动的时候,会为他们预留一些memory pools,这些memory pools就由ION来管理。通过ION就可以在硬件以及user space之间实现zero-copy的内存share。
ION 的前任是 PMEM
ION 是在各种 heaps 上分配内存,通过 ion_buffer 来描述所分配的内存
ION 是为了解决内存碎片管理而引入的通用内存管理器
struct ion_device、struct ion_client、struct ion_heap、struct ion_handle 和 struct ion_buffer
struct ion_heap_ops { int (*allocate) (struct ion_heap *heap, struct ion_buffer *buffer, unsigned long len, unsigned long align, unsigned long flags); void (*free) (struct ion_buffer *buffer); int (*phys) (struct ion_heap *heap, struct ion_buffer *buffer,
ion_phys_addr_t *addr, size_t *len); struct scatterlist *(*map_dma) (struct ion_heap *heap, struct ion_buffer *buffer); void (*unmap_dma) (struct ion_heap *heap,
struct ion_buffer *buffer); void * (*map_kernel) (struct ion_heap *heap,
struct ion_buffer *buffer); void (*unmap_kernel) (struct ion_heap *heap,
struct ion_buffer *buffer); int (*map_user) (struct ion_heap *heap, struct ion_buffer *buffer, struct vm_area_struct *vma);
};
比较ION和DMABUF
· ION和DMABUF都是通过传递一个匿名file descriptor对象,给其他client一个基于引用计数的访问权限,从而达到分享内存的目的。
· ION通过一个可分享和追踪的方式从预留的memory pool中分配内存。
· DMABUF更多的专注于buffer导入、导出以及同步的方式来实现在NON-ARM架构上的buffer的分享。
· ION目前只支持Android kernel。
· ION所有的user-space program都可以通过/dev/ion接口来分配ION内存。但是Android会通过验证user和group IDs的方式来阻止对ION的非授权访问。
GPU实现
算法实现
主副摄帧同步:
美颜
夜景
超分
场景检测
视频防抖
多帧降噪
https://developer.android.google.cn/training/camerax/vendor-extensions
等待队列
wait_queue_head_t
/* A wait queue: a spinlock-protected list of sleeping waiters. */
struct wait_queue_head {
spinlock_t lock; /* guards head */
struct list_head head; /* list of wait_queue_entry waiters */
};
init_waitqueue_head(&q->done_wq);//初始化等待队列
ret = wait_event_interruptible(q->done_wq,!list_empty(&q->done_list) || !q->streaming ||q->error);
wait_event(q->done_wq, !atomic_read(&q->owned_by_drv_count));
wake_up_all(&q->done_wq);
poll_wait(file, &q->done_wq, wait);
//============================
/*
 * Sleep on wq_head until condition becomes true, waking early for signals.
 * Fast path: if the condition already holds, return 0 without sleeping;
 * otherwise fall through to the slow path. Returns 0 on success or a
 * negative error when interrupted by a signal.
 */
#define wait_event_interruptible(wq_head, condition) \
({ \
int __ret = 0; \
might_sleep(); \
if (!(condition)) \
__ret = __wait_event_interruptible(wq_head, condition); \
__ret; \
})
/* Slow path: delegate to the generic wait loop in TASK_INTERRUPTIBLE state,
 * non-exclusive, with plain schedule() as the wait action. */
#define __wait_event_interruptible(wq_head, condition) \
___wait_event(wq_head, condition, TASK_INTERRUPTIBLE, 0, 0, \
schedule())
/*
 * Generic wait loop: register a wait entry on wq_head, then repeatedly set
 * the task state, re-check the condition and run cmd (e.g. schedule())
 * until the condition holds, or a signal breaks an interruptible wait.
 */
#define ___wait_event(wq_head, condition, state, exclusive, ret, cmd) \
({ \
__label__ __out; \
struct wait_queue_entry __wq_entry; \
long __ret = ret; /* explicit shadow */ \
\
init_wait_entry(&__wq_entry, exclusive ? WQ_FLAG_EXCLUSIVE : 0); \
for (;;) { \
long __int = prepare_to_wait_event(&wq_head, &__wq_entry, state);\
\
if (condition) \
break; \
\
if (___wait_is_interruptible(state) && __int) { \
__ret = __int; \
goto __out; \
} \
\
cmd; \
} \
finish_wait(&wq_head, &__wq_entry); \
__out: __ret; \
})
AN:
硬件模块(CSI IFE IPE )之间buff如何传递?
STAT统计模块的信息如何获取?
3A算法如何运行?
硬件IPE输出多路流,硬件直出,pipeline不同的,LINK不同,
request buff的时候,对应的PAD不同?
获取不同pipeline的硬件输出是同步还是异步?软件流程IOCTL的是同一个FD吗?
ImageReader返回的metadata在哪里?
多摄流程?切换摄像头的操作是在哪里实现的?
V4L2的videobuff QUEUE 有几个?
media device设备节点干嘛用?
/* Dispatch table for the /dev/mediaX node: maps each MEDIA_IOC_* command
 * to its handler; most take the graph mutex while walking topology. */
static const struct media_ioctl_info ioctl_info[] = {
MEDIA_IOC(DEVICE_INFO, media_device_get_info, MEDIA_IOC_FL_GRAPH_MUTEX),
MEDIA_IOC(ENUM_ENTITIES, media_device_enum_entities, MEDIA_IOC_FL_GRAPH_MUTEX),
MEDIA_IOC(ENUM_LINKS, media_device_enum_links, MEDIA_IOC_FL_GRAPH_MUTEX),
MEDIA_IOC(SETUP_LINK, media_device_setup_link, MEDIA_IOC_FL_GRAPH_MUTEX),
// calls back into the ops filled in when the media entity was registered
MEDIA_IOC(G_TOPOLOGY, media_device_get_topology, MEDIA_IOC_FL_GRAPH_MUTEX),
MEDIA_IOC(REQUEST_ALLOC, media_device_request_alloc, 0),
};
/* Media-entity ops for the VFE: link setup is driver-specific, link
 * validation uses the generic V4L2 subdev helper. */
static const struct media_entity_operations vfe_media_ops = {
.link_setup = vfe_link_setup,
.link_validate = v4l2_subdev_link_validate,
};
/*
 * Register a media_device: allocate and populate its devnode, register the
 * device node, then create the sysfs "model" attribute. On any failure the
 * devnode pointer is cleared and (after devnode registration) the node is
 * unregistered again; the devnode memory itself is freed by the
 * media_devnode_*() machinery.
 */
int __must_check __media_device_register(struct media_device *mdev,
struct module *owner)
{
struct media_devnode *devnode;
int ret;
devnode = kzalloc(sizeof(*devnode), GFP_KERNEL);
if (!devnode)
return -ENOMEM;
/* Register the device node. */
mdev->devnode = devnode;
devnode->fops = &media_device_fops;
devnode->parent = mdev->dev;
devnode->release = media_device_release;
/* Set version 0 to indicate user-space that the graph is static */
mdev->topology_version = 0;
ret = media_devnode_register(mdev, devnode, owner);
if (ret < 0) {
/* devnode free is handled in media_devnode_*() */
mdev->devnode = NULL;
return ret;
}
ret = device_create_file(&devnode->dev, &dev_attr_model);
if (ret < 0) {
/* devnode free is handled in media_devnode_*() */
mdev->devnode = NULL;
media_devnode_unregister_prepare(devnode);
media_devnode_unregister(devnode);
return ret;
}
dev_dbg(mdev->dev, "Media device registered\n");
return 0;
}
VIDIOC_QUERY_EXT_CTRL
ctrl_handler
v4l2_ctrl_handler_init
media_request_object_init(&hdl->req_obj);
//========================linux-5.9.10\drivers\media\i2c\imx274.c
设置曝光
v4l2_ctrl_s_ctrl(priv->ctrls.exposure, priv->ctrls.exposure->val);
v4l2_ctrl_new_std
D:\linux\linux-5.9.10\include\uapi\linux\v4l2-controls.h
#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17)
#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18)
#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19)
//定义了controlID;
D:\linux\linux-5.9.10\drivers\media\v4l2-core\v4l2-ctrls.c
ctrls->exposure_bias = v4l2_ctrl_new_int_menu(hdl, ops,
V4L2_CID_AUTO_EXPOSURE_BIAS,
ARRAY_SIZE(ev_bias_qmenu) - 1,
ARRAY_SIZE(ev_bias_qmenu)/2 - 1,
ev_bias_qmenu); //创建对应的ctrl
//===================================HAL===========================================
1、从驱动中获取的metadata;IOCTL获取;
2、
android/hardware/camera2/CameraMetadata.java
result.getPartialResults();
result.getPartialResults().get(0).get(CaptureResult.CONTROL_AE_TARGET_FPS_RANGE);
metadata->update
//===================================framework===========================================
/frameworks/av/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
auto ret = converter->mBase->onResultReceived(hResult, hResultExtras, in processResultMessage();//回调传入native metadata;
public void onResultReceived(CameraMetadataNative result, //这里回传了native metadata//
CaptureResultExtras resultExtras, PhysicalCaptureResultInfo physicalResults[])
throws RemoteException {
resultDispatch = new Runnable() {
@Override
public void run() {
final TotalCaptureResult resultAsCapture = new TotalCaptureResult(result,
request, resultExtras, partialResults, holder.getSessionId(),
physicalResults);//构建一个total结果,拍照完成后回调, callback.onCaptureCompleted(CameraOfflineSessionImpl.this,request, resultAsCapture);
}
}
/frameworks/base/core/java/android/hardware/camera2/CaptureResult.java
private final CameraMetadataNative mResults;
/**
 * Wraps a native metadata result. Takes ownership of {@code results} via
 * CameraMetadataNative.move (excerpt - other field assignments omitted in
 * these notes).
 */
public CaptureResult(CameraMetadataNative results, CaptureRequest parent,
CaptureResultExtras extras) {
mResults = CameraMetadataNative.move(results);
}
/frameworks/base/core/java/android/hardware/camera2/
CaptureRequest.java
327 setNativeInstance(mLogicalCameraSettings); in CaptureRequest()
377 setNativeInstance(mLogicalCameraSettings); in CaptureRequest()
587 setNativeInstance(mLogicalCameraSettings); in readFromParcel()
CaptureResult.java
223 setNativeInstance(mResults); in CaptureResult()
253 setNativeInstance(mResults); in CaptureResult()
CameraCharacteristics.java
212 setNativeInstance(mProperties); in CameraCharacteristics()
/frameworks/base/core/java/android/hardware/camera2/CameraMetadata.java
private CameraMetadataNative mNativeInstance = null;
/** Records the native metadata object that backs this wrapper instance. */
protected void setNativeInstance(CameraMetadataNative nativeInstance) {
this.mNativeInstance = nativeInstance;
}
/frameworks/base/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
==>native方法,调用到JNI方法
public CameraMetadataNative() {
super();
mMetadataPtr = nativeAllocate();
if (mMetadataPtr == 0) {
throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
}
}
/frameworks/base/core/jni/android_hardware_camera2_CameraMetadata.cpp
/*
 * JNI backing for CameraMetadataNative.nativeAllocate(): heap-allocates a
 * shared_ptr owning a new, empty CameraMetadata and returns its address to
 * Java as a jlong handle (Java stores it in mMetadataPtr).
 * Fix: the template arguments had been stripped when these notes were
 * pasted (reinterpret_cast / std::shared_ptr without <...> do not compile).
 */
static jlong CameraMetadata_allocate(JNIEnv *env, jclass thiz) {
    ALOGV("%s", __FUNCTION__);
    // Construct a CameraMetadata object and hand the owning pointer to Java.
    return reinterpret_cast<jlong>(new std::shared_ptr<CameraMetadata>(new CameraMetadata()));
}
/frameworks/av/camera/CameraMetadata.cpp
camera_metadata_t *mBuffer; //metadata对应的buff变量;
/system/media/camera/src/camera_metadata.c //camera_metadata_t的实现
std::unique_ptr
std::shared_ptr
//==========================获取cameramanager
mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
android/app/Activity.java
/**
 * Resolves a system service by name. The window and search services are
 * answered from this Activity's own cached managers; everything else is
 * delegated up the Context chain. Throws if called before onCreate(),
 * when no base context is attached yet.
 */
public Object getSystemService(@ServiceName @NonNull String name) {
if (getBaseContext() == null) {
throw new IllegalStateException(
"System services not available to Activities before onCreate()");
}
if (WINDOW_SERVICE.equals(name)) {
return mWindowManager;
}
if (SEARCH_SERVICE.equals(name)) {
ensureSearchManager();
return mSearchManager;
}
return super.getSystemService(name);
}
android/view/ContextThemeWrapper.java
/**
 * Returns system services, specializing only the layout inflater: the
 * wrapper lazily clones the base context's inflater into this themed
 * context and caches it. All other names are forwarded to the base.
 */
public Object getSystemService(String name) {
if (!LAYOUT_INFLATER_SERVICE.equals(name)) {
return getBaseContext().getSystemService(name);
}
if (mInflater == null) {
mInflater = LayoutInflater.from(getBaseContext()).cloneInContext(this);
}
return mInflater;
}
/frameworks/base/core/java/android/app/SystemServiceRegistry.java
SystemServiceRegistry.registerService
1417 private static void registerService(@NonNull String serviceName,
1418 @NonNull Class serviceClass, @NonNull ServiceFetcher serviceFetcher) {
1419 SYSTEM_SERVICE_NAMES.put(serviceClass, serviceName);
1420 SYSTEM_SERVICE_FETCHERS.put(serviceName, serviceFetcher);
1421 SYSTEM_SERVICE_CLASS_NAMES.put(serviceName, serviceClass.getSimpleName());
1422 }
1370 public static Object getSystemService(ContextImpl ctx, String name) {
1371 if (name == null) {
1372 return null;
1373 }
1374 final ServiceFetcher> fetcher = SYSTEM_SERVICE_FETCHERS.get(name);
1375 if (fetcher == null) {
1376 if (sEnableServiceNotFoundWtf) {
1377 Slog.wtf(TAG, "Unknown manager requested: " + name);
1378 }
1379 return null;
1380 }
798 registerService(Context.CAMERA_SERVICE, CameraManager.class,
799 new CachedServiceFetcher() {
800 @Override
801 public CameraManager createService(ContextImpl ctx) {
802 return new CameraManager(ctx);
803 }});
binder
//camera2\CameraManager.java
private CameraDevice openCameraDeviceUserAsync(String cameraId,
CameraDevice.StateCallback callback, Executor executor, final int uid)
throws CameraAccessException {
android.hardware.camera2.impl.CameraDeviceImpl deviceImpl =
new android.hardware.camera2.impl.CameraDeviceImpl(
cameraId,
callback,
executor,
characteristics,
mContext.getApplicationInfo().targetSdkVersion);
ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
cameraUser = cameraService.connectDevice(callbacks, cameraId,
mContext.getOpPackageName(), mContext.getAttributionTag(), uid);
}
private void connectCameraServiceLocked() {
ICameraService cameraService = ICameraService.Stub.asInterface(cameraServiceBinder);
mCameraService = cameraService;
}
private CameraDevice openCameraDeviceUserAsync(String cameraId,
CameraDevice.StateCallback callback, Executor executor, final int uid)
throws CameraAccessException {
ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
cameraUser = cameraService.connectDevice(callbacks, cameraId,
mContext.getOpPackageName(), mContext.getAttributionTag(),uid);
}
/frameworks/hardware/interfaces/cameraservice/service/2.0/ICameraService.hal
定义了接口
D:\and\av\services\camera\libcameraservice\hidl\HidlCameraService.cpp
using HCameraService = frameworks::cameraservice::service::V2_1::ICameraService;
D:\and\av\services\camera\libcameraservice\CameraService.cpp
void CameraService::onFirstRef()
{
sp hcs = HidlCameraService::getInstance(this);
hcs->registerAsService();//注册服务
}
D:\and\av\services\camera\libcameraservice\hidl\HidlCameraService.cpp
Return HidlCameraService::connectDevice(const sp& hCallback,
const hidl_string& cameraId,
connectDevice_cb _hidl_cb) {
binder::Status serviceRet = mAidlICameraService->connectDevice(
callbacks, String16(cameraId.c_str()), String16(""), std::unique_ptr(),
hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
}
Status CameraService::connectDevice(
const sp& cameraCb,
const String16& cameraId,
const String16& clientPackageName,
const std::unique_ptr& clientFeatureId,
int clientUid,
/*out*/
sp* device) {
}
//=======================================================//
驱动
IBinder cameraServiceBinder = ServiceManager.getService(CAMERA_SERVICE_BINDER_NAME);
ICameraService cameraService = ICameraService.Stub.asInterface(cameraServiceBinder);
/frameworks/base/core/java/android/os/ServiceManager.java
// Pasted from AOSP with its original line numbers; not compilable as-is.
// Looks up a binder by name: first from the local per-process cache that
// initServiceCache() seeded, otherwise via a remote lookup (rawGetService),
// returning null on RemoteException.
128 public static IBinder getService(String name) {
129 try {
130 IBinder service = sCache.get(name);
131 if (service != null) {
132 return service;
133 } else {
134 return Binder.allowBlocking(rawGetService(name));
135 }
136 } catch (RemoteException e) {
137 Log.e(TAG, "error in getService", e);
138 }
139 return null;
140 }
public static void initServiceCache(Map cache)
/frameworks/base/core/java/android/app/ActivityThread.java
1036 public final void bindApplication(String processName, ApplicationInfo appInfo,
1037 ProviderInfoList providerList, ComponentName instrumentationName,
1038 ProfilerInfo profilerInfo, Bundle instrumentationArgs,
1039 IInstrumentationWatcher instrumentationWatcher,
1040 IUiAutomationConnection instrumentationUiConnection, int debugMode,
1041 boolean enableBinderTracking, boolean trackAllocation,
1042 boolean isRestrictedBackupMode, boolean persistent, Configuration config,
1043 CompatibilityInfo compatInfo, Map services, Bundle coreSettings,
1044 String buildSerial, AutofillOptions autofillOptions,
1045 ContentCaptureOptions contentCaptureOptions, long[] disabledCompatChanges) {
ServiceManager.initServiceCache(services);
/frameworks/base/services/core/java/com/android/server/am/ActivityManagerService.java
5023 private boolean attachApplicationLocked(@NonNull IApplicationThread thread,
5024 int pid, int callingUid, long startSeq) {
/frameworks/base/core/java/android/os/IBinder.java
cameraservice
/frameworks/av/services/camera/libcameraservice/common/Camera2ClientBase.cpp
status_t CameraService::BasicClient::startCameraOps() {
sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
}
res = TClientBase::startCameraOps();
template
template
status_t Camera2ClientBase::initializeImpl(TProviderPtr providerPtr,
res = TClientBase::startCameraOps();
onCameraOpened
CameraService.cpp//
updateOpenCloseStatus
{
ret = it->getListener()->onCameraOpened(cameraId64, clientPackageName);
}
status_t CameraService::BasicClient::startCameraOps() {
}
status_t Camera2ClientBase::initializeImpl(TProviderPtr providerPtr,
const String8& monitorTags) {
res = TClientBase::startCameraOps();
}
status_t Camera2ClientBase::initialize(sp manager,
const String8& monitorTags) {
status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags) {
res = Camera2ClientBase::initialize(providerPtr, monitorTags);
}
status_t CameraDeviceClient::initialize(sp manager,
const String8& monitorTags) {
return initializeImpl(manager, monitorTags);
}
Status CameraService::connectHelper(const sp& cameraCb, const String8& cameraId,
int api1CameraId, int halVersion, const String16& clientPackageName,
const std::unique_ptr& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly,
/*out*/sp& device) {
err = client->initialize(mCameraProviderManager, mMonitorTags);
}