├── Yuv422UyvyToJpegEncoder.h ├── ImageConverter.h ├── README ├── Android.mk ├── Workers.h ├── V4l2Device.h ├── Camera.h ├── Yuv422UyvyToJpegEncoder.cpp ├── Workers.cpp ├── HalModule.cpp ├── ImageConverter.cpp ├── media_profiles.xml ├── DbgUtils.h ├── V4l2Device.cpp └── Camera.cpp /Yuv422UyvyToJpegEncoder.h: -------------------------------------------------------------------------------- 1 | #ifndef YUV422UYVYTOJPEGENCODER_H 2 | #define YUV422UYVYTOJPEGENCODER_H 3 | 4 | #include 5 | 6 | class Yuv422UyvyToJpegEncoder: public YuvToJpegEncoder { 7 | public: 8 | Yuv422UyvyToJpegEncoder(int* strides); 9 | virtual ~Yuv422UyvyToJpegEncoder() {} 10 | 11 | private: 12 | void configSamplingFactors(jpeg_compress_struct* cinfo); 13 | void compress(jpeg_compress_struct* cinfo, uint8_t* yuv, int* offsets); 14 | void deinterleave(uint8_t* yuv, uint8_t* yRows, uint8_t* uRows, 15 | uint8_t* vRows, int rowIndex, int width, int height); 16 | }; 17 | 18 | #endif // YUV422UYVYTOJPEGENCODER_H 19 | -------------------------------------------------------------------------------- /ImageConverter.h: -------------------------------------------------------------------------------- 1 | #ifndef IMAGECONVERTER_H 2 | #define IMAGECONVERTER_H 3 | 4 | #include 5 | #include "Workers.h" 6 | 7 | namespace android { 8 | 9 | class ImageConverter 10 | { 11 | public: 12 | ImageConverter(); 13 | ~ImageConverter(); 14 | 15 | uint8_t * YUY2ToRGBA(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height); 16 | uint8_t * YUY2ToJPEG(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, size_t dstLen, uint8_t quality); 17 | 18 | uint8_t * UYVYToRGBA(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height); 19 | uint8_t * UYVYToJPEG(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, size_t dstLen, uint8_t quality); 20 | 21 | protected: 22 | uint8_t * splitRunWait(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, Workers::Task::Function fn); 23 | 24 
| private: 25 | struct ConvertTask { 26 | Workers::Task task; 27 | struct Data { 28 | const uint8_t *src; 29 | uint8_t *dst; 30 | size_t width; 31 | size_t linesNum; 32 | } data; 33 | }; 34 | }; 35 | 36 | }; /* namespace android */ 37 | 38 | #endif // IMAGECONVERTER_H 39 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | Minimal working V4L2-based Android Camera HAL driver. 2 | 3 | HAL version: 3.0 4 | Module version: 2.3 5 | Camera API version: 2 6 | 7 | 8 | 9 | LIMITATIONS 10 | ----------- 11 | 12 | * Supports only one camera 13 | 14 | * Tested only on Tegra K1, using one specific camera and a webcam. 15 | 16 | * No parameter control, most of the reported specs are hardcoded. 17 | 18 | * Resolution detection is limited up to 1920x1080. See availableResolutions() 19 | in V4l2Device.cpp. 20 | 21 | 22 | 23 | WORKAROUNDS/BUILD TIME CONFIGURATION 24 | ------------------------------------ 25 | 26 | In Android.mk there are some flags which control how the driver works. Some 27 | of them might not always work. 28 | 29 | 30 | LOCAL_CFLAGS += -DV4L2DEVICE_FPS_LIMIT= 31 | 32 | is positive integer. Limits framerate at the driver level. Helps when 33 | the kernel's V4L2 driver allows to read the buffers faster than it fills them 34 | with a new frames. Comment out to disable the limit. 35 | 36 | 37 | LOCAL_CFLAGS += -DV4L2DEVICE_BUF_COUNT= 38 | 39 | is a positive integer (4 by default) - V4L2 buffers count. 40 | 41 | 42 | LOCAL_CFLAGS += -DV4L2DEVICE_PIXEL_FORMAT=V4L2_PIX_FMT_UYVY 43 | #LOCAL_CFLAGS += -DV4L2DEVICE_PIXEL_FORMAT=V4L2_PIX_FMT_YUYV 44 | 45 | V4L2 source color format. 46 | 47 | 48 | LOCAL_CFLAGS += -DV4L2DEVICE_OPEN_ONCE 49 | 50 | Opens and initializes /dev/video0 during boot time. Comment out to open/close 51 | the device when a camera app is opened/closed. 
52 | 53 | 54 | LOCAL_CFLAGS += -DV4L2DEVICE_USE_POLL 55 | 56 | Use poll() before dequeueing a buffer. 57 | 58 | 59 | 60 | BOOT TIME CONFIGURATION 61 | ----------------------- 62 | 63 | The "ro.camera.v4l2device.resolution" system property allows to force one single 64 | resolution (must be supported by V4L2). The value is in the "WIDTHxHEIGHT" format. 65 | 66 | 67 | 68 | HOW TO BUILD 69 | ------------ 70 | 71 | In your target's makefile (e.g. device/mycompany/mydevice/mydevice.mk) add: 72 | 73 | PRODUCT_PACKAGES += camera.$(TARGET_BOARD_PLATFORM) 74 | PRODUCT_PACKAGES += media_profiles.xml 75 | 76 | and just build Android. 77 | -------------------------------------------------------------------------------- /Android.mk: -------------------------------------------------------------------------------- 1 | LOCAL_PATH := $(call my-dir) 2 | 3 | #----------------------------------------------------------------------------- 4 | # Camera HAL module 5 | #----------------------------------------------------------------------------- 6 | 7 | include $(CLEAR_VARS) 8 | 9 | LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM) 10 | LOCAL_MODULE_RELATIVE_PATH := hw 11 | 12 | LOCAL_CFLAGS += -std=c++11 13 | LOCAL_CFLAGS += -fno-short-enums 14 | LOCAL_CFLAGS += -Wno-unused-parameter -Wno-missing-field-initializers 15 | LOCAL_CFLAGS += -pthread 16 | 17 | LOCAL_CFLAGS += -DV4L2DEVICE_FPS_LIMIT=60 18 | 19 | LOCAL_CFLAGS += -DV4L2DEVICE_BUF_COUNT=4 20 | 21 | # Camera color format 22 | LOCAL_CFLAGS += -DV4L2DEVICE_PIXEL_FORMAT=V4L2_PIX_FMT_UYVY 23 | #LOCAL_CFLAGS += -DV4L2DEVICE_PIXEL_FORMAT=V4L2_PIX_FMT_YUYV 24 | 25 | # Configure and open device once on HAL start 26 | LOCAL_CFLAGS += -DV4L2DEVICE_OPEN_ONCE 27 | 28 | LOCAL_CFLAGS += -DV4L2DEVICE_USE_POLL 29 | 30 | 31 | # Compile debug code - comment out to disable 32 | #LOCAL_CFLAGS += -UNDEBUG -DDEBUG 33 | 34 | 35 | LOCAL_STATIC_LIBRARIES := \ 36 | libyuv_static 37 | 38 | LOCAL_SHARED_LIBRARIES := \ 39 | liblog \ 40 | libutils \ 41 | 
libcutils \ 42 | libcamera_client \ 43 | libui \ 44 | libjpeg \ 45 | libcamera_metadata \ 46 | libskia \ 47 | libandroid_runtime 48 | 49 | LOCAL_C_INCLUDES += \ 50 | external/jpeg \ 51 | external/libyuv/files/include \ 52 | frameworks/native/include/media/hardware \ 53 | $(call include-path-for, camera) 54 | 55 | LOCAL_C_INCLUDES += \ 56 | external/skia/include/core/ \ 57 | frameworks/base/core/jni/android/graphics \ 58 | frameworks/native/include 59 | 60 | LOCAL_SRC_FILES += \ 61 | HalModule.cpp \ 62 | Camera.cpp \ 63 | V4l2Device.cpp \ 64 | ImageConverter.cpp \ 65 | Workers.cpp \ 66 | Yuv422UyvyToJpegEncoder.cpp 67 | 68 | include $(BUILD_SHARED_LIBRARY) 69 | 70 | #----------------------------------------------------------------------------- 71 | # media_profiles.xml 72 | #----------------------------------------------------------------------------- 73 | 74 | include $(CLEAR_VARS) 75 | 76 | LOCAL_MODULE := media_profiles.xml 77 | LOCAL_MODULE_CLASS := ETC 78 | LOCAL_MODULE_PATH := $(TARGET_OUT_ETC) 79 | LOCAL_SRC_FILES := media_profiles.xml 80 | 81 | include $(BUILD_PREBUILT) 82 | 83 | -------------------------------------------------------------------------------- /Workers.h: -------------------------------------------------------------------------------- 1 | #ifndef WORKERS_H 2 | #define WORKERS_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | namespace android { 11 | 12 | class Workers { 13 | public: 14 | class Task { 15 | 16 | public: 17 | typedef void (*Function)(void *); 18 | 19 | Task(Function fn, void *data): mFn(fn), mData(data), mCompleted(false) {} 20 | Task(): Task(NULL, NULL) {} 21 | Task& operator=(Task &&other) { 22 | mFn = other.mFn; 23 | mData = other.mData; 24 | mCompleted = other.mCompleted; 25 | return *this; 26 | } 27 | 28 | void waitForCompletion() { 29 | mMutex.lock(); 30 | if(!mCompleted) { 31 | mCond.wait(mMutex); 32 | } 33 | mMutex.unlock(); 34 | } 35 | 36 | void execute() { 37 | Mutex::Autolock 
lock(mMutex); 38 | mFn(mData); 39 | mCompleted = true; 40 | mCond.signal(); 41 | } 42 | 43 | private: 44 | Mutex mMutex; 45 | Condition mCond; 46 | Function mFn; 47 | void *mData; 48 | bool mCompleted; 49 | }; 50 | 51 | Workers(); 52 | ~Workers() {} 53 | 54 | bool start(); 55 | void stop(); 56 | bool isRunning() const { return mRunning; } 57 | 58 | unsigned threadsNum() { return (unsigned)mThreads.size(); } 59 | 60 | void queueTask(Task *task); 61 | 62 | private: 63 | class Thread { 64 | public: 65 | Thread(int id, Workers *parent): mId(id), mParent(parent) {} 66 | Thread(): mId(-1), mParent(NULL) {} 67 | 68 | void run() { pthread_create(&mThread, NULL, threadLoop, this); } 69 | void join() { pthread_join(mThread, NULL); } 70 | 71 | private: 72 | int mId; 73 | pthread_t mThread; 74 | Workers *mParent; 75 | 76 | static void * threadLoop(void *t); 77 | }; 78 | friend class Thread; 79 | 80 | bool mRunning; 81 | bool mExitRequest; 82 | 83 | Mutex mMutex; 84 | Condition mCond; 85 | List mTasks; 86 | Vector mThreads; 87 | }; 88 | 89 | extern Workers gWorkers; 90 | 91 | }; /* namespace android */ 92 | 93 | #endif // WORKERS_H 94 | -------------------------------------------------------------------------------- /V4l2Device.h: -------------------------------------------------------------------------------- 1 | #ifndef V4L2DEVICE_H 2 | #define V4L2DEVICE_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | #ifndef V4L2DEVICE_BUF_COUNT 13 | # define V4L2DEVICE_BUF_COUNT 4 14 | #endif 15 | 16 | #ifndef V4L2DEVICE_PIXEL_FORMAT 17 | # warning V4L2DEVICE_PIXEL_FORMAT not defined, using default value (V4L2_PIX_FMT_UYVY) 18 | # define V4L2DEVICE_PIXEL_FORMAT V4L2_PIX_FMT_UYVY 19 | #endif 20 | 21 | namespace android { 22 | 23 | class V4l2Device 24 | { 25 | public: 26 | struct Resolution { 27 | unsigned width; 28 | unsigned height; 29 | }; 30 | 31 | class VBuffer { 32 | public: 33 | uint8_t *buf; 34 | uint32_t len; 35 | uint32_t 
pixFmt; 36 | 37 | private: 38 | VBuffer(): buf(NULL), len(0) {} 39 | ~VBuffer(); 40 | 41 | bool map(int fd, unsigned offset, unsigned len); 42 | void unmap(); 43 | 44 | friend class V4l2Device; 45 | }; 46 | 47 | V4l2Device(const char *devNode = "/dev/video0"); 48 | ~V4l2Device(); 49 | 50 | const Vector & availableResolutions(); 51 | V4l2Device::Resolution sensorResolution(); 52 | 53 | bool setResolution(unsigned width, unsigned height); 54 | V4l2Device::Resolution resolution(); 55 | 56 | bool connect(); 57 | bool disconnect(); 58 | bool isConnected() const { return mFd >= 0; } 59 | 60 | bool setStreaming(bool enable); 61 | bool isStreaming() const { return mStreaming; } 62 | 63 | const VBuffer * readLock(); 64 | bool unlock(const VBuffer *buf); 65 | 66 | private: 67 | bool queueBuffer(unsigned id); 68 | int dequeueBuffer(); 69 | 70 | bool iocStreamOff(); 71 | bool iocStreamOn(); 72 | bool iocSFmt(unsigned width, unsigned height); 73 | bool iocReqBufs(unsigned *count); 74 | bool iocQueryBuf(unsigned id, unsigned *offset, unsigned *len); 75 | 76 | bool setResolutionAndAllocateBuffers(unsigned width, unsigned height); 77 | void cleanup(); 78 | 79 | int mFd; 80 | bool mConnected; 81 | bool mStreaming; 82 | const char *mDevNode; 83 | Vector mAvailableResolutions; 84 | V4l2Device::Resolution mForcedResolution; 85 | struct v4l2_format mFormat; 86 | VBuffer mBuf[V4L2DEVICE_BUF_COUNT]; 87 | struct pollfd mPFd; 88 | 89 | #if V4L2DEVICE_FPS_LIMIT > 0 90 | nsecs_t mLastTimestamp; 91 | #endif 92 | }; 93 | 94 | }; /* namespace android */ 95 | 96 | #endif // V4L2DEVICE_H 97 | -------------------------------------------------------------------------------- /Camera.h: -------------------------------------------------------------------------------- 1 | #ifndef CAMERA_H 2 | #define CAMERA_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "Workers.h" 12 | #include "ImageConverter.h" 13 | #include "DbgUtils.h" 14 | 15 | namespace 
android { 16 | 17 | class Camera: public camera3_device { 18 | public: 19 | Camera(); 20 | virtual ~Camera(); 21 | 22 | bool isValid() { return mValid; } 23 | 24 | virtual status_t cameraInfo(struct camera_info *info); 25 | 26 | virtual int openDevice(hw_device_t **device); 27 | virtual int closeDevice(); 28 | 29 | 30 | protected: 31 | virtual camera_metadata_t * staticCharacteristics(); 32 | virtual int initialize(const camera3_callback_ops_t *callbackOps); 33 | virtual int configureStreams(camera3_stream_configuration_t *streamList); 34 | virtual const camera_metadata_t * constructDefaultRequestSettings(int type); 35 | virtual int registerStreamBuffers(const camera3_stream_buffer_set_t *bufferSet); 36 | virtual int processCaptureRequest(camera3_capture_request_t *request); 37 | 38 | /* HELPERS/SUBPROCEDURES */ 39 | 40 | void notifyShutter(uint32_t frameNumber, uint64_t timestamp); 41 | void processCaptureResult(uint32_t frameNumber, const camera_metadata_t *result, const Vector &buffers); 42 | 43 | camera_metadata_t *mStaticCharacteristics; 44 | camera_metadata_t *mDefaultRequestSettings[CAMERA3_TEMPLATE_COUNT]; 45 | CameraMetadata mLastRequestSettings; 46 | 47 | V4l2Device *mDev; 48 | bool mValid; 49 | const camera3_callback_ops_t *mCallbackOps; 50 | 51 | size_t mJpegBufferSize; 52 | 53 | private: 54 | ImageConverter mConverter; 55 | Mutex mMutex; 56 | 57 | /* STATIC WRAPPERS */ 58 | 59 | static int sClose(hw_device_t *device); 60 | static int sInitialize(const struct camera3_device *device, const camera3_callback_ops_t *callback_ops); 61 | static int sConfigureStreams(const struct camera3_device *device, camera3_stream_configuration_t *stream_list); 62 | static int sRegisterStreamBuffers(const struct camera3_device *device, const camera3_stream_buffer_set_t *buffer_set); 63 | static const camera_metadata_t * sConstructDefaultRequestSettings(const struct camera3_device *device, int type); 64 | static int sProcessCaptureRequest(const struct camera3_device 
*device, camera3_capture_request_t *request); 65 | static void sGetMetadataVendorTagOps(const struct camera3_device *device, vendor_tag_query_ops_t* ops); 66 | static void sDump(const struct camera3_device *device, int fd); 67 | static int sFlush(const struct camera3_device *device); 68 | 69 | static camera3_device_ops_t sOps; 70 | }; 71 | 72 | }; /* namespace android */ 73 | 74 | #endif // CAMERA_H 75 | -------------------------------------------------------------------------------- /Yuv422UyvyToJpegEncoder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #include "Yuv422UyvyToJpegEncoder.h" 18 | 19 | /** 20 | * \class Yuv422UyvyToJpegEncoder 21 | * 22 | * Converts YUV(UYVY) image to JPEG. 23 | * 24 | * This is slightly modified Yuv422IToJpegEncoder from Android (frameworks/base/core/jni/android/graphics/YuvToJpegEncoder.cpp). 
25 | */ 26 | 27 | Yuv422UyvyToJpegEncoder::Yuv422UyvyToJpegEncoder(int* strides) : 28 | YuvToJpegEncoder(strides) { 29 | fNumPlanes = 1; 30 | } 31 | 32 | void Yuv422UyvyToJpegEncoder::compress(jpeg_compress_struct* cinfo, 33 | uint8_t* yuv, int* offsets) { 34 | SkDebugf("onFlyCompress_422"); 35 | JSAMPROW y[16]; 36 | JSAMPROW cb[16]; 37 | JSAMPROW cr[16]; 38 | JSAMPARRAY planes[3]; 39 | planes[0] = y; 40 | planes[1] = cb; 41 | planes[2] = cr; 42 | 43 | int width = cinfo->image_width; 44 | int height = cinfo->image_height; 45 | uint8_t* yRows = new uint8_t [16 * width]; 46 | uint8_t* uRows = new uint8_t [16 * (width >> 1)]; 47 | uint8_t* vRows = new uint8_t [16 * (width >> 1)]; 48 | 49 | uint8_t* yuvOffset = yuv + offsets[0]; 50 | 51 | // process 16 lines of Y and 16 lines of U/V each time. 52 | while (cinfo->next_scanline < cinfo->image_height) { 53 | deinterleave(yuvOffset, yRows, uRows, vRows, cinfo->next_scanline, width, height); 54 | 55 | // Jpeg library ignores the rows whose indices are greater than height. 
56 | for (int i = 0; i < 16; i++) { 57 | // y row 58 | y[i] = yRows + i * width; 59 | 60 | // construct u row and v row 61 | // width is halved because of downsampling 62 | int offset = i * (width >> 1); 63 | cb[i] = uRows + offset; 64 | cr[i] = vRows + offset; 65 | } 66 | 67 | jpeg_write_raw_data(cinfo, planes, 16); 68 | } 69 | delete [] yRows; 70 | delete [] uRows; 71 | delete [] vRows; 72 | } 73 | 74 | void Yuv422UyvyToJpegEncoder::deinterleave(uint8_t* yuv, uint8_t* yRows, uint8_t* uRows, 75 | uint8_t* vRows, int rowIndex, int width, int height) { 76 | int numRows = height - rowIndex; 77 | if (numRows > 16) numRows = 16; 78 | for (int row = 0; row < numRows; ++row) { 79 | uint8_t* yuvSeg = yuv + (rowIndex + row) * fStrides[0]; 80 | for (int i = 0; i < (width >> 1); ++i) { 81 | int indexY = row * width + (i << 1); 82 | int indexU = row * (width >> 1) + i; 83 | yRows[indexY] = yuvSeg[1]; 84 | yRows[indexY + 1] = yuvSeg[3]; 85 | uRows[indexU] = yuvSeg[0]; 86 | vRows[indexU] = yuvSeg[2]; 87 | yuvSeg += 4; 88 | } 89 | } 90 | } 91 | 92 | void Yuv422UyvyToJpegEncoder::configSamplingFactors(jpeg_compress_struct* cinfo) { 93 | // cb and cr are horizontally downsampled and vertically downsampled as well. 94 | cinfo->comp_info[0].h_samp_factor = 2; 95 | cinfo->comp_info[0].v_samp_factor = 2; 96 | cinfo->comp_info[1].h_samp_factor = 1; 97 | cinfo->comp_info[1].v_samp_factor = 2; 98 | cinfo->comp_info[2].h_samp_factor = 1; 99 | cinfo->comp_info[2].v_samp_factor = 2; 100 | } 101 | -------------------------------------------------------------------------------- /Workers.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #include 18 | #include 19 | 20 | #include "Workers.h" 21 | 22 | namespace android { 23 | 24 | Workers gWorkers; 25 | 26 | /******************************************************************************\ 27 | Workers 28 | \******************************************************************************/ 29 | 30 | /** 31 | * \class Workers 32 | * 33 | * Worker threads implementation 34 | * 35 | * When started, waits for one or more generic tasks to be queued and executes 36 | * them in multiple threads. 37 | * 38 | * Implementation note: 39 | * There is no support for OpenMP nor C++11 Threads. libutil's Thread class 40 | * is more suitable for implementing specific threads than thread pools. 41 | * Lets go with pthread. 42 | */ 43 | 44 | Workers::Workers() 45 | : mRunning(false) 46 | , mExitRequest(false) { 47 | } 48 | 49 | /** 50 | * Starts threads. 51 | */ 52 | bool Workers::start() { 53 | if(mRunning) 54 | return false; 55 | 56 | const unsigned cpuThreadsCount = (unsigned)sysconf(_SC_NPROCESSORS_ONLN); 57 | mThreads.resize(cpuThreadsCount); 58 | 59 | int id = 0; 60 | for(auto it = mThreads.begin(); it != mThreads.end(); ++it) { 61 | *it = Thread(id++, this); 62 | it->run(); 63 | } 64 | 65 | mRunning = true; 66 | 67 | return true; 68 | } 69 | 70 | /** 71 | * Stops all threads. 72 | * 73 | * No new task is picked, but the ones already in processing will finish. 
74 | */ 75 | void Workers::stop() { 76 | if(!mRunning) 77 | return; 78 | 79 | { 80 | Mutex::Autolock lock(mMutex); 81 | 82 | mExitRequest = true; 83 | mCond.broadcast(); 84 | } 85 | for(auto it = mThreads.begin(); it != mThreads.end(); ++it) { 86 | it->join(); 87 | } 88 | 89 | mThreads.clear(); 90 | mRunning = false; 91 | mExitRequest = false; 92 | } 93 | 94 | /** 95 | * Queues task and returns without waiting for it to be processed. 96 | */ 97 | void Workers::queueTask(Workers::Task *task) { 98 | Mutex::Autolock lock(mMutex); 99 | if(!mRunning) 100 | start(); 101 | 102 | mTasks.push_back(task); 103 | mCond.signal(); 104 | } 105 | 106 | /******************************************************************************\ 107 | Workers::Thread 108 | \******************************************************************************/ 109 | 110 | /** 111 | * \class Workers::Thread 112 | * 113 | * Internal thread representation. 114 | */ 115 | 116 | /** 117 | * Thread main loop 118 | */ 119 | void *Workers::Thread::threadLoop(void *t) { 120 | Thread *thread = static_cast(t); 121 | assert(thread != NULL); 122 | Workers *workers = thread->mParent; 123 | assert(workers != NULL); 124 | 125 | for(;;) { 126 | Workers::Task *task = NULL; 127 | 128 | { 129 | Mutex::Autolock lock(workers->mMutex); 130 | 131 | while(workers->mTasks.empty() && !workers->mExitRequest) 132 | workers->mCond.wait(workers->mMutex); 133 | 134 | if(workers->mExitRequest) 135 | break; 136 | 137 | /* pop task from queue */ 138 | task = *workers->mTasks.begin(); 139 | workers->mTasks.erase(workers->mTasks.begin()); 140 | } 141 | 142 | /* process task */ 143 | task->execute(); 144 | } 145 | 146 | pthread_exit(NULL); 147 | } 148 | 149 | }; /* namespace android */ 150 | -------------------------------------------------------------------------------- /HalModule.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under 
the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #define LOG_TAG "Cam-HalModule" 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #include "Camera.h" 26 | 27 | /******************************************************************************\ 28 | DECLARATIONS 29 | Not used in any other project source files, header file is redundant 30 | \******************************************************************************/ 31 | 32 | extern camera_module_t HAL_MODULE_INFO_SYM; 33 | 34 | namespace android { 35 | namespace HalModule { 36 | 37 | /* Available cameras */ 38 | extern Camera *cams[]; 39 | 40 | static int getNumberOfCameras(); 41 | static int getCameraInfo(int cameraId, struct camera_info *info); 42 | static int setCallbacks(const camera_module_callbacks_t *callbacks); 43 | static void getVendorTagOps(vendor_tag_ops_t* ops); 44 | static int openDevice(const hw_module_t *module, const char *name, hw_device_t **device); 45 | 46 | static struct hw_module_methods_t moduleMethods = { 47 | .open = openDevice 48 | }; 49 | 50 | }; /* namespace HalModule */ 51 | }; /* namespace android */ 52 | 53 | /******************************************************************************\ 54 | DEFINITIONS 55 | \******************************************************************************/ 56 | 57 | camera_module_t HAL_MODULE_INFO_SYM = { 58 | .common = { 59 | .tag = HARDWARE_MODULE_TAG, 60 | .module_api_version = 
CAMERA_MODULE_API_VERSION_2_3, 61 | .hal_api_version = HARDWARE_HAL_API_VERSION, 62 | .id = CAMERA_HARDWARE_MODULE_ID, 63 | .name = "V4l2 Camera", 64 | .author = "Antmicro Ltd.", 65 | .methods = &android::HalModule::moduleMethods, 66 | .dso = NULL, 67 | .reserved = {0} 68 | }, 69 | .get_number_of_cameras = android::HalModule::getNumberOfCameras, 70 | .get_camera_info = android::HalModule::getCameraInfo, 71 | .set_callbacks = android::HalModule::setCallbacks, 72 | }; 73 | 74 | namespace android { 75 | namespace HalModule { 76 | 77 | static Camera mainCamera; 78 | Camera *cams[] = { 79 | &mainCamera 80 | }; 81 | 82 | static int getNumberOfCameras() { 83 | return NELEM(cams); 84 | }; 85 | 86 | static int getCameraInfo(int cameraId, struct camera_info *info) { 87 | if(cameraId < 0 || cameraId >= getNumberOfCameras()) { 88 | ALOGE("%s: invalid camera ID (%d)", __FUNCTION__, cameraId); 89 | return -ENODEV; 90 | } 91 | if(!cams[cameraId]->isValid()) { 92 | ALOGE("%s: camera %d is not initialized", __FUNCTION__, cameraId); 93 | return -ENODEV; 94 | } 95 | return cams[cameraId]->cameraInfo(info); 96 | } 97 | 98 | int setCallbacks(const camera_module_callbacks_t * /*callbacks*/) { 99 | /* TODO: Implement for hotplug support */ 100 | return OK; 101 | } 102 | 103 | static int openDevice(const hw_module_t *module, const char *name, hw_device_t **device) { 104 | if (module != &HAL_MODULE_INFO_SYM.common) { 105 | ALOGE("%s: invalid module (%p != %p)", __FUNCTION__, module, &HAL_MODULE_INFO_SYM.common); 106 | return -EINVAL; 107 | } 108 | if (name == NULL) { 109 | ALOGE("%s: NULL name", __FUNCTION__); 110 | return -EINVAL; 111 | } 112 | errno = 0; 113 | int cameraId = (int)strtol(name, NULL, 10); 114 | if(errno || cameraId < 0 || cameraId >= getNumberOfCameras()) { 115 | ALOGE("%s: invalid camera ID (%s)", __FUNCTION__, name); 116 | return -EINVAL; 117 | } 118 | if(!cams[cameraId]->isValid()) { 119 | ALOGE("%s: camera %d is not initialized", __FUNCTION__, cameraId); 120 | *device 
= NULL; 121 | return -ENODEV; 122 | } 123 | 124 | return cams[cameraId]->openDevice(device); 125 | } 126 | 127 | }; /* namespace HalModule */ 128 | }; /* namespace android */ 129 | -------------------------------------------------------------------------------- /ImageConverter.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | #include 18 | #include 19 | #include 20 | 21 | #include "Yuv422UyvyToJpegEncoder.h" 22 | #include "ImageConverter.h" 23 | #include "DbgUtils.h" 24 | 25 | #define WORKERS_TASKS_NUM 30 26 | 27 | namespace android { 28 | 29 | ImageConverter::ImageConverter() { 30 | } 31 | 32 | ImageConverter::~ImageConverter() { 33 | } 34 | 35 | uint8_t *ImageConverter::YUY2ToRGBA(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height) { 36 | assert(gWorkers.isRunning()); 37 | assert(src != NULL); 38 | assert(dst != NULL); 39 | assert(width > 0); 40 | assert(height > 0); 41 | 42 | Workers::Task::Function taskFn = [](void *data) { 43 | ConvertTask::Data *d = static_cast(data); 44 | 45 | SIMD_ALIGNED(uint8 rowy[kMaxStride]); 46 | SIMD_ALIGNED(uint8 rowu[kMaxStride]); 47 | SIMD_ALIGNED(uint8 rowv[kMaxStride]); 48 | 49 | for(size_t i = 0; i < d->linesNum; ++i) { 50 | libyuv::YUY2ToUV422Row_NEON(d->src, rowu, rowv, d->width); 51 | libyuv::YUY2ToYRow_NEON(d->src, rowy, d->width); 52 | /* Somehow destination format is swapped (*ABGR converts to RGBA) */ 53 | libyuv::I422ToABGRRow_NEON(rowy, rowu, rowv, d->dst, d->width); 54 | d->src += d->width * 2; 55 | d->dst += d->width * 4; 56 | } 57 | }; 58 | 59 | return splitRunWait(src, dst, width, height, taskFn); 60 | } 61 | 62 | uint8_t *ImageConverter::YUY2ToJPEG(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, size_t dstLen, uint8_t quality) { 63 | assert(src != NULL); 64 | assert(dst != NULL); 65 | assert(width > 0); 66 | assert(height > 0); 67 | assert(dstLen > 0); 68 | assert(quality <= 100); 69 | 70 | /* TODO: do it parallel with libjpeg */ 71 | 72 | int strides[] = { (int)width * 2 }; 73 | int offsets[] = { 0 }; 74 | Yuv422IToJpegEncoder encoder(strides); 75 | SkDynamicMemoryWStream stream; 76 | 77 | encoder.encode(&stream, (void *)src, (int)width, (int)height, offsets, quality); 78 | 79 | if(stream.getOffset() > dstLen) 80 | return dst; 81 | 82 | stream.copyTo(dst); 83 | return dst + 
stream.getOffset(); 84 | } 85 | 86 | uint8_t *ImageConverter::UYVYToRGBA(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height) { 87 | assert(gWorkers.isRunning()); 88 | assert(src != NULL); 89 | assert(dst != NULL); 90 | assert(width > 0); 91 | assert(height > 0); 92 | 93 | Workers::Task::Function taskFn = [](void *data) { 94 | ConvertTask::Data *d = static_cast(data); 95 | 96 | SIMD_ALIGNED(uint8 rowy[kMaxStride]); 97 | SIMD_ALIGNED(uint8 rowu[kMaxStride]); 98 | SIMD_ALIGNED(uint8 rowv[kMaxStride]); 99 | 100 | for(size_t i = 0; i < d->linesNum; ++i) { 101 | libyuv::UYVYToUV422Row_NEON(d->src, rowu, rowv, d->width); 102 | libyuv::UYVYToYRow_NEON(d->src, rowy, d->width); 103 | /* Somehow destination format is swapped (*ABGR converts to RGBA) */ 104 | libyuv::I422ToABGRRow_NEON(rowy, rowu, rowv, d->dst, d->width); 105 | d->src += d->width * 2; 106 | d->dst += d->width * 4; 107 | } 108 | }; 109 | 110 | return splitRunWait(src, dst, width, height, taskFn); 111 | } 112 | 113 | uint8_t *ImageConverter::UYVYToJPEG(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, size_t dstLen, uint8_t quality) { 114 | assert(src != NULL); 115 | assert(dst != NULL); 116 | assert(width > 0); 117 | assert(height > 0); 118 | assert(dstLen > 0); 119 | assert(quality <= 100); 120 | 121 | /* TODO: do it parallel with libjpeg */ 122 | 123 | int strides[] = { (int)width * 2 }; 124 | int offsets[] = { 0 }; 125 | Yuv422UyvyToJpegEncoder encoder(strides); 126 | SkDynamicMemoryWStream stream; 127 | 128 | encoder.encode(&stream, (void *)src, (int)width, (int)height, offsets, quality); 129 | 130 | if(stream.getOffset() > dstLen) 131 | return dst; 132 | 133 | stream.copyTo(dst); 134 | return dst + stream.getOffset(); 135 | } 136 | 137 | uint8_t * ImageConverter::splitRunWait(const uint8_t *src, uint8_t *dst, unsigned width, unsigned height, Workers::Task::Function fn) { 138 | ConvertTask tasks[WORKERS_TASKS_NUM]; 139 | 140 | const uint8_t *srcPtr = src; 141 | uint8_t 
*dstPtr = dst; 142 | const size_t linesPerTask = (height + WORKERS_TASKS_NUM - 1) / WORKERS_TASKS_NUM; 143 | for(size_t i = 0; i < WORKERS_TASKS_NUM; ++i) { 144 | tasks[i].data.src = srcPtr; 145 | tasks[i].data.dst = dstPtr; 146 | tasks[i].data.width = width; 147 | tasks[i].data.linesNum = linesPerTask; 148 | if((i + 1) * linesPerTask >= height) { 149 | tasks[i].data.linesNum = height - i * linesPerTask; 150 | } 151 | 152 | tasks[i].task = Workers::Task(fn, (void *)&tasks[i].data); 153 | gWorkers.queueTask(&tasks[i].task); 154 | 155 | srcPtr += linesPerTask * width * 2; 156 | dstPtr += linesPerTask * width * 4; 157 | } 158 | 159 | for(size_t i = 0; i < WORKERS_TASKS_NUM; ++i) { 160 | tasks[i].task.waitForCompletion(); 161 | } 162 | 163 | return dstPtr; 164 | } 165 | 166 | }; /* namespace android */ 167 | -------------------------------------------------------------------------------- /media_profiles.xml: -------------------------------------------------------------------------------- 1 | 2 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | ]> 85 | 89 | 90 | 91 | 92 | 93 | 94 | 108 | 109 | 123 | 124 | 125 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 153 | 158 | 159 | 164 | 165 | 169 | 170 | 174 | 175 | 176 | 177 | 178 | -------------------------------------------------------------------------------- /DbgUtils.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #ifndef DBGUTILS_H 18 | #define DBGUTILS_H 19 | 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #ifndef NDEBUG 26 | #define NDEBUG 0 27 | #endif 28 | 29 | #if !NDEBUG 30 | # define DEBUG_CODE(x) x 31 | #else 32 | # define DEBUG_CODE(x) 33 | #endif 34 | 35 | /******************************************************************************\ 36 | AutoLogCall 37 | \******************************************************************************/ 38 | 39 | namespace android { 40 | namespace DbgUtils { 41 | 42 | class AutoLogCall { 43 | public: 44 | AutoLogCall(const char *name): mName(name) { 45 | static __thread unsigned level; 46 | mLevel = &level; 47 | ALOGV("%*s+ %s", *mLevel * 4, "", name); 48 | ++(*mLevel); 49 | } 50 | ~AutoLogCall() { 51 | --(*mLevel); 52 | ALOGV("%*s- %s", *mLevel * 4, "", mName); 53 | } 54 | 55 | private: 56 | const char *mName; 57 | unsigned *mLevel; 58 | }; 59 | 60 | }; /* namespace DbgUtils */ 61 | }; /* namespace android */ 62 | 63 | #if !NDEBUG 64 | # define DBGUTILS_AUTOLOGCALL(name) android::DbgUtils::AutoLogCall _autoLogCall(name) 65 | #else 66 | # define DBGUTILS_AUTOLOGCALL(name) 67 | #endif 68 | 69 | /******************************************************************************\ 70 | FpsCounter 71 | \******************************************************************************/ 72 | 73 | #define FPSCOUNTER_VARIABLE_NAME _fpsCounterState 74 | #define FPSCOUNTER_CLASS_WITH_NS android::DbgUtils::FpsCounter 75 | 76 | #include 77 | 78 | namespace android { 79 | 
namespace DbgUtils { 80 | 81 | template 82 | class FpsCounter { 83 | public: 84 | FpsCounter(): mTimeId(SAMPLES - 1), mSamplesCount(0) { 85 | for(size_t i = 0; i < SAMPLES; ++i) 86 | mTime[i] = 0; 87 | } 88 | 89 | double fps(int samples = SAMPLES - 1) { 90 | if(samples >= mSamplesCount) 91 | samples = mSamplesCount - 1; 92 | if(samples < 1) 93 | samples = 1; 94 | 95 | unsigned pastTime; 96 | pastTime = (mTimeId + SAMPLES - samples) % SAMPLES; 97 | 98 | return samples * 1000000000.0f / (mTime[mTimeId] - mTime[pastTime]); 99 | } 100 | 101 | void tick() { 102 | mTimeId = (mTimeId + 1) % SAMPLES; 103 | mTime[mTimeId] = systemTime(); 104 | if(mSamplesCount < SAMPLES) 105 | ++mSamplesCount; 106 | } 107 | 108 | nsecs_t mTime[SAMPLES]; 109 | unsigned mTimeId; 110 | unsigned mSamplesCount; 111 | }; 112 | 113 | }; /* namespace DbgUtils */ 114 | }; /* namespace android */ 115 | 116 | #if !NDEBUG 117 | # define FPSCOUNTER_HERE(samples) \ 118 | static FPSCOUNTER_CLASS_WITH_NS FPSCOUNTER_VARIABLE_NAME; \ 119 | FPSCOUNTER_VARIABLE_NAME.tick(); 120 | 121 | # define FPSCOUNTER_VALUE(samples) \ 122 | (FPSCOUNTER_VARIABLE_NAME.fps(samples)) 123 | 124 | #else 125 | # define FPSCOUNTER_HERE(samples) 126 | # define FPSCOUNTER_VALUE(samples) (0.0f) 127 | #endif 128 | 129 | /******************************************************************************\ 130 | Benchmark 131 | \******************************************************************************/ 132 | 133 | #define BENCHMARK_VARIABLE_NAME _benchmarkState 134 | #define BENCHMARK_CLASS_WITH_NS android::DbgUtils::Benchmark 135 | 136 | #include 137 | #include 138 | #include 139 | #include 140 | 141 | #ifdef HAVE_ANDROID_OS 142 | # include 143 | # define BENCHMARK_VECTOR android::Vector 144 | /* Android vector's operator [] is const */ 145 | # define BENCHMARK_VECTOR_ITEM_EDIT(vec, id) (vec).editItemAt(id) 146 | #else 147 | # include 148 | # define BENCHMARK_VECTOR std::vector 149 | # define BENCHMARK_VECTOR_ITEM_EDIT(vec, id) 
(vec)[id] 150 | #endif 151 | 152 | namespace android { 153 | namespace DbgUtils { 154 | 155 | template 156 | class Benchmark { 157 | public: 158 | Benchmark() {} 159 | ~Benchmark() {} 160 | 161 | int begin(const char *sectionName) { 162 | int id = mSections.size(); 163 | for(int i = 0; i < mSections.size(); ++i) { 164 | if(!strcmp(mSections[i].name, sectionName)) { 165 | id = i; 166 | break; 167 | } 168 | } 169 | if(id == mSections.size()) { 170 | Section newSection; 171 | newSection.name = sectionName; 172 | for(unsigned i = 0; i < SAMPLES; ++i) { 173 | newSection.time[i] = 0; 174 | } 175 | newSection.timeId = SAMPLES - 1; 176 | newSection.samplesCount = 0; 177 | newSection.count = 0; 178 | mSections.push_back(newSection); 179 | } 180 | Section &sec = BENCHMARK_VECTOR_ITEM_EDIT(mSections, id); 181 | if(sec.count == 0) { 182 | sec.timeId = (sec.timeId + 1) % SAMPLES; 183 | sec.time[sec.timeId] = 0; 184 | if(sec.samplesCount < SAMPLES) 185 | ++sec.samplesCount; 186 | } 187 | ++sec.count; 188 | sec.time[sec.timeId] -= currentTimeNs(); 189 | return id; 190 | } 191 | 192 | void end(int id) { 193 | Section &sec = BENCHMARK_VECTOR_ITEM_EDIT(mSections, id); 194 | sec.time[sec.timeId] += currentTimeNs(); 195 | } 196 | 197 | bool formatString(char *out, size_t len, int precision) { 198 | for(int i = 0; i < mSections.size() && len > 0; ++i) { 199 | Section &sec = BENCHMARK_VECTOR_ITEM_EDIT(mSections, i); 200 | double t = (double)sec.time[sec.timeId] / 1000000000.0f; 201 | if(sec.count == 0) 202 | t = 0.0f; 203 | double avg = 0.0f; 204 | for(unsigned j = 0; j < sec.samplesCount; ++j) { 205 | const unsigned jj = (SAMPLES + sec.timeId - j) % SAMPLES; 206 | avg += (double)sec.time[jj]; 207 | } 208 | avg = avg / sec.samplesCount / 1000000000.0f; 209 | size_t printedNum; 210 | printedNum = snprintf(out, len, "%s%s[%u]: %.*f (%.*f)", 211 | i != 0 ? 
" " : "", 212 | sec.name, sec.count, 213 | precision, t, precision, avg); 214 | len -= printedNum; 215 | out += printedNum; 216 | } 217 | return (len > 0); 218 | } 219 | 220 | void newCycle() { 221 | for(int i = 0; i < mSections.size(); ++i) { 222 | Section &sec = BENCHMARK_VECTOR_ITEM_EDIT(mSections, i); 223 | sec.count = 0; 224 | } 225 | } 226 | 227 | private: 228 | int64_t currentTimeNs() { 229 | struct timespec t; 230 | clock_gettime(CLOCK_MONOTONIC, &t); 231 | return t.tv_sec * 1000000000LL + t.tv_nsec; 232 | } 233 | 234 | struct Section { 235 | const char *name; 236 | int64_t time[SAMPLES]; 237 | unsigned timeId; 238 | unsigned samplesCount; 239 | unsigned count; 240 | }; 241 | BENCHMARK_VECTOR
mSections; 242 | }; 243 | 244 | }; /* namespace DbgUtils */ 245 | }; /* namespace android */ 246 | 247 | #if !NDEBUG 248 | # define BENCHMARK_HERE(samples) \ 249 | static BENCHMARK_CLASS_WITH_NS BENCHMARK_VARIABLE_NAME; \ 250 | BENCHMARK_VARIABLE_NAME.newCycle(); 251 | 252 | # define BENCHMARK_SECTION(name) \ 253 | for(int _benchmarkId = BENCHMARK_VARIABLE_NAME.begin(name); \ 254 | _benchmarkId >= 0; \ 255 | BENCHMARK_VARIABLE_NAME.end(_benchmarkId), _benchmarkId = -1) 256 | 257 | # define BENCHMARK_STRING(str, len, prec) \ 258 | BENCHMARK_VARIABLE_NAME.formatString(str, len, prec); 259 | #else 260 | # define BENCHMARK_HERE(samples) 261 | # define BENCHMARK_SECTION(name) if(true) 262 | # define BENCHMARK_STRING(str, len, prec) (*str = '\0') 263 | #endif 264 | 265 | #undef BENCHMARK_VECTOR 266 | 267 | #endif // DBGUTILS_H 268 | -------------------------------------------------------------------------------- /V4l2Device.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | #define LOG_TAG "Cam-V4l2Device" 18 | #define LOG_NDEBUG NDEBUG 19 | 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | #include 34 | 35 | #include "V4l2Device.h" 36 | 37 | namespace android { 38 | 39 | /******************************************************************************\ 40 | Helpers 41 | \******************************************************************************/ 42 | 43 | static inline int openFd(const char *path) { 44 | assert(path); 45 | int flags = O_RDWR; 46 | #ifdef V4L2DEVICE_USE_POLL 47 | flags |= O_NONBLOCK; 48 | #endif 49 | int fd = open(path, flags); 50 | ALOGV("open %s = %d", path, fd); 51 | return fd; 52 | } 53 | 54 | static inline void closeFd(int *fd) { 55 | assert(fd); 56 | close(*fd); 57 | ALOGV("close %d", *fd); 58 | *fd = -1; 59 | } 60 | 61 | /******************************************************************************\ 62 | V4l2Device 63 | \******************************************************************************/ 64 | 65 | /** 66 | * \class V4l2Device 67 | * 68 | * Simple wrapper for part of V4L2 camera interface. 69 | */ 70 | 71 | /** 72 | * Initializes object. 
73 | * 74 | * \parameter devNode Path to V4L2 device node 75 | */ 76 | V4l2Device::V4l2Device(const char *devNode) 77 | : mFd(-1) 78 | , mConnected(false) 79 | , mStreaming(false) 80 | , mDevNode(devNode) 81 | { 82 | memset(&mFormat, 0, sizeof(mFormat)); 83 | mPFd.fd = -1; 84 | mPFd.events = POLLIN | POLLRDNORM; 85 | 86 | #if V4L2DEVICE_FPS_LIMIT > 0 87 | mLastTimestamp = 0; 88 | #endif 89 | 90 | /* Ignore multiple possible devices for now */ 91 | char resStr[PROPERTY_VALUE_MAX]; 92 | int ret; 93 | ret = property_get("ro.camera.v4l2device.resolution", resStr, ""); 94 | if(ret > 0) { 95 | /* parse forced resolution as WIDTHxHEIGHT */ 96 | char *heightStr = strchr(resStr, 'x'); 97 | if(heightStr) 98 | *heightStr++ = '\0'; 99 | 100 | errno = 0; 101 | mForcedResolution.width = strtoul(resStr, NULL, 10); 102 | ret = errno; 103 | mForcedResolution.height = strtoul(heightStr, NULL, 10); 104 | ret |= errno; 105 | 106 | if(ret) { 107 | mForcedResolution.width = mForcedResolution.height = 0; 108 | } 109 | } 110 | 111 | #ifdef V4L2DEVICE_OPEN_ONCE 112 | connect(); 113 | #endif 114 | } 115 | 116 | V4l2Device::~V4l2Device() { 117 | if(isStreaming()) { 118 | iocStreamOff(); 119 | } 120 | cleanup(); 121 | } 122 | 123 | /** 124 | * Returns array of camera's supported resolutions. 125 | * 126 | * Resolution can be forced by setting property ro.camera.v4l2device.resolution to value WIDTHxHEIGHT (e.g. 
1920x1080) 127 | */ 128 | const Vector & V4l2Device::availableResolutions() { 129 | if(!mAvailableResolutions.isEmpty()) { 130 | return mAvailableResolutions; 131 | } 132 | 133 | if(mForcedResolution.width > 0 && mForcedResolution.height > 0) { 134 | ALOGI("Using forced resolution: %ux%u", mForcedResolution.width, mForcedResolution.height); 135 | mAvailableResolutions.add(mForcedResolution); 136 | } else { 137 | int fd; 138 | bool fdNeedsClose = false; 139 | Vector formats; 140 | 141 | if(mFd >= 0) { 142 | fd = mFd; 143 | } else { 144 | fd = openFd(mDevNode); 145 | fdNeedsClose = true; 146 | } 147 | if(fd < 0) { 148 | ALOGE("Could not open %s: %s (%d)", mDevNode, strerror(errno), errno); 149 | return mAvailableResolutions; 150 | } 151 | 152 | struct v4l2_frmsizeenum frmSize; 153 | frmSize.pixel_format = V4L2DEVICE_PIXEL_FORMAT; 154 | frmSize.index = 0; 155 | 156 | errno = 0; 157 | while(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmSize) == 0) { 158 | ALOGD("%s: Found resolution: %dx%d", mDevNode, frmSize.discrete.width, frmSize.discrete.height); 159 | ++frmSize.index; 160 | // FIXME: make it configurable or fix the out of memory problem 161 | if(frmSize.discrete.width > 1920 || frmSize.discrete.height > 1080) { 162 | ALOGD(" too big, ignoring"); 163 | continue; 164 | } 165 | formats.add(); 166 | formats.editTop().width = frmSize.discrete.width; 167 | formats.editTop().height = frmSize.discrete.height; 168 | } 169 | if(errno && errno != EINVAL) { 170 | ALOGW("Get available formats: %s (%d)", strerror(errno), errno); 171 | } 172 | 173 | if(fdNeedsClose) { 174 | closeFd(&fd); 175 | } 176 | 177 | mAvailableResolutions = formats; 178 | } 179 | 180 | return mAvailableResolutions; 181 | } 182 | 183 | /** 184 | * Returns V4l2Device::Resolution with highest possible width and highest 185 | * possible height. This might not to be valid camera resolution. 
186 | */ 187 | V4l2Device::Resolution V4l2Device::sensorResolution() { 188 | const Vector &formats = availableResolutions(); 189 | V4l2Device::Resolution max = {0, 0}; 190 | for(size_t i = 0; i < formats.size(); ++i) { 191 | if(formats[i].width > max.width) 192 | max.width = formats[i].width; 193 | if(formats[i].height > max.height) 194 | max.height = formats[i].height; 195 | } 196 | return max; 197 | } 198 | 199 | /** 200 | * Sets new resolution. The resolution must be supported by camera. If it does 201 | * not, false is returned. Call only with disabled streaming. 202 | */ 203 | bool V4l2Device::setResolution(unsigned width, unsigned height) { 204 | if(mFormat.fmt.pix.width == width && mFormat.fmt.pix.height == height) 205 | return true; 206 | 207 | ALOGD("New resolution: %dx%d", width, height); 208 | if(isConnected()) { 209 | #ifndef V4L2DEVICE_OPEN_ONCE 210 | disconnect(); 211 | mFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 212 | mFormat.fmt.pix.pixelformat = V4L2DEVICE_PIXEL_FORMAT; 213 | mFormat.fmt.pix.width = width; 214 | mFormat.fmt.pix.height = height; 215 | connect(); 216 | #else 217 | ALOGD("Resolution change not supported"); 218 | #endif 219 | return true; 220 | } else { 221 | mFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 222 | mFormat.fmt.pix.pixelformat = V4L2DEVICE_PIXEL_FORMAT; 223 | mFormat.fmt.pix.width = width; 224 | mFormat.fmt.pix.height = height; 225 | return true; 226 | } 227 | } 228 | 229 | /** 230 | * Returns current resolution 231 | */ 232 | V4l2Device::Resolution V4l2Device::resolution() { 233 | Resolution res; 234 | res.width = mFormat.fmt.pix.width; 235 | res.height = mFormat.fmt.pix.height; 236 | return res; 237 | } 238 | 239 | /** 240 | * Connects to camera, allocates buffers, starts streaming 241 | */ 242 | bool V4l2Device::connect() { 243 | if(isConnected()) 244 | return false; 245 | 246 | mFd = openFd(mDevNode); 247 | if(mFd < 0) { 248 | ALOGE("Could not open %s: %s (%d)", mDevNode, strerror(errno), errno); 249 | return false; 250 | 
} 251 | 252 | unsigned width; 253 | unsigned height; 254 | if(mFormat.type) { 255 | width = mFormat.fmt.pix.width; 256 | height = mFormat.fmt.pix.height; 257 | } else { 258 | auto resolutions = availableResolutions(); 259 | if(resolutions.isEmpty()) { 260 | ALOGE("No available resolutions found, aborting"); 261 | closeFd(&mFd); 262 | return false; 263 | } 264 | auto defaultRes = resolutions[0]; 265 | width = resolutions[0].width; 266 | height = resolutions[0].height; 267 | ALOGD("Using default resolution: %dx%d", defaultRes.width, defaultRes.height); 268 | } 269 | if(!setResolutionAndAllocateBuffers(width, height)) { 270 | ALOGE("Could not set resolution"); 271 | closeFd(&mFd); 272 | return false; 273 | } 274 | 275 | mPFd.fd = mFd; 276 | mPFd.revents = 0; 277 | mConnected = true; 278 | 279 | return true; 280 | } 281 | 282 | /** 283 | * Stops streaming and disconnects from camera device. 284 | */ 285 | bool V4l2Device::disconnect() { 286 | if(!isConnected()) 287 | return false; 288 | 289 | setStreaming(false); 290 | #ifndef V4L2DEVICE_OPEN_ONCE 291 | cleanup(); 292 | #endif 293 | 294 | return true; 295 | } 296 | 297 | bool V4l2Device::setStreaming(bool enable) { 298 | if(enable == mStreaming) 299 | return true; 300 | 301 | if(!isConnected()) 302 | return !enable; 303 | 304 | if(enable) { 305 | if(!iocStreamOn()) { 306 | ALOGE("Could not start streaming: %s (%d)", strerror(errno), errno); 307 | return false; 308 | } 309 | } else { 310 | #ifdef V4L2DEVICE_OPEN_ONCE 311 | return true; 312 | #else 313 | if(!iocStreamOff()) { 314 | ALOGE("Could not stop streaming: %s (%d)", strerror(errno), errno); 315 | return false; 316 | } 317 | #endif 318 | } 319 | 320 | mStreaming = enable; 321 | 322 | return true; 323 | } 324 | 325 | /** 326 | * Lock buffer and return pointer to it. After processing buffer must be 327 | * unlocked with V4l2Device::unlock(). 
328 | */ 329 | const V4l2Device::VBuffer * V4l2Device::readLock() { 330 | assert(isConnected()); 331 | assert(isStreaming()); 332 | int id = 0; 333 | if((id = dequeueBuffer()) < 0) { 334 | ALOGE("Could not dequeue buffer: %s (%d)", strerror(errno), errno); 335 | return NULL; 336 | } 337 | auto buf = &mBuf[id]; 338 | return buf; 339 | } 340 | 341 | /** 342 | * Unlocks previously locked buffer. 343 | */ 344 | bool V4l2Device::unlock(const VBuffer *buf) { 345 | if(!buf) 346 | return false; 347 | 348 | for(unsigned i = 0; i < NELEM(mBuf); ++i) { 349 | if(mBuf[i].buf == buf->buf) { 350 | if(!queueBuffer(i)) { 351 | ALOGE("Could not queue buffer %d: %s (%d)", i, strerror(errno), errno); 352 | return false; 353 | } 354 | return true; 355 | } 356 | } 357 | return false; 358 | } 359 | 360 | /** 361 | * Returns buffer with specified ID to the kernel 362 | */ 363 | bool V4l2Device::queueBuffer(unsigned id) { 364 | assert(mFd >= 0); 365 | 366 | struct v4l2_buffer bufInfo; 367 | memset(&bufInfo, 0, sizeof(bufInfo)); 368 | bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 369 | bufInfo.memory = V4L2_MEMORY_MMAP; 370 | bufInfo.index = id; 371 | 372 | if(ioctl(mFd, VIDIOC_QBUF, &bufInfo) < 0) 373 | return false; 374 | 375 | return true; 376 | } 377 | 378 | /** 379 | * Dequeues next available buffer and returns its ID. 
380 | */ 381 | int V4l2Device::dequeueBuffer() { 382 | assert(mFd >= 0); 383 | 384 | struct v4l2_buffer bufInfo; 385 | 386 | memset(&bufInfo, 0, sizeof(bufInfo)); 387 | bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 388 | bufInfo.memory = V4L2_MEMORY_MMAP; 389 | bufInfo.index = 0; 390 | 391 | #if V4L2DEVICE_FPS_LIMIT > 0 392 | auto timestamp = systemTime(); 393 | nsecs_t extraTime = 1000000000LL / V4L2DEVICE_FPS_LIMIT - (timestamp - mLastTimestamp); 394 | if(extraTime / 1000 > 0) 395 | usleep((unsigned)(extraTime / 1000)); 396 | mLastTimestamp = systemTime(); 397 | #endif 398 | 399 | do { 400 | #ifdef V4L2DEVICE_USE_POLL 401 | if((errno = 0, poll(&mPFd, 1, 5000)) <= 0) { 402 | errno = ETIME; 403 | return -1; 404 | } 405 | #endif 406 | } while((errno = 0, ioctl(mFd, VIDIOC_DQBUF, &bufInfo)) < 0 && (errno == EINVAL || errno == EAGAIN)); 407 | if(errno) 408 | return -1; 409 | 410 | return (int)bufInfo.index; 411 | } 412 | 413 | bool V4l2Device::iocStreamOff() { 414 | assert(mFd >= 0); 415 | assert(mFormat.type); 416 | 417 | errno = 0; 418 | unsigned type = mFormat.type; 419 | if(ioctl(mFd, VIDIOC_STREAMOFF, &type) == 0) { 420 | mStreaming = false; 421 | } else { 422 | ALOGV("%s: %s (%d)", __FUNCTION__, strerror(errno), errno); 423 | } 424 | return !errno; 425 | } 426 | 427 | bool V4l2Device::iocStreamOn() { 428 | assert(mFd >= 0); 429 | assert(mFormat.type); 430 | 431 | errno = 0; 432 | unsigned type = mFormat.type; 433 | if(ioctl(mFd, VIDIOC_STREAMON, &type) == 0) { 434 | mStreaming = true; 435 | } else { 436 | ALOGV("%s: %s (%d)", __FUNCTION__, strerror(errno), errno); 437 | } 438 | return !errno; 439 | } 440 | 441 | bool V4l2Device::iocSFmt(unsigned width, unsigned height) { 442 | assert(mFd >= 0); 443 | assert(!mStreaming); 444 | 445 | struct v4l2_format format; 446 | memset(&format, 0, sizeof(format)); 447 | 448 | format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 449 | format.fmt.pix.pixelformat = V4L2DEVICE_PIXEL_FORMAT; 450 | format.fmt.pix.width = width; 451 | 
format.fmt.pix.height = height; 452 | 453 | errno = 0; 454 | if(ioctl(mFd, VIDIOC_S_FMT, &format) == 0) { 455 | mFormat = format; 456 | } else { 457 | ALOGV("%s(w=%u, h=%u): %s (%d)", __FUNCTION__, width, height, strerror(errno), errno); 458 | } 459 | 460 | return !errno; 461 | } 462 | 463 | bool V4l2Device::iocReqBufs(unsigned *count) { 464 | assert(mFd >= 0); 465 | assert(count); 466 | 467 | struct v4l2_requestbuffers bufRequest; 468 | memset(&bufRequest, 0, sizeof(bufRequest)); 469 | 470 | bufRequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 471 | bufRequest.memory = V4L2_MEMORY_MMAP; 472 | bufRequest.count = *count; 473 | 474 | errno = 0; 475 | if(ioctl(mFd, VIDIOC_REQBUFS, &bufRequest) == 0) { 476 | *count = bufRequest.count; 477 | } else { 478 | ALOGV("%s(count=%u): %s (%d)", __FUNCTION__, *count, strerror(errno), errno); 479 | } 480 | 481 | return !errno; 482 | } 483 | 484 | bool V4l2Device::iocQueryBuf(unsigned id, unsigned *offset, unsigned *len) { 485 | assert(mFd >= 0); 486 | assert(offset); 487 | assert(len); 488 | 489 | struct v4l2_buffer bufInfo; 490 | memset(&bufInfo, 0, sizeof(bufInfo)); 491 | 492 | bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 493 | bufInfo.memory = V4L2_MEMORY_MMAP; 494 | bufInfo.index = id; 495 | 496 | errno = 0; 497 | if(ioctl(mFd, VIDIOC_QUERYBUF, &bufInfo) == 0) { 498 | *offset = bufInfo.m.offset; 499 | *len = bufInfo.length; 500 | } else { 501 | ALOGV("%s(id=%u): %s (%d)", __FUNCTION__, id, strerror(errno), errno); 502 | } 503 | 504 | return !errno; 505 | } 506 | 507 | bool V4l2Device::setResolutionAndAllocateBuffers(unsigned width, unsigned height) { 508 | assert(!mStreaming); 509 | 510 | for(int i = 0; i < V4L2DEVICE_BUF_COUNT; ++i) { 511 | mBuf[i].unmap(); 512 | } 513 | 514 | if(!iocSFmt(width, height)) { 515 | ALOGE("Could not set pixel format to %dx%d: %s (%d)", width, height, strerror(errno), errno); 516 | return false; 517 | } 518 | 519 | unsigned bufCount = V4L2DEVICE_BUF_COUNT; 520 | if(!iocReqBufs(&bufCount)) { 521 | 
ALOGE("Could not request buffer: %s (%d)", strerror(errno), errno); 522 | return false; 523 | } 524 | 525 | unsigned bufLen[V4L2DEVICE_BUF_COUNT] = {0}; 526 | 527 | for(unsigned i = 0; i < bufCount; ++i) { 528 | unsigned offset; 529 | if(!iocQueryBuf(i, &offset, &bufLen[i])) { 530 | ALOGE("Could not query buffer %d: %s (%d)", i, strerror(errno), errno); 531 | return false; 532 | } 533 | 534 | if(!mBuf[i].map(mFd, offset, bufLen[i])) { 535 | ALOGE("Could not allocate buffer %d (len = %d): %s (%d)", i, bufLen[i], strerror(errno), errno); 536 | while(i--) mBuf[i].unmap(); 537 | return false; 538 | } 539 | 540 | if(!queueBuffer(i)) { 541 | ALOGE("Could not queue buffer: %s (%d)", strerror(errno), errno); 542 | do mBuf[i].unmap(); while(i--); 543 | return false; 544 | } 545 | } 546 | 547 | return true; 548 | } 549 | 550 | void V4l2Device::cleanup() { 551 | for(int i = 0; i < V4L2DEVICE_BUF_COUNT; ++i) { 552 | mBuf[i].unmap(); 553 | } 554 | 555 | closeFd(&mFd); 556 | mPFd.fd = -1; 557 | mConnected = false; 558 | } 559 | 560 | /******************************************************************************\ 561 | V4l2Device::VBuffer 562 | \******************************************************************************/ 563 | 564 | /** 565 | * \class V4l2Device::VBuffer 566 | * 567 | * Video buffer abstraction. 
568 | */ 569 | 570 | V4l2Device::VBuffer::~VBuffer() { 571 | if(buf) { 572 | ALOGD("V4l2Device::VBuffer: Memory leak!"); 573 | abort(); 574 | } 575 | } 576 | 577 | bool V4l2Device::VBuffer::map(int fd, unsigned offset, unsigned len) { 578 | assert(!this->buf); 579 | 580 | errno = 0; 581 | this->buf = (uint8_t*)mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, offset); 582 | if(this->buf == MAP_FAILED) { 583 | return false; 584 | } 585 | memset(this->buf, 0, len); 586 | this->len = len; 587 | this->pixFmt = V4L2DEVICE_PIXEL_FORMAT; 588 | 589 | return true; 590 | } 591 | 592 | void V4l2Device::VBuffer::unmap() { 593 | if(buf) { 594 | munmap(buf, len); 595 | buf = NULL; 596 | len = 0; 597 | } 598 | } 599 | 600 | }; /* namespace android */ 601 | -------------------------------------------------------------------------------- /Camera.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2015-2016 Antmicro 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | #define LOG_TAG "Cam-Camera" 18 | #define LOG_NDEBUG NDEBUG 19 | 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | 30 | #include "DbgUtils.h" 31 | #include "Camera.h" 32 | #include "ImageConverter.h" 33 | 34 | extern camera_module_t HAL_MODULE_INFO_SYM; 35 | 36 | namespace android { 37 | /** 38 | * \class Camera 39 | * 40 | * Android's Camera 3 device implementation. 41 | * 42 | * Declaration of camera capabilities, frame request handling, etc. This code 43 | * is what Android framework talks to. 44 | */ 45 | 46 | Camera::Camera() 47 | : mStaticCharacteristics(NULL) 48 | , mCallbackOps(NULL) 49 | , mJpegBufferSize(0) { 50 | DBGUTILS_AUTOLOGCALL(__func__); 51 | for(size_t i = 0; i < NELEM(mDefaultRequestSettings); i++) { 52 | mDefaultRequestSettings[i] = NULL; 53 | } 54 | 55 | common.tag = HARDWARE_DEVICE_TAG; 56 | common.version = CAMERA_DEVICE_API_VERSION_3_0; 57 | common.module = &HAL_MODULE_INFO_SYM.common; 58 | common.close = Camera::sClose; 59 | ops = &sOps; 60 | priv = NULL; 61 | 62 | mValid = true; 63 | mDev = new V4l2Device("/dev/video0"); 64 | if(!mDev) { 65 | mValid = false; 66 | } 67 | } 68 | 69 | Camera::~Camera() { 70 | DBGUTILS_AUTOLOGCALL(__func__); 71 | gWorkers.stop(); 72 | mDev->disconnect(); 73 | delete mDev; 74 | } 75 | 76 | status_t Camera::cameraInfo(struct camera_info *info) { 77 | DBGUTILS_AUTOLOGCALL(__func__); 78 | Mutex::Autolock lock(mMutex); 79 | info->facing = CAMERA_FACING_BACK; 80 | info->orientation = 0; 81 | info->device_version = CAMERA_DEVICE_API_VERSION_3_0; 82 | info->static_camera_characteristics = staticCharacteristics(); 83 | 84 | return NO_ERROR; 85 | } 86 | 87 | int Camera::openDevice(hw_device_t **device) { 88 | DBGUTILS_AUTOLOGCALL(__func__); 89 | Mutex::Autolock lock(mMutex); 90 | mDev->connect(); 91 | *device = &common; 92 | 93 | gWorkers.start(); 94 | 95 | return NO_ERROR; 96 | } 97 | 98 | int Camera::closeDevice() { 99 
| DBGUTILS_AUTOLOGCALL(__func__); 100 | Mutex::Autolock lock(mMutex); 101 | 102 | gWorkers.stop(); 103 | mDev->disconnect(); 104 | 105 | return NO_ERROR; 106 | } 107 | 108 | camera_metadata_t *Camera::staticCharacteristics() { 109 | if(mStaticCharacteristics) 110 | return mStaticCharacteristics; 111 | 112 | CameraMetadata cm; 113 | 114 | auto &resolutions = mDev->availableResolutions(); 115 | auto &previewResolutions = resolutions; 116 | auto sensorRes = mDev->sensorResolution(); 117 | 118 | /***********************************\ 119 | |* START OF CAMERA CHARACTERISTICS *| 120 | \***********************************/ 121 | 122 | /* fake, but valid aspect ratio */ 123 | const float sensorInfoPhysicalSize[] = { 124 | 5.0f, 125 | 5.0f * (float)sensorRes.height / (float)sensorRes.width 126 | }; 127 | cm.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorInfoPhysicalSize, NELEM(sensorInfoPhysicalSize)); 128 | 129 | /* fake */ 130 | static const float lensInfoAvailableFocalLengths[] = {3.30f}; 131 | cm.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, lensInfoAvailableFocalLengths, NELEM(lensInfoAvailableFocalLengths)); 132 | 133 | static const uint8_t lensFacing = ANDROID_LENS_FACING_BACK; 134 | cm.update(ANDROID_LENS_FACING, &lensFacing, 1); 135 | const int32_t sensorInfoPixelArraySize[] = { 136 | (int32_t)sensorRes.width, 137 | (int32_t)sensorRes.height 138 | }; 139 | cm.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, sensorInfoPixelArraySize, NELEM(sensorInfoPixelArraySize)); 140 | 141 | const int32_t sensorInfoActiveArraySize[] = { 142 | 0, 0, 143 | (int32_t)sensorRes.width, (int32_t)sensorRes.height 144 | }; 145 | cm.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, sensorInfoActiveArraySize, NELEM(sensorInfoActiveArraySize)); 146 | 147 | static const int32_t scalerAvailableFormats[] = { 148 | HAL_PIXEL_FORMAT_RGBA_8888, 149 | HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 150 | /* Non-preview one, must be last - see following code */ 151 | HAL_PIXEL_FORMAT_BLOB 152 | }; 153 | 
cm.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalerAvailableFormats, NELEM(scalerAvailableFormats)); 154 | 155 | /* Only for HAL_PIXEL_FORMAT_BLOB */ 156 | const size_t mainStreamConfigsCount = resolutions.size(); 157 | /* For all other supported pixel formats */ 158 | const size_t previewStreamConfigsCount = previewResolutions.size() * (NELEM(scalerAvailableFormats) - 1); 159 | const size_t streamConfigsCount = mainStreamConfigsCount + previewStreamConfigsCount; 160 | 161 | int32_t scalerAvailableStreamConfigurations[streamConfigsCount * 4]; 162 | int64_t scalerAvailableMinFrameDurations[streamConfigsCount * 4]; 163 | 164 | int32_t scalerAvailableProcessedSizes[previewResolutions.size() * 2]; 165 | int64_t scalerAvailableProcessedMinDurations[previewResolutions.size()]; 166 | int32_t scalerAvailableJpegSizes[resolutions.size() * 2]; 167 | int64_t scalerAvailableJpegMinDurations[resolutions.size()]; 168 | 169 | size_t i4 = 0; 170 | size_t i2 = 0; 171 | size_t i1 = 0; 172 | /* Main stream configurations */ 173 | for(size_t resId = 0; resId < resolutions.size(); ++resId) { 174 | scalerAvailableStreamConfigurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB; 175 | scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)resolutions[resId].width; 176 | scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)resolutions[resId].height; 177 | scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT; 178 | 179 | scalerAvailableMinFrameDurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB; 180 | scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)resolutions[resId].width; 181 | scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)resolutions[resId].height; 182 | scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 60; /* TODO: read from the device */ 183 | 184 | scalerAvailableJpegSizes[i2 + 0] = (int32_t)resolutions[resId].width; 185 | scalerAvailableJpegSizes[i2 + 1] = (int32_t)resolutions[resId].height; 186 | 187 | scalerAvailableJpegMinDurations[i1] = 
1000000000 / 60; /* TODO: read from the device */ 188 | 189 | i4 += 4; 190 | i2 += 2; 191 | i1 += 1; 192 | } 193 | i2 = 0; 194 | i1 = 0; 195 | /* Preview stream configurations */ 196 | for(size_t resId = 0; resId < previewResolutions.size(); ++resId) { 197 | for(size_t fmtId = 0; fmtId < NELEM(scalerAvailableFormats) - 1; ++fmtId) { 198 | scalerAvailableStreamConfigurations[i4 + 0] = scalerAvailableFormats[fmtId]; 199 | scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)previewResolutions[resId].width; 200 | scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)previewResolutions[resId].height; 201 | scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT; 202 | 203 | scalerAvailableMinFrameDurations[i4 + 0] = scalerAvailableFormats[fmtId]; 204 | scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)previewResolutions[resId].width; 205 | scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)previewResolutions[resId].height; 206 | scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 60; /* TODO: read from the device */ 207 | 208 | i4 += 4; 209 | } 210 | scalerAvailableProcessedSizes[i2 + 0] = (int32_t)previewResolutions[resId].width; 211 | scalerAvailableProcessedSizes[i2 + 1] = (int32_t)previewResolutions[resId].height; 212 | 213 | scalerAvailableProcessedMinDurations[i1] = 1000000000 / 60; /* TODO: read from the device */ 214 | 215 | i2 += 2; 216 | i1 += 1; 217 | } 218 | cm.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, scalerAvailableStreamConfigurations, (size_t)NELEM(scalerAvailableStreamConfigurations)); 219 | cm.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations)); 220 | /* Probably fake */ 221 | cm.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations)); 222 | cm.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, scalerAvailableJpegSizes, 
(size_t)NELEM(scalerAvailableJpegSizes)); 223 | cm.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, scalerAvailableJpegMinDurations, (size_t)NELEM(scalerAvailableJpegMinDurations)); 224 | cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, scalerAvailableProcessedSizes, (size_t)NELEM(scalerAvailableProcessedSizes)); 225 | cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, scalerAvailableProcessedMinDurations, (size_t)NELEM(scalerAvailableProcessedMinDurations)); 226 | 227 | /* ~8.25 bit/px (https://en.wikipedia.org/wiki/JPEG#Sample_photographs) */ 228 | /* Use 9 bit/px, add buffer info struct size, round up to page size */ 229 | mJpegBufferSize = sensorRes.width * sensorRes.height * 9 + sizeof(camera3_jpeg_blob); 230 | mJpegBufferSize = (mJpegBufferSize + PAGE_SIZE - 1u) & ~(PAGE_SIZE - 1u); 231 | const int32_t jpegMaxSize = (int32_t)mJpegBufferSize; 232 | cm.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); 233 | 234 | static const int32_t jpegAvailableThumbnailSizes[] = { 235 | 0, 0, 236 | 320, 240 237 | }; 238 | cm.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, NELEM(jpegAvailableThumbnailSizes)); 239 | 240 | static const int32_t sensorOrientation = 90; 241 | cm.update(ANDROID_SENSOR_ORIENTATION, &sensorOrientation, 1); 242 | 243 | static const uint8_t flashInfoAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; 244 | cm.update(ANDROID_FLASH_INFO_AVAILABLE, &flashInfoAvailable, 1); 245 | 246 | static const float scalerAvailableMaxDigitalZoom = 1; 247 | cm.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &scalerAvailableMaxDigitalZoom, 1); 248 | 249 | static const uint8_t statisticsFaceDetectModes[] = { 250 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF 251 | }; 252 | cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, statisticsFaceDetectModes, NELEM(statisticsFaceDetectModes)); 253 | 254 | static const int32_t statisticsInfoMaxFaceCount = 0; 255 | cm.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &statisticsInfoMaxFaceCount, 1); 256 | 
257 | static const uint8_t controlAvailableSceneModes[] = { 258 | ANDROID_CONTROL_SCENE_MODE_DISABLED 259 | }; 260 | cm.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, controlAvailableSceneModes, NELEM(controlAvailableSceneModes)); 261 | 262 | static const uint8_t controlAvailableEffects[] = { 263 | ANDROID_CONTROL_EFFECT_MODE_OFF 264 | }; 265 | cm.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, controlAvailableEffects, NELEM(controlAvailableEffects)); 266 | 267 | static const int32_t controlMaxRegions[] = { 268 | 0, /* AE */ 269 | 0, /* AWB */ 270 | 0 /* AF */ 271 | }; 272 | cm.update(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, NELEM(controlMaxRegions)); 273 | 274 | static const uint8_t controlAeAvailableModes[] = { 275 | ANDROID_CONTROL_AE_MODE_OFF 276 | }; 277 | cm.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, controlAeAvailableModes, NELEM(controlAeAvailableModes)); 278 | 279 | static const camera_metadata_rational controlAeCompensationStep = {1, 3}; 280 | cm.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, &controlAeCompensationStep, 1); 281 | 282 | int32_t controlAeCompensationRange[] = {-9, 9}; 283 | cm.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, NELEM(controlAeCompensationRange)); 284 | 285 | static const int32_t controlAeAvailableTargetFpsRanges[] = { 286 | 60, 60 287 | }; 288 | cm.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, controlAeAvailableTargetFpsRanges, NELEM(controlAeAvailableTargetFpsRanges)); 289 | 290 | static const uint8_t controlAeAvailableAntibandingModes[] = { 291 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF 292 | }; 293 | cm.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, controlAeAvailableAntibandingModes, NELEM(controlAeAvailableAntibandingModes)); 294 | 295 | static const uint8_t controlAwbAvailableModes[] = { 296 | ANDROID_CONTROL_AWB_MODE_AUTO, 297 | ANDROID_CONTROL_AWB_MODE_OFF 298 | }; 299 | cm.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, controlAwbAvailableModes, NELEM(controlAwbAvailableModes)); 300 
    static const uint8_t controlAfAvailableModes[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, controlAfAvailableModes, NELEM(controlAfAvailableModes));

    static const uint8_t controlAvailableVideoStabilizationModes[] = {
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
    };
    cm.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, controlAvailableVideoStabilizationModes, NELEM(controlAvailableVideoStabilizationModes));

    const uint8_t infoSupportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
    cm.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &infoSupportedHardwareLevel, 1);

    /***********************************\
    |*  END OF CAMERA CHARACTERISTICS  *|
    \***********************************/

    /* Ownership of the allocated metadata moves to mStaticCharacteristics;
     * it is served from the cache on subsequent calls (see check above,
     * outside this view). */
    mStaticCharacteristics = cm.release();
    return mStaticCharacteristics;
}

/* Stores the framework callback interface. The callbacks are later used by
 * notifyShutter() and processCaptureResult() to deliver results upstream. */
int Camera::initialize(const camera3_callback_ops_t *callbackOps) {
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);

    mCallbackOps = callbackOps;
    return NO_ERROR;
}

/* Builds (and caches, per template type) the default request settings for a
 * capture template. Most 3A features are reported as OFF/fixed because the
 * device has no controllable 3A hardware. */
const camera_metadata_t * Camera::constructDefaultRequestSettings(int type) {
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);
    /* TODO: validate type */
    /* NOTE(review): `type` indexes mDefaultRequestSettings without a bounds
     * check — confirm callers only pass valid CAMERA3_TEMPLATE_* values. */

    if(mDefaultRequestSettings[type]) {
        return mDefaultRequestSettings[type];
    }

    CameraMetadata cm;

    static const int32_t requestId = 0;
    cm.update(ANDROID_REQUEST_ID, &requestId, 1);

    static const float lensFocusDistance = 0.0f;
    cm.update(ANDROID_LENS_FOCUS_DISTANCE, &lensFocusDistance, 1);

    /* Crop region defaults to the full sensor area. */
    auto sensorSize = mDev->sensorResolution();
    const int32_t scalerCropRegion[] = {
        0, 0,
        (int32_t)sensorSize.width, (int32_t)sensorSize.height
    };
    cm.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, NELEM(scalerCropRegion));

    static const int32_t jpegThumbnailSize[] = {
        0, 0
    };
    cm.update(ANDROID_JPEG_THUMBNAIL_SIZE, jpegThumbnailSize, NELEM(jpegThumbnailSize));

    static const uint8_t jpegThumbnailQuality = 50;
    cm.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegThumbnailQuality, 1);

    static const double jpegGpsCoordinates[] = {
        0, 0
    };
    cm.update(ANDROID_JPEG_GPS_COORDINATES, jpegGpsCoordinates, NELEM(jpegGpsCoordinates));

    static const uint8_t jpegGpsProcessingMethod[32] = "None";
    cm.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, jpegGpsProcessingMethod, NELEM(jpegGpsProcessingMethod));

    static const int64_t jpegGpsTimestamp = 0;
    cm.update(ANDROID_JPEG_GPS_TIMESTAMP, &jpegGpsTimestamp, 1);

    static const int32_t jpegOrientation = 0;
    cm.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    /** android.stats */

    static const uint8_t statisticsFaceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &statisticsFaceDetectMode, 1);

    static const uint8_t statisticsHistogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    cm.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &statisticsHistogramMode, 1);

    static const uint8_t statisticsSharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    cm.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &statisticsSharpnessMapMode, 1);

    /* Map the request template to the matching capture intent. */
    uint8_t controlCaptureIntent = 0;
    switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:          controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;          break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:    controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;    break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:     controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;     break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:   controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;   break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; break;
        default:                                controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;           break;
    }
    cm.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlCaptureIntent, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_OFF;
    cm.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t controlEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    cm.update(ANDROID_CONTROL_EFFECT_MODE, &controlEffectMode, 1);

    static const uint8_t controlSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    cm.update(ANDROID_CONTROL_SCENE_MODE, &controlSceneMode, 1);

    static const uint8_t controlAeMode = ANDROID_CONTROL_AE_MODE_OFF;
    cm.update(ANDROID_CONTROL_AE_MODE, &controlAeMode, 1);

    static const uint8_t controlAeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    cm.update(ANDROID_CONTROL_AE_LOCK, &controlAeLock, 1);

    /* One full-sensor metering region (x0, y0, x1, y1, weight); the same
     * region array is reused for AE, AWB and AF. */
    static const int32_t controlAeRegions[] = {
        0, 0,
        (int32_t)sensorSize.width, (int32_t)sensorSize.height,
        1000
    };
    cm.update(ANDROID_CONTROL_AE_REGIONS, controlAeRegions, NELEM(controlAeRegions));
    cm.update(ANDROID_CONTROL_AWB_REGIONS, controlAeRegions, NELEM(controlAeRegions));
    cm.update(ANDROID_CONTROL_AF_REGIONS, controlAeRegions, NELEM(controlAeRegions));

    static const int32_t controlAeExposureCompensation = 0;
    cm.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &controlAeExposureCompensation, 1);

    static const int32_t controlAeTargetFpsRange[] = {
        10, 60
    };
    cm.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, controlAeTargetFpsRange, NELEM(controlAeTargetFpsRange));

    static const uint8_t controlAeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
    cm.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &controlAeAntibandingMode, 1);

    static const uint8_t controlAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
    cm.update(ANDROID_CONTROL_AWB_MODE, &controlAwbMode, 1);

    static const uint8_t controlAwbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    cm.update(ANDROID_CONTROL_AWB_LOCK, &controlAwbLock, 1);

    uint8_t controlAfMode = ANDROID_CONTROL_AF_MODE_OFF;
    cm.update(ANDROID_CONTROL_AF_MODE, &controlAfMode, 1);

    /* 3A states are reported as already settled since there is no real 3A. */
    static const uint8_t controlAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    cm.update(ANDROID_CONTROL_AE_STATE, &controlAeState, 1);
    static const uint8_t controlAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    cm.update(ANDROID_CONTROL_AF_STATE, &controlAfState, 1);
    static const uint8_t controlAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    cm.update(ANDROID_CONTROL_AWB_STATE, &controlAwbState, 1);

    static const uint8_t controlVideoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    cm.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &controlVideoStabilizationMode, 1);

    static const int32_t controlAePrecaptureId = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    cm.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, &controlAePrecaptureId, 1);

    static const int32_t controlAfTriggerId = 0;
    cm.update(ANDROID_CONTROL_AF_TRIGGER_ID, &controlAfTriggerId, 1);

    /* Cache the built settings; ownership moves to mDefaultRequestSettings. */
    mDefaultRequestSettings[type] = cm.release();
    return mDefaultRequestSettings[type];
}

/* Accepts the framework's stream configuration: logs the requested streams,
 * rejects ZSL and multiple input streams, forces CPU-accessible gralloc
 * usage, and reconfigures the V4L2 device to the largest requested
 * resolution before restarting streaming. */
int Camera::configureStreams(camera3_stream_configuration_t *streamList) {
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);

    /* TODO: sanity checks */

    ALOGV("+-------------------------------------------------------------------------------");
    ALOGV("| STREAMS FROM FRAMEWORK");
    ALOGV("+-------------------------------------------------------------------------------");
    for(size_t i = 0; i < streamList->num_streams; ++i) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("| p=%p fmt=0x%.2x type=%u usage=0x%.8x size=%4ux%-4u buf_no=%u",
                newStream,
                newStream->format,
                newStream->stream_type,
                newStream->usage,
                newStream->width,
                newStream->height,
                newStream->max_buffers);
    }
    ALOGV("+-------------------------------------------------------------------------------");

    /* TODO: do we need input stream? */
    camera3_stream_t *inStream = NULL;
    unsigned width = 0;
    unsigned height = 0;
    for(size_t i = 0; i < streamList->num_streams; ++i) {
        camera3_stream_t *newStream = streamList->streams[i];

        /* TODO: validate: null */

        /* At most one input/bidirectional stream is allowed by the HAL3 API. */
        if(newStream->stream_type == CAMERA3_STREAM_INPUT || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            if(inStream) {
                ALOGE("Only one input/bidirectional stream allowed (previous is %p, this %p)", inStream, newStream);
                return BAD_VALUE;
            }
            inStream = newStream;
        }

        /* TODO: validate format */

        /* Frames are produced by CPU conversion, so pick a CPU-writable format. */
        if(newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
        }

        /* TODO: support ZSL */
        if(newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
            ALOGE("ZSL STREAM FOUND! It is not supported for now.");
            ALOGE("    Disable it by placing following line in /system/build.prop:");
            ALOGE("        camera.disable_zsl_mode=1");
            return BAD_VALUE;
        }

        /* Buffers are filled/read with the CPU via GraphicBufferMapper. */
        switch(newStream->stream_type) {
            case CAMERA3_STREAM_OUTPUT:        newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN; break;
            case CAMERA3_STREAM_INPUT:         newStream->usage = GRALLOC_USAGE_SW_READ_OFTEN;  break;
            case CAMERA3_STREAM_BIDIRECTIONAL: newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN; break;
        }
        newStream->max_buffers = 1; /* TODO: support larger queue */

        /* Track the largest requested resolution; the device is configured to it. */
        if(newStream->width * newStream->height > width * height) {
            width = newStream->width;
            height = newStream->height;
        }

        /* TODO: store stream pointers somewhere and configure only new ones */
    }

    if(!mDev->setStreaming(false)) {
        ALOGE("Could not stop streaming");
        return NO_INIT;
    }
    if(!mDev->setResolution(width, height)) {
        ALOGE("Could not set resolution");
        return NO_INIT;
    }

    ALOGV("+-------------------------------------------------------------------------------");
    ALOGV("| STREAMS AFTER CHANGES");
    ALOGV("+-------------------------------------------------------------------------------");
    for(size_t i = 0; i < streamList->num_streams; ++i) {
        const camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("| p=%p fmt=0x%.2x type=%u usage=0x%.8x size=%4ux%-4u buf_no=%u",
                newStream,
                newStream->format,
                newStream->stream_type,
                newStream->usage,
                newStream->width,
                newStream->height,
                newStream->max_buffers);
    }
    ALOGV("+-------------------------------------------------------------------------------");

    if(!mDev->setStreaming(true)) {
        ALOGE("Could not start streaming");
        return NO_INIT;
    }

    return NO_ERROR;
}

/* Legacy (pre-HAL3.2) buffer registration hook; this HAL only logs the
 * buffers and does no per-buffer setup. */
int Camera::registerStreamBuffers(const camera3_stream_buffer_set_t *bufferSet) {
    DBGUTILS_AUTOLOGCALL(__func__);
    Mutex::Autolock lock(mMutex);
    ALOGV("+-------------------------------------------------------------------------------");
    ALOGV("| BUFFERS FOR STREAM %p", bufferSet->stream);
    ALOGV("+-------------------------------------------------------------------------------");
    for (size_t i = 0; i < bufferSet->num_buffers; ++i) {
        ALOGV("| p=%p", bufferSet->buffers[i]);
    }
    ALOGV("+-------------------------------------------------------------------------------");

    return OK;
}

/* Services one capture request synchronously: waits on acquire fences, locks
 * every output buffer, converts the current V4L2 frame (UYVY/YUY2) to
 * RGBA or JPEG per stream format, then unlocks the buffers and delivers the
 * result metadata via processCaptureResult(). */
int Camera::processCaptureRequest(camera3_capture_request_t *request) {
    assert(request != NULL);
    Mutex::Autolock lock(mMutex);

    BENCHMARK_HERE(120);
    FPSCOUNTER_HERE(120);

    CameraMetadata cm;
    const V4l2Device::VBuffer *frame = NULL;
    auto res = mDev->resolution();
    status_t e;
    Vector<camera3_stream_buffer> buffers;

    auto timestamp = systemTime();

    ALOGV("--- capture request --- f=%-5u in_buf=%p out_bufs=%p[%u] --- fps %4.1f (avg %4.1f)",
            request->frame_number,
            request->input_buffer,
            request->output_buffers,
            request->num_output_buffers,
            FPSCOUNTER_VALUE(1), FPSCOUNTER_VALUE());

    /* The HAL3 contract requires settings on the first request. */
    if(request->settings == NULL && mLastRequestSettings.isEmpty()) {
        ALOGE("First request does not have metadata");
        return BAD_VALUE;
    }

    if(request->input_buffer) {
        /* Ignore input buffer */
        /* TODO: do we expect any input buffer? */
        request->input_buffer->release_fence = -1;
    }

    /* NULL settings mean "reuse the previous request's settings". */
    if(!request->settings) {
        cm.acquire(mLastRequestSettings);
    } else {
        cm = request->settings;
    }

    notifyShutter(request->frame_number, (uint64_t)timestamp);

    BENCHMARK_SECTION("Lock/Read") {
        frame = mDev->readLock();
    }

    if(!frame) {
        return NOT_ENOUGH_DATA;
    }

    buffers.setCapacity(request->num_output_buffers);

    /* First RGBA buffer filled; later RGBA buffers are memcpy'd from it
     * instead of re-running the YUV conversion. */
    uint8_t *rgbaBuffer = NULL;
    for(size_t i = 0; i < request->num_output_buffers; ++i) {
        const camera3_stream_buffer &srcBuf = request->output_buffers[i];
        uint8_t *buf = NULL;

        sp<Fence> acquireFence = new Fence(srcBuf.acquire_fence);
        e = acquireFence->wait(1000); /* FIXME: magic number */
        if(e == TIMED_OUT) {
            ALOGE("buffer %p frame %-4u Wait on acquire fence timed out", srcBuf.buffer, request->frame_number);
        }
        if(e == NO_ERROR) {
            const Rect rect((int)srcBuf.stream->width, (int)srcBuf.stream->height);
            e = GraphicBufferMapper::get().lock(*srcBuf.buffer, GRALLOC_USAGE_SW_WRITE_OFTEN, rect, (void **)&buf);
            if(e != NO_ERROR) {
                ALOGE("buffer %p frame %-4u lock failed", srcBuf.buffer, request->frame_number);
            }
        }
        if(e != NO_ERROR) {
            /* Roll back locks on failure.
             * NOTE(review): this do/while also unlocks buffer i itself, which
             * was never successfully locked (and the V4L2 frame is not
             * unlocked on this path) — verify intended. */
            do GraphicBufferMapper::get().unlock(*request->output_buffers[i].buffer); while(i--);
            return NO_INIT;
        }

        switch(srcBuf.stream->format) {
            case HAL_PIXEL_FORMAT_RGBA_8888: {
                if(!rgbaBuffer) {
                    BENCHMARK_SECTION("YUV->RGBA") {
                        /* FIXME: better format detection */
                        if(frame->pixFmt == V4L2_PIX_FMT_UYVY)
                            mConverter.UYVYToRGBA(frame->buf, buf, res.width, res.height);
                        else
                            mConverter.YUY2ToRGBA(frame->buf, buf, res.width, res.height);
                        rgbaBuffer = buf;
                    }
                } else {
                    BENCHMARK_SECTION("Buf Copy") {
                        memcpy(buf, rgbaBuffer, srcBuf.stream->width * srcBuf.stream->height * 4);
                    }
                }
                break;
            }
            case HAL_PIXEL_FORMAT_BLOB: {
                BENCHMARK_SECTION("YUV->JPEG") {
                    /* The blob footer occupies the end of the buffer. */
                    const size_t maxImageSize = mJpegBufferSize - sizeof(camera3_jpeg_blob);
                    uint8_t jpegQuality = 95;
                    if(cm.exists(ANDROID_JPEG_QUALITY)) {
                        jpegQuality = *cm.find(ANDROID_JPEG_QUALITY).data.u8;
                    }
                    ALOGD("JPEG quality = %u", jpegQuality);

                    /* FIXME: better format detection */
                    uint8_t *bufEnd = NULL;
                    if(frame->pixFmt == V4L2_PIX_FMT_UYVY)
                        bufEnd = mConverter.UYVYToJPEG(frame->buf, buf, res.width, res.height, maxImageSize, jpegQuality);
                    else
                        bufEnd = mConverter.YUY2ToJPEG(frame->buf, buf, res.width, res.height, maxImageSize, jpegQuality);

                    if(bufEnd != buf) {
                        /* Write the transport footer at the very end of the
                         * buffer so the framework can find the JPEG size. */
                        camera3_jpeg_blob *jpegBlob = reinterpret_cast<camera3_jpeg_blob *>(buf + maxImageSize);
                        jpegBlob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
                        jpegBlob->jpeg_size = (uint32_t)(bufEnd - buf);
                    } else {
                        ALOGE("%s: JPEG image too big!", __FUNCTION__);
                    }
                }
                break;
            }
            default:
                ALOGE("Unknown pixel format %d in buffer %p (stream %p), ignoring", srcBuf.stream->format, srcBuf.buffer, srcBuf.stream);
        }
    }

    /* Unlocking all buffers in separate loop allows to copy data from already processed buffer to not yet processed one */
    for(size_t i = 0; i < request->num_output_buffers; ++i) {
        const camera3_stream_buffer &srcBuf = request->output_buffers[i];

        GraphicBufferMapper::get().unlock(*srcBuf.buffer);
        buffers.push_back(srcBuf);
        buffers.editTop().acquire_fence = -1;
        buffers.editTop().release_fence = -1;
        buffers.editTop().status = CAMERA3_BUFFER_STATUS_OK;
    }

    BENCHMARK_SECTION("Unlock") {
        mDev->unlock(frame);
    }

    int64_t sensorTimestamp = timestamp;
    int64_t syncFrameNumber = request->frame_number;

    cm.update(ANDROID_SENSOR_TIMESTAMP, &sensorTimestamp, 1);
    cm.update(ANDROID_SYNC_FRAME_NUMBER, &syncFrameNumber, 1);

    auto result = cm.getAndLock();
    processCaptureResult(request->frame_number, result, buffers);
    cm.unlock(result);

    // Cache the settings for next time
    mLastRequestSettings.acquire(cm);

    /* Print stats */
    char bmOut[1024];
    BENCHMARK_STRING(bmOut, sizeof(bmOut), 6);
    ALOGV("    time (avg): %s", bmOut);

    return NO_ERROR;
}

/* Sends the mandatory shutter notification for a frame to the framework. */
inline void Camera::notifyShutter(uint32_t frameNumber, uint64_t timestamp) {
    camera3_notify_msg_t msg;
    msg.type = CAMERA3_MSG_SHUTTER;
    msg.message.shutter.frame_number = frameNumber;
    msg.message.shutter.timestamp = timestamp;
    mCallbackOps->notify(mCallbackOps, &msg);
}

/* Packages metadata and filled output buffers into a single (non-partial)
 * capture result and hands it to the framework callback. */
void Camera::processCaptureResult(uint32_t frameNumber, const camera_metadata_t *result, const Vector<camera3_stream_buffer> &buffers) {
    camera3_capture_result captureResult;
    captureResult.frame_number = frameNumber;
    captureResult.result = result;
    captureResult.num_output_buffers = buffers.size();
    captureResult.output_buffers = buffers.array();
    captureResult.input_buffer = NULL;
    captureResult.partial_result = 0;

    mCallbackOps->process_capture_result(mCallbackOps, &captureResult);
}

/******************************************************************************\
                                STATIC WRAPPERS
\******************************************************************************/
/* C-style trampolines required by the camera3_device_ops_t vtable; each one
 * recovers the Camera instance from the device pointer and forwards the call. */

int Camera::sClose(hw_device_t *device) {
    /* TODO: check device module */
    Camera *thiz = static_cast<Camera *>(reinterpret_cast<camera3_device *>(device));
    return thiz->closeDevice();
}

int Camera::sInitialize(const camera3_device *device, const camera3_callback_ops_t *callback_ops) {
    /* TODO: check pointers */
    Camera *thiz = static_cast<Camera *>(const_cast<camera3_device *>(device));
    return thiz->initialize(callback_ops);
}

int Camera::sConfigureStreams(const camera3_device *device, camera3_stream_configuration_t *stream_list) {
    /* TODO: check pointers */
    Camera *thiz = static_cast<Camera *>(const_cast<camera3_device *>(device));
    return thiz->configureStreams(stream_list);
}

int Camera::sRegisterStreamBuffers(const camera3_device *device, const camera3_stream_buffer_set_t *buffer_set) {
    /* TODO: check pointers */
    Camera *thiz = static_cast<Camera *>(const_cast<camera3_device *>(device));
    return thiz->registerStreamBuffers(buffer_set);
}

const camera_metadata_t * Camera::sConstructDefaultRequestSettings(const camera3_device *device, int type) {
    /* TODO: check pointers */
    Camera *thiz = static_cast<Camera *>(const_cast<camera3_device *>(device));
    return thiz->constructDefaultRequestSettings(type);
}

int Camera::sProcessCaptureRequest(const camera3_device *device, camera3_capture_request_t *request) {
    /* TODO: check pointers */
    Camera *thiz = static_cast<Camera *>(const_cast<camera3_device *>(device));
    return thiz->processCaptureRequest(request);
}

void Camera::sGetMetadataVendorTagOps(const camera3_device *device, vendor_tag_query_ops_t *ops) {
    /* TODO: implement */
    ALOGD("%s: IMPLEMENT ME!", __FUNCTION__);
}

void Camera::sDump(const camera3_device *device, int fd) {
    /* TODO: implement */
    ALOGD("%s: IMPLEMENT ME!", __FUNCTION__);
}

int Camera::sFlush(const camera3_device *device) {
    /* TODO: implement */
    ALOGD("%s: IMPLEMENT ME!", __FUNCTION__);
    return -ENODEV;
}

/* Operation table handed to the framework when the device is opened. */
camera3_device_ops_t Camera::sOps = {
    .initialize = Camera::sInitialize,
    .configure_streams = Camera::sConfigureStreams,
    .register_stream_buffers = Camera::sRegisterStreamBuffers,
    .construct_default_request_settings = Camera::sConstructDefaultRequestSettings,
    .process_capture_request = Camera::sProcessCaptureRequest,
    .get_metadata_vendor_tag_ops = Camera::sGetMetadataVendorTagOps,
    .dump = Camera::sDump,
    .flush = Camera::sFlush,
    .reserved = {0}
};

}; /* namespace android */