├── CMakeLists.txt
├── README.md
├── example
│   ├── CMakeLists.txt
│   ├── takephoto.cpp
│   └── takevideo.cpp
├── include
│   ├── lccv.hpp
│   ├── libcamera_app.hpp
│   └── libcamera_app_options.hpp
└── src
    ├── lccv.cpp
    ├── libcamera_app.cpp
    └── libcamera_app_options.cpp
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.6)
2 | 
3 | project(lccv)
4 | 
5 | if (NOT EXISTS ${CMAKE_BINARY_DIR}/CMakeCache.txt)
6 | if (NOT CMAKE_BUILD_TYPE)
7 | set(CMAKE_BUILD_TYPE "Release" CACHE STRING "" FORCE)
8 | message(STATUS "No previous build - default to Release build")
9 | endif()
10 | endif()
11 | 
12 | set (CMAKE_EXPORT_COMPILE_COMMANDS ON)
13 | set (CMAKE_CXX_STANDARD 17)
14 | add_compile_options(-Wall -Wextra -pedantic -Wno-unused-parameter -faligned-new -Werror -Wfatal-errors)
15 | add_definitions(-D_FILE_OFFSET_BITS=64)
16 | 
17 | if (CMAKE_COMPILER_IS_GNUCXX)
18 | add_compile_options(-Wno-psabi)
19 | endif()
20 | 
21 | option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
22 | 
23 | IF (NOT ENABLE_COMPILE_FLAGS_FOR_TARGET)
24 | # On a Pi this will give us armhf or arm64.
25 | execute_process(COMMAND dpkg-architecture -qDEB_HOST_ARCH
26 | OUTPUT_VARIABLE ENABLE_COMPILE_FLAGS_FOR_TARGET OUTPUT_STRIP_TRAILING_WHITESPACE)
27 | endif()
28 | message(STATUS "Platform: ${ENABLE_COMPILE_FLAGS_FOR_TARGET}")
29 | if ("${ENABLE_COMPILE_FLAGS_FOR_TARGET}" STREQUAL "arm64")
30 | # 64-bit binaries can be fully optimised.
31 | add_definitions(-ftree-vectorize)
32 | elseif ("${ENABLE_COMPILE_FLAGS_FOR_TARGET}" STREQUAL "armv8-neon")
33 | # Only build with 32-bit Pi 3/4 specific optimisations if requested on the command line.
34 | add_definitions(-mfpu=neon-fp-armv8 -ftree-vectorize)
35 | endif()
36 | 
37 | # Source package generation setup.
38 | set(CPACK_GENERATOR "TXZ")
39 | set(CPACK_PACKAGE_FILE_NAME "lccv-build")
40 | set(CPACK_SOURCE_GENERATOR "TXZ")
41 | set(CPACK_INSTALL_SCRIPTS ${CMAKE_SOURCE_DIR}/package.cmake)
42 | set(CPACK_SOURCE_PACKAGE_FILE_NAME "lccv-src")
43 | set(CPACK_SOURCE_IGNORE_FILES "/\.git*;/build;")
44 | include(CPack)
45 | 
46 | find_package(PkgConfig REQUIRED)
47 | find_package(OpenCV REQUIRED)
48 | 
49 | pkg_check_modules(LIBCAMERA REQUIRED libcamera)
50 | message(STATUS "libcamera library found:")
51 | message(STATUS " version: ${LIBCAMERA_VERSION}")
52 | message(STATUS " libraries: ${LIBCAMERA_LINK_LIBRARIES}")
53 | message(STATUS " include path: ${LIBCAMERA_INCLUDE_DIRS}")
54 | 
55 | include(GNUInstallDirs)
56 | 
57 | set(SOURCES
58 | src/lccv.cpp
59 | src/libcamera_app.cpp
60 | src/libcamera_app_options.cpp
61 | )
62 | set(HEADERS
63 | include/lccv.hpp
64 | include/libcamera_app.hpp
65 | include/libcamera_app_options.hpp
66 | )
67 | 
68 | add_library(liblccv ${SOURCES} ${HEADERS})
69 | target_include_directories(liblccv
70 | PUBLIC
71 | include
72 | ${LIBCAMERA_INCLUDE_DIRS}
73 | ${OpenCV_INCLUDE_DIRS}
74 | )
75 | 
76 | set_target_properties(liblccv PROPERTIES PREFIX "" IMPORT_PREFIX "")
77 | target_link_libraries(liblccv pthread ${LIBCAMERA_LINK_LIBRARIES} ${OpenCV_LIBS})
78 | 
79 | install(TARGETS liblccv
80 | LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
81 | ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
82 | )
83 | install(
84 | FILES ${HEADERS} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
85 | )
86 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | libcamera bindings for OpenCV
2 | =============================
3 | 
4 | LCCV (*libcamera bindings for OpenCV*) is a small wrapper library that provides access to the Raspberry Pi camera in OpenCV.
5 | 
6 | ### WARNING:
7 | 
8 | This is still an early version of the project, so expect some bugs.
9 | 
10 | Please help with the development by reporting the bugs and issues you encounter, committing bugfixes, and proposing ideas!
11 | 
12 | Context
13 | -------
14 | 
15 | In Raspbian Bullseye, the Raspberry Pi camera framework was completely rebased from MMAL to the libcamera library, breaking most of the previous camera dependencies.
16 | 
17 | Raspbian comes with the handy `libcamera-apps` package that duplicates the old `raspistill` and `raspivid` applications, with some added functionality, such as the possibility of adding post-processing routines to the capture process.
18 | 
19 | However, this is still limited, as it doesn't allow full integration of the camera into your own software.
20 | 
21 | LCCV aims to provide a simple-to-use wrapper library that allows you to access the camera from a C++ program and capture images in cv::Mat format.
22 | 
23 | Features and limitations
24 | ------------------------
25 | 
26 | LCCV is heavily based on Raspbian's `libcamera-apps` source code. It aims to offer full control over the camera, so the original options class was kept rather than replaced with one based on OpenCV's VideoCapture class. Note that only the camera parameters are available; other features, such as previewing, cropping and post-processing, were stripped from the library.
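A minimal sketch of what this looks like in practice (adapted from `example/takephoto.cpp`; the output file name is only illustrative):

    #include <lccv.hpp>
    #include <opencv2/opencv.hpp>

    int main()
    {
        cv::Mat image;
        lccv::PiCamera cam;
        // All camera parameters are set through the Options object.
        cam.options->photo_width = 2028;
        cam.options->photo_height = 1520;
        cam.options->verbose = true;
        // capturePhoto() opens and configures the camera if needed
        // and fills the cv::Mat with the captured still.
        if (cam.capturePhoto(image))
            cv::imwrite("photo.jpg", image);
        return 0;
    }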
27 | 
28 | Prerequisites
29 | -------------
30 | 
31 | - Raspbian Bullseye
32 | - Development libraries (gcc/clang, cmake, git)
33 | - libcamera (with development packages)
34 | - OpenCV (with development packages)
35 | 
36 | Install everything using the following command:
37 | 
38 |     sudo apt install build-essential cmake git libcamera-dev libopencv-dev
39 | 
40 | Building and installing
41 | -----------------------
42 | 
43 |     git clone https://github.com/kbarni/LCCV.git
44 |     cd LCCV
45 |     mkdir build
46 |     cd build
47 |     cmake ..
48 |     make
49 |     sudo make install
50 | 
51 | Using the library
52 | -----------------
53 | 
54 | Please refer to the [wiki](https://github.com/kbarni/LCCV/wiki).
55 | 
56 | Also see some example code in the `example` folder.
57 | 
58 | License
59 | -------
60 | 
61 | The source code is made available under the simplified [BSD 2-Clause license](https://spdx.org/licenses/BSD-2-Clause.html).
62 | 
--------------------------------------------------------------------------------
/example/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.0.0)
2 | project(takephoto)
3 | 
4 | set (CMAKE_CXX_STANDARD 17)
5 | 
6 | find_package(OpenCV REQUIRED)
7 | find_package(PkgConfig REQUIRED)
8 | pkg_check_modules(LIBCAMERA REQUIRED libcamera)
9 | 
10 | include_directories(${LIBCAMERA_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS})
11 | add_executable(takephoto takephoto.cpp)
12 | add_executable(takevideo takevideo.cpp)
13 | 
14 | target_link_libraries(takephoto -llccv ${OpenCV_LIBS})
15 | target_link_libraries(takevideo -llccv ${OpenCV_LIBS})
16 | 
--------------------------------------------------------------------------------
/example/takephoto.cpp:
--------------------------------------------------------------------------------
1 | #include <lccv.hpp>
2 | #include <opencv2/opencv.hpp>
3 | 
4 | int main()
5 | {
6 | cv::Mat image;
7 | lccv::PiCamera cam;
8 | //cam.options->width=4056;
9 | //cam.options->height=3040;
10 | cam.options->photo_width=2028;
11 | cam.options->photo_height=1520;
12 | cam.options->verbose=true;
13 | cv::namedWindow("Image",cv::WINDOW_NORMAL);
14 | for(int i=0;i<100;i++){
15 | std::cout<<i<<std::endl;
16 | if(!cam.capturePhoto(image)){
17 | std::cout<<"Camera error"<<std::endl;
18 | break;
19 | }
20 | cv::imshow("Image",image);
21 | cv::waitKey(30);
22 | }
23 | cv::destroyWindow("Image");
24 | return 0;
25 | }
26 | 
--------------------------------------------------------------------------------
/example/takevideo.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 | #include <lccv.hpp>
3 | #include <opencv2/opencv.hpp>
4 | 
5 | int main() {
6 | uint32_t num_cams = LibcameraApp::GetNumberCameras();
7 | std::cout << "Found " << num_cams << " cameras."
<< std::endl; 8 | 9 | uint32_t height = 480; 10 | uint32_t width = 640; 11 | std::cout<<"Sample program for LCCV video capture"<video_width=width; 17 | cam.options->video_height=height; 18 | cam.options->framerate=30; 19 | cam.options->verbose=true; 20 | cv::namedWindow("Video",cv::WINDOW_NORMAL); 21 | cam.startVideo(); 22 | 23 | lccv::PiCamera cam2(1); 24 | cam2.options->video_width=width; 25 | cam2.options->video_height=height; 26 | cam2.options->framerate=30; 27 | cam2.options->verbose=true; 28 | if (1 < num_cams) { 29 | cam2.startVideo(); 30 | } 31 | 32 | int ch=0; 33 | while(ch!=27){ 34 | if (!cam.getVideoFrame(image,1000)){ 35 | std::cout<<"Timeout error"< 5 | #include 6 | #include 7 | #include 8 | 9 | #include "libcamera_app.hpp" 10 | #include "libcamera_app_options.hpp" 11 | 12 | namespace lccv { 13 | 14 | class PiCamera { 15 | public: 16 | PiCamera(); 17 | PiCamera(uint32_t id); 18 | ~PiCamera(); 19 | 20 | Options *options; 21 | 22 | //Photo mode 23 | bool startPhoto(); 24 | bool capturePhoto(cv::Mat &frame); 25 | bool stopPhoto(); 26 | 27 | //Video mode 28 | bool startVideo(); 29 | bool getVideoFrame(cv::Mat &frame, unsigned int timeout); 30 | void stopVideo(); 31 | 32 | //Applies new zoom options. Before invoking this func modify options->roi. 33 | void ApplyZoomOptions(); 34 | 35 | protected: 36 | void run(); 37 | 38 | std::unique_ptr app; 39 | void getImage(cv::Mat &frame, CompletedRequestPtr &payload); 40 | static void *videoThreadFunc(void *p); 41 | pthread_t videothread; 42 | unsigned int still_flags; 43 | unsigned int vw,vh,vstr; 44 | std::atomic running,frameready; 45 | uint8_t *framebuffer; 46 | std::mutex mtx; 47 | bool camerastarted; 48 | }; 49 | 50 | } 51 | #endif 52 | -------------------------------------------------------------------------------- /include/libcamera_app.hpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020-2021, Raspberry Pi (Trading) Ltd. 4 | * 5 | * libcamera_app.hpp - base class for libcamera apps. 
6 | */ 7 | 8 | #pragma once 9 | 10 | #include 11 | 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | 34 | class Options; 35 | struct CompletedRequest; 36 | using CompletedRequestPtr = std::shared_ptr; 37 | 38 | namespace controls = libcamera::controls; 39 | namespace properties = libcamera::properties; 40 | 41 | class LibcameraApp 42 | { 43 | public: 44 | using Stream = libcamera::Stream; 45 | using FrameBuffer = libcamera::FrameBuffer; 46 | using ControlList = libcamera::ControlList; 47 | using Request = libcamera::Request; 48 | using CameraManager = libcamera::CameraManager; 49 | using Camera = libcamera::Camera; 50 | using CameraConfiguration = libcamera::CameraConfiguration; 51 | using FrameBufferAllocator = libcamera::FrameBufferAllocator; 52 | using StreamRole = libcamera::StreamRole; 53 | using StreamRoles = std::vector; 54 | using PixelFormat = libcamera::PixelFormat; 55 | using StreamConfiguration = libcamera::StreamConfiguration; 56 | using BufferMap = Request::BufferMap; 57 | using Size = libcamera::Size; 58 | using Rectangle = libcamera::Rectangle; 59 | enum class MsgType 60 | { 61 | RequestComplete, 62 | Quit 63 | }; 64 | typedef std::variant MsgPayload; 65 | struct Msg 66 | { 67 | Msg(MsgType const &t) : type(t) {} 68 | template 69 | Msg(MsgType const &t, T p) : type(t), payload(std::forward(p)) 70 | { 71 | } 72 | MsgType type; 73 | MsgPayload payload; 74 | }; 75 | 76 | // Some flags that can be used to give hints to the camera configuration. 77 | static constexpr unsigned int FLAG_STILL_NONE = 0; 78 | static constexpr unsigned int FLAG_STILL_BGR = 1; // supply BGR images, not YUV 79 | static constexpr unsigned int FLAG_STILL_RGB = 2; // supply RGB images, not YUV 80 | static constexpr unsigned int FLAG_STILL_RAW = 4; // request raw image stream 81 | static constexpr unsigned int FLAG_STILL_DOUBLE_BUFFER = 8; // double-buffer stream 82 | static constexpr unsigned int FLAG_STILL_TRIPLE_BUFFER = 16; // triple-buffer stream 83 | static constexpr unsigned int FLAG_STILL_BUFFER_MASK = 24; // mask for buffer flags 84 | 85 | static constexpr unsigned int FLAG_VIDEO_NONE = 0; 86 | static constexpr unsigned int FLAG_VIDEO_RAW = 1; // request raw image stream 87 | static constexpr unsigned int FLAG_VIDEO_JPEG_COLOURSPACE = 2; // force JPEG colour space 88 | 89 | LibcameraApp(std::unique_ptr const opts = nullptr); 90 | virtual ~LibcameraApp(); 91 | 92 | Options *GetOptions() const { return options_.get(); } 93 | static uint32_t GetNumberCameras(); 94 | 95 | std::string const &CameraId() const; 96 | void OpenCamera(); 97 | void CloseCamera(); 98 | 99 | void ConfigureStill(unsigned int flags = FLAG_STILL_NONE); 100 | void ConfigureViewfinder(); 101 | 102 | void Teardown(); 103 | void StartCamera(); 104 | void StopCamera(); 105 | 106 | void ApplyRoiSettings(); 107 | 108 | Msg Wait(); 109 | void PostMessage(MsgType &t, MsgPayload &p); 110 | 111 | Stream *GetStream(std::string const &name, unsigned int *w = nullptr, unsigned int *h = nullptr, 112 | unsigned int *stride = nullptr) const; 113 | Stream *ViewfinderStream(unsigned int *w = nullptr, unsigned int *h = nullptr, 114 | unsigned int *stride = nullptr) const; 115 | Stream *StillStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 116 | Stream 
*RawStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 117 | Stream *VideoStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 118 | Stream *LoresStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 119 | Stream *GetMainStream() const; 120 | 121 | std::vector> Mmap(FrameBuffer *buffer) const; 122 | 123 | void SetControls(ControlList &controls); 124 | void StreamDimensions(Stream const *stream, unsigned int *w, unsigned int *h, unsigned int *stride) const; 125 | 126 | protected: 127 | std::unique_ptr options_; 128 | 129 | private: 130 | static std::shared_ptr getCameraManager() { 131 | static std::shared_ptr camera_manager_; 132 | if (!camera_manager_) { 133 | camera_manager_ = std::make_shared(); 134 | int ret = camera_manager_->start(); 135 | if (ret) 136 | throw std::runtime_error("camera manager failed to start," 137 | "code " + std::to_string(-ret)); 138 | } 139 | 140 | return camera_manager_; 141 | } 142 | 143 | template 144 | class MessageQueue 145 | { 146 | public: 147 | template 148 | void Post(U &&msg) 149 | { 150 | std::unique_lock lock(mutex_); 151 | queue_.push(std::forward(msg)); 152 | cond_.notify_one(); 153 | } 154 | T Wait() 155 | { 156 | std::unique_lock lock(mutex_); 157 | cond_.wait(lock, [this] { return !queue_.empty(); }); 158 | T msg = std::move(queue_.front()); 159 | queue_.pop(); 160 | return msg; 161 | } 162 | void Clear() 163 | { 164 | std::unique_lock lock(mutex_); 165 | queue_ = {}; 166 | } 167 | 168 | private: 169 | std::queue queue_; 170 | std::mutex mutex_; 171 | std::condition_variable cond_; 172 | }; 173 | 174 | void setupCapture(); 175 | void makeRequests(); 176 | void queueRequest(CompletedRequest *completed_request); 177 | void requestComplete(Request *request); 178 | void configureDenoise(const std::string &denoise_mode); 179 | 180 | std::shared_ptr camera_; 181 | bool camera_acquired_ = false; 182 | std::unique_ptr configuration_; 183 | std::map>> mapped_buffers_; 184 | std::map streams_; 185 | FrameBufferAllocator *allocator_ = nullptr; 186 | std::map> frame_buffers_; 187 | std::queue free_requests_; 188 | std::vector> requests_; 189 | std::mutex completed_requests_mutex_; 190 | std::set completed_requests_; 191 | bool camera_started_ = false; 192 | std::mutex camera_stop_mutex_; 193 | MessageQueue msg_queue_; 194 | // For setting camera controls. 
195 | std::mutex control_mutex_; 196 | ControlList controls_; 197 | // Other: 198 | uint64_t last_timestamp_; 199 | uint64_t sequence_ = 0; 200 | }; 201 | 202 | struct FrameInfo 203 | { 204 | FrameInfo(libcamera::ControlList &ctrls) 205 | : exposure_time(0.0), digital_gain(0.0), colour_gains({ { 0.0f, 0.0f } }), focus(0.0), aelock(false) 206 | { 207 | auto exp = ctrls.get(libcamera::controls::ExposureTime); 208 | if (exp) 209 | exposure_time = *exp; 210 | 211 | auto ag = ctrls.get(libcamera::controls::AnalogueGain); 212 | if (ag) 213 | analogue_gain = *ag; 214 | 215 | auto dg = ctrls.get(libcamera::controls::DigitalGain); 216 | if (dg) 217 | digital_gain = *dg; 218 | 219 | auto cg = ctrls.get(libcamera::controls::ColourGains); 220 | if (cg) 221 | { 222 | colour_gains[0] = (*cg)[0], colour_gains[1] = (*cg)[1]; 223 | } 224 | 225 | auto fom = ctrls.get(libcamera::controls::FocusFoM); 226 | if (fom) 227 | focus = *fom; 228 | 229 | auto ae = ctrls.get(libcamera::controls::AeState); 230 | if (ae) 231 | aelock = (*ae == libcamera::controls::AeStateLocked); 232 | } 233 | 234 | std::string ToString(std::string &info_string) const 235 | { 236 | std::string parsed(info_string); 237 | 238 | for (auto const &t : tokens) 239 | { 240 | std::size_t pos = parsed.find(t); 241 | if (pos != std::string::npos) 242 | { 243 | std::stringstream value; 244 | value << std::fixed << std::setprecision(2); 245 | 246 | if (t == "%frame") 247 | value << sequence; 248 | else if (t == "%fps") 249 | value << fps; 250 | else if (t == "%exp") 251 | value << exposure_time; 252 | else if (t == "%ag") 253 | value << analogue_gain; 254 | else if (t == "%dg") 255 | value << digital_gain; 256 | else if (t == "%rg") 257 | value << colour_gains[0]; 258 | else if (t == "%bg") 259 | value << colour_gains[1]; 260 | else if (t == "%focus") 261 | value << focus; 262 | else if (t == "%aelock") 263 | value << aelock; 264 | 265 | parsed.replace(pos, t.length(), value.str()); 266 | } 267 | } 268 | 269 | return parsed; 270 | } 271 | 272 | unsigned int sequence; 273 | float exposure_time; 274 | float analogue_gain; 275 | float digital_gain; 276 | std::array colour_gains; 277 | float focus; 278 | float fps; 279 | bool aelock; 280 | 281 | private: 282 | // Info text tokens. 
283 | inline static const std::string tokens[] = { "%frame", "%fps", "%exp", "%ag", "%dg", 284 | "%rg", "%bg", "%focus", "%aelock" }; 285 | }; 286 | 287 | class Metadata 288 | { 289 | public: 290 | Metadata() = default; 291 | 292 | Metadata(Metadata const &other) 293 | { 294 | std::scoped_lock other_lock(other.mutex_); 295 | data_ = other.data_; 296 | } 297 | 298 | Metadata(Metadata &&other) 299 | { 300 | std::scoped_lock other_lock(other.mutex_); 301 | data_ = std::move(other.data_); 302 | other.data_.clear(); 303 | } 304 | 305 | template 306 | void Set(std::string const &tag, T &&value) 307 | { 308 | std::scoped_lock lock(mutex_); 309 | data_.insert_or_assign(tag, std::forward(value)); 310 | } 311 | 312 | template 313 | int Get(std::string const &tag, T &value) const 314 | { 315 | std::scoped_lock lock(mutex_); 316 | auto it = data_.find(tag); 317 | if (it == data_.end()) 318 | return -1; 319 | value = std::any_cast(it->second); 320 | return 0; 321 | } 322 | 323 | void Clear() 324 | { 325 | std::scoped_lock lock(mutex_); 326 | data_.clear(); 327 | } 328 | 329 | Metadata &operator=(Metadata const &other) 330 | { 331 | std::scoped_lock lock(mutex_, other.mutex_); 332 | data_ = other.data_; 333 | return *this; 334 | } 335 | 336 | Metadata &operator=(Metadata &&other) 337 | { 338 | std::scoped_lock lock(mutex_, other.mutex_); 339 | data_ = std::move(other.data_); 340 | other.data_.clear(); 341 | return *this; 342 | } 343 | 344 | void Merge(Metadata &other) 345 | { 346 | std::scoped_lock lock(mutex_, other.mutex_); 347 | data_.merge(other.data_); 348 | } 349 | 350 | template 351 | T *GetLocked(std::string const &tag) 352 | { 353 | // This allows in-place access to the Metadata contents, 354 | // for which you should be holding the lock. 355 | auto it = data_.find(tag); 356 | if (it == data_.end()) 357 | return nullptr; 358 | return std::any_cast(&it->second); 359 | } 360 | 361 | template 362 | void SetLocked(std::string const &tag, T &&value) 363 | { 364 | // Use this only if you're holding the lock yourself. 365 | data_.insert_or_assign(tag, std::forward(value)); 366 | } 367 | 368 | // Note: use of (lowercase) lock and unlock means you can create scoped 369 | // locks with the standard lock classes. 370 | // e.g. std::lock_guard lock(metadata) 371 | void lock() { mutex_.lock(); } 372 | void unlock() { mutex_.unlock(); } 373 | 374 | private: 375 | mutable std::mutex mutex_; 376 | std::map data_; 377 | }; 378 | 379 | struct CompletedRequest 380 | { 381 | using BufferMap = libcamera::Request::BufferMap; 382 | using ControlList = libcamera::ControlList; 383 | using Request = libcamera::Request; 384 | 385 | CompletedRequest(unsigned int seq, Request *r) 386 | : sequence(seq), buffers(r->buffers()), metadata(r->metadata()), request(r) 387 | { 388 | r->reuse(); 389 | } 390 | unsigned int sequence; 391 | BufferMap buffers; 392 | ControlList metadata; 393 | Request *request; 394 | float framerate; 395 | Metadata post_process_metadata; 396 | }; 397 | -------------------------------------------------------------------------------- /include/libcamera_app_options.hpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020, Raspberry Pi (Trading) Ltd. 
4 | * 5 | * options.hpp - common program options 6 | */ 7 | 8 | #pragma once 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | enum Exposure_Modes { 20 | EXPOSURE_NORMAL = libcamera::controls::ExposureNormal, 21 | EXPOSURE_SHORT = libcamera::controls::ExposureShort, 22 | EXPOSURE_CUSTOM = libcamera::controls::ExposureCustom 23 | }; 24 | 25 | enum Metering_Modes { 26 | METERING_CENTRE = libcamera::controls::MeteringCentreWeighted, 27 | METERING_SPOT = libcamera::controls::MeteringSpot, 28 | METERING_MATRIX = libcamera::controls::MeteringMatrix, 29 | METERING_CUSTOM = libcamera::controls::MeteringCustom 30 | }; 31 | 32 | enum WhiteBalance_Modes { 33 | WB_AUTO = libcamera::controls::AwbAuto, 34 | WB_NORMAL = libcamera::controls::AwbAuto, 35 | WB_INCANDESCENT = libcamera::controls::AwbIncandescent, 36 | WB_TUNGSTEN = libcamera::controls::AwbTungsten, 37 | WB_FLUORESCENT = libcamera::controls::AwbFluorescent, 38 | WB_INDOOR = libcamera::controls::AwbIndoor, 39 | WB_DAYLIGHT = libcamera::controls::AwbDaylight, 40 | WB_CLOUDY = libcamera::controls::AwbCloudy, 41 | WB_CUSTOM = libcamera::controls::AwbAuto 42 | }; 43 | 44 | class Options 45 | { 46 | public: 47 | Options() 48 | { 49 | timeout=1000; 50 | metering_index = Metering_Modes::METERING_CENTRE; 51 | exposure_index=Exposure_Modes::EXPOSURE_NORMAL; 52 | awb_index=WhiteBalance_Modes::WB_AUTO; 53 | saturation=1.0f; 54 | contrast=1.0f; 55 | sharpness=1.0f; 56 | brightness=0.0f; 57 | shutter=0.0f; 58 | gain=0.0f; 59 | ev=0.0f; 60 | roi_x=roi_y=roi_width=roi_height=0; 61 | awb_gain_r=awb_gain_b=0; 62 | denoise="auto"; 63 | verbose=false; 64 | transform=libcamera::Transform::Identity; 65 | camera=0; 66 | } 67 | 68 | virtual ~Options() {} 69 | 70 | virtual void Print() const; 71 | 72 | void setMetering(Metering_Modes meteringmode){metering_index=meteringmode;} 73 | void setWhiteBalance(WhiteBalance_Modes wb){awb_index = wb;} 74 | void setExposureMode(Exposure_Modes exp){exposure_index = exp;} 75 | 76 | int getExposureMode(){return exposure_index;} 77 | int getMeteringMode(){return metering_index;} 78 | int getWhiteBalance(){return awb_index;} 79 | 80 | bool help; 81 | bool version; 82 | bool list_cameras; 83 | bool verbose; 84 | uint64_t timeout; // in ms 85 | unsigned int photo_width, photo_height; 86 | unsigned int video_width, video_height; 87 | bool rawfull; 88 | libcamera::Transform transform; 89 | float roi_x, roi_y, roi_width, roi_height; 90 | float shutter; 91 | float gain; 92 | float ev; 93 | float awb_gain_r; 94 | float awb_gain_b; 95 | float brightness; 96 | float contrast; 97 | float saturation; 98 | float sharpness; 99 | float framerate; 100 | std::string denoise; 101 | std::string info_text; 102 | unsigned int camera; 103 | 104 | protected: 105 | int metering_index; 106 | int exposure_index; 107 | int awb_index; 108 | 109 | private: 110 | }; 111 | -------------------------------------------------------------------------------- /src/lccv.cpp: -------------------------------------------------------------------------------- 1 | #include "lccv.hpp" 2 | #include 3 | #include 4 | 5 | using namespace cv; 6 | using namespace lccv; 7 | 8 | PiCamera::PiCamera() : PiCamera(0) {} 9 | 10 | PiCamera::PiCamera(uint32_t id) { 11 | app = std::make_unique(std::make_unique()); 12 | options = static_cast(app->GetOptions()); 13 | still_flags = LibcameraApp::FLAG_STILL_NONE; 14 | options->camera = id; 15 | options->photo_width = 4056; 16 | options->photo_height = 3040; 17 | 
options->video_width = 640; 18 | options->video_height = 480; 19 | options->framerate = 30; 20 | options->denoise = "auto"; 21 | options->timeout = 1000; 22 | options->setMetering(Metering_Modes::METERING_MATRIX); 23 | options->setExposureMode(Exposure_Modes::EXPOSURE_NORMAL); 24 | options->setWhiteBalance(WhiteBalance_Modes::WB_AUTO); 25 | options->contrast = 1.0f; 26 | options->saturation = 1.0f; 27 | still_flags |= LibcameraApp::FLAG_STILL_RGB; 28 | running.store(false, std::memory_order_release);; 29 | frameready.store(false, std::memory_order_release);; 30 | framebuffer=nullptr; 31 | camerastarted=false; 32 | } 33 | 34 | PiCamera::~PiCamera() {} 35 | 36 | void PiCamera::getImage(cv::Mat &frame, CompletedRequestPtr &payload) 37 | { 38 | unsigned int w, h, stride; 39 | libcamera::Stream *stream = app->StillStream(); 40 | app->StreamDimensions(stream, &w, &h, &stride); 41 | const std::vector> mem = 42 | app->Mmap(payload->buffers[stream]); 43 | frame.create(h,w,CV_8UC3); 44 | uint ls = w*3; 45 | uint8_t *ptr = (uint8_t *)mem[0].data(); 46 | for (unsigned int i = 0; i < h; i++, ptr += stride) 47 | { 48 | memcpy(frame.ptr(i),ptr,ls); 49 | } 50 | } 51 | 52 | bool PiCamera::startPhoto() 53 | { 54 | app->OpenCamera(); 55 | app->ConfigureStill(still_flags); 56 | camerastarted=true; 57 | return true; 58 | } 59 | bool PiCamera::stopPhoto() 60 | { 61 | if(camerastarted){ 62 | camerastarted=false; 63 | app->Teardown(); 64 | app->CloseCamera(); 65 | } 66 | return true; 67 | } 68 | 69 | bool PiCamera::capturePhoto(cv::Mat &frame) 70 | { 71 | if(!camerastarted){ 72 | app->OpenCamera(); 73 | app->ConfigureStill(still_flags); 74 | } 75 | app->StartCamera(); 76 | LibcameraApp::Msg msg = app->Wait(); 77 | if (msg.type == LibcameraApp::MsgType::Quit) 78 | return false; 79 | else if (msg.type != LibcameraApp::MsgType::RequestComplete) 80 | return false; 81 | if (app->StillStream()) 82 | { 83 | app->StopCamera(); 84 | getImage(frame, std::get(msg.payload)); 85 | app->Teardown(); 86 | app->CloseCamera(); 87 | } else { 88 | std::cerr<<"Incorrect stream received"<StopCamera(); 91 | if(!camerastarted){ 92 | app->Teardown(); 93 | app->CloseCamera(); 94 | } 95 | } 96 | return true; 97 | } 98 | 99 | bool PiCamera::startVideo() 100 | { 101 | if(camerastarted)stopPhoto(); 102 | if(running.load(std::memory_order_relaxed)){ 103 | std::cerr<<"Video thread already running"; 104 | return false; 105 | } 106 | frameready.store(false, std::memory_order_release); 107 | app->OpenCamera(); 108 | app->ConfigureViewfinder(); 109 | app->StartCamera(); 110 | 111 | int ret = pthread_create(&videothread, NULL, &videoThreadFunc, this); 112 | if (ret != 0) { 113 | std::cerr<<"Error starting video thread"; 114 | return false; 115 | } 116 | return true; 117 | } 118 | 119 | void PiCamera::stopVideo() 120 | { 121 | if(!running)return; 122 | 123 | running.store(false, std::memory_order_release);; 124 | 125 | //join thread 126 | void *status; 127 | int ret = pthread_join(videothread, &status); 128 | if(ret<0) 129 | std::cerr<<"Error joining thread"<StopCamera(); 132 | app->Teardown(); 133 | app->CloseCamera(); 134 | frameready.store(false, std::memory_order_release);; 135 | } 136 | 137 | bool PiCamera::getVideoFrame(cv::Mat &frame, unsigned int timeout) 138 | { 139 | if(!running.load(std::memory_order_acquire))return false; 140 | auto start_time = std::chrono::high_resolution_clock::now(); 141 | bool timeout_reached = false; 142 | timespec req; 143 | req.tv_sec=0; 144 | req.tv_nsec=1000000;//1ms 145 | 
while((!frameready.load(std::memory_order_acquire))&&(!timeout_reached)){ 146 | nanosleep(&req,NULL); 147 | timeout_reached = (std::chrono::high_resolution_clock::now() - start_time > std::chrono::milliseconds(timeout)); 148 | } 149 | if(frameready.load(std::memory_order_acquire)){ 150 | frame.create(vh,vw,CV_8UC3); 151 | uint ls = vw*3; 152 | mtx.lock(); 153 | uint8_t *ptr = framebuffer; 154 | for (unsigned int i = 0; i < vh; i++, ptr += vstr) 155 | memcpy(frame.ptr(i),ptr,ls); 156 | mtx.unlock(); 157 | frameready.store(false, std::memory_order_release);; 158 | return true; 159 | } 160 | else 161 | return false; 162 | } 163 | 164 | void *PiCamera::videoThreadFunc(void *p) 165 | { 166 | PiCamera *t = (PiCamera *)p; 167 | t->running.store(true, std::memory_order_release); 168 | //allocate framebuffer 169 | //unsigned int vw,vh,vstr; 170 | libcamera::Stream *stream = t->app->ViewfinderStream(&t->vw,&t->vh,&t->vstr); 171 | int buffersize=t->vh*t->vstr; 172 | if(t->framebuffer)delete[] t->framebuffer; 173 | t->framebuffer=new uint8_t[buffersize]; 174 | std::vector> mem; 175 | 176 | //main loop 177 | while(t->running.load(std::memory_order_acquire)){ 178 | LibcameraApp::Msg msg = t->app->Wait(); 179 | if (msg.type == LibcameraApp::MsgType::Quit){ 180 | std::cerr<<"Quit message received"<running.store(false,std::memory_order_release); 182 | } 183 | else if (msg.type != LibcameraApp::MsgType::RequestComplete) 184 | throw std::runtime_error("unrecognised message!"); 185 | 186 | 187 | CompletedRequestPtr payload = std::get(msg.payload); 188 | mem = t->app->Mmap(payload->buffers[stream]); 189 | t->mtx.lock(); 190 | memcpy(t->framebuffer,mem[0].data(),buffersize); 191 | t->mtx.unlock(); 192 | t->frameready.store(true, std::memory_order_release); 193 | } 194 | if(t->framebuffer){ 195 | delete[] t->framebuffer; 196 | t->framebuffer=nullptr; 197 | } 198 | return NULL; 199 | } 200 | 201 | void PiCamera::ApplyZoomOptions() 202 | { 203 | app->ApplyRoiSettings(); 204 | } -------------------------------------------------------------------------------- /src/libcamera_app.cpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2021, Raspberry Pi (Trading) Ltd. 4 | * 5 | * libcamera_app.cpp - base class for libcamera apps. 6 | */ 7 | 8 | #include "libcamera_app.hpp" 9 | #include "libcamera_app_options.hpp" 10 | 11 | LibcameraApp::LibcameraApp(std::unique_ptr opts) 12 | : options_(std::move(opts)), controls_(controls::controls) 13 | 14 | { 15 | if (!options_) 16 | options_ = std::make_unique(); 17 | controls_.clear(); 18 | } 19 | 20 | LibcameraApp::~LibcameraApp() 21 | { 22 | StopCamera(); 23 | Teardown(); 24 | CloseCamera(); 25 | } 26 | 27 | std::string const &LibcameraApp::CameraId() const 28 | { 29 | return camera_->id(); 30 | } 31 | 32 | uint32_t LibcameraApp::GetNumberCameras() { 33 | return getCameraManager()->cameras().size(); 34 | } 35 | 36 | void LibcameraApp::OpenCamera() 37 | { 38 | 39 | if (options_->verbose) 40 | std::cerr << "Opening camera..." 
<< std::endl; 41 | 42 | if (getCameraManager()->cameras().size() == 0) 43 | throw std::runtime_error("no cameras available"); 44 | if (options_->camera >= getCameraManager()->cameras().size()) 45 | throw std::runtime_error("selected camera is not available"); 46 | 47 | std::string const &cam_id = getCameraManager()->cameras()[options_->camera]->id(); 48 | camera_ = getCameraManager()->get(cam_id); 49 | if (!camera_) 50 | throw std::runtime_error("failed to find camera " + cam_id); 51 | 52 | if (camera_->acquire()) 53 | throw std::runtime_error("failed to acquire camera " + cam_id); 54 | camera_acquired_ = true; 55 | 56 | if (options_->verbose) 57 | std::cerr << "Acquired camera " << cam_id << std::endl; 58 | 59 | } 60 | 61 | void LibcameraApp::CloseCamera() 62 | { 63 | if (camera_acquired_) 64 | camera_->release(); 65 | camera_acquired_ = false; 66 | 67 | camera_.reset(); 68 | 69 | if (options_->verbose && !options_->help) 70 | std::cerr << "Camera closed" << std::endl; 71 | } 72 | 73 | void LibcameraApp::ConfigureStill(unsigned int flags) 74 | { 75 | if (options_->verbose) 76 | std::cerr << "Configuring still capture..." << std::endl; 77 | 78 | // Always request a raw stream as this forces the full resolution capture mode. 79 | // (options_->mode can override the choice of camera mode, however.) 80 | StreamRoles stream_roles = { StreamRole::StillCapture, StreamRole::Raw }; 81 | configuration_ = camera_->generateConfiguration(stream_roles); 82 | if (!configuration_) 83 | throw std::runtime_error("failed to generate still capture configuration"); 84 | 85 | // Now we get to override any of the default settings from the options_-> 86 | if (flags & FLAG_STILL_BGR) 87 | configuration_->at(0).pixelFormat = libcamera::formats::BGR888; 88 | else if (flags & FLAG_STILL_RGB) 89 | configuration_->at(0).pixelFormat = libcamera::formats::RGB888; 90 | else 91 | configuration_->at(0).pixelFormat = libcamera::formats::YUV420; 92 | if ((flags & FLAG_STILL_BUFFER_MASK) == FLAG_STILL_DOUBLE_BUFFER) 93 | configuration_->at(0).bufferCount = 2; 94 | else if ((flags & FLAG_STILL_BUFFER_MASK) == FLAG_STILL_TRIPLE_BUFFER) 95 | configuration_->at(0).bufferCount = 3; 96 | if (options_->photo_width) 97 | configuration_->at(0).size.width = options_->photo_width; 98 | if (options_->photo_height) 99 | configuration_->at(0).size.height = options_->photo_height; 100 | 101 | // configuration_->transform = options_->transform; 102 | 103 | //if (have_raw_stream && !options_->rawfull) 104 | { 105 | configuration_->at(1).size.width = configuration_->at(0).size.width; 106 | configuration_->at(1).size.height = configuration_->at(0).size.height; 107 | } 108 | configuration_->at(1).bufferCount = configuration_->at(0).bufferCount; 109 | 110 | configureDenoise(options_->denoise == "auto" ? "cdn_hq" : options_->denoise); 111 | setupCapture(); 112 | 113 | streams_["still"] = configuration_->at(0).stream(); 114 | streams_["raw"] = configuration_->at(1).stream(); 115 | 116 | if (options_->verbose) 117 | std::cerr << "Still capture setup complete" << std::endl; 118 | } 119 | 120 | void LibcameraApp::ConfigureViewfinder() 121 | { 122 | if (options_->verbose) 123 | std::cerr << "Configuring viewfinder..." 
<< std::endl; 124 | 125 | StreamRoles stream_roles = { StreamRole::Viewfinder }; 126 | configuration_ = camera_->generateConfiguration(stream_roles); 127 | if (!configuration_) 128 | throw std::runtime_error("failed to generate viewfinder configuration"); 129 | 130 | // Now we get to override any of the default settings from the options_-> 131 | configuration_->at(0).pixelFormat = libcamera::formats::RGB888; 132 | configuration_->at(0).size.width = options_->video_width; 133 | configuration_->at(0).size.height = options_->video_height; 134 | configuration_->at(0).bufferCount = 4; 135 | 136 | // configuration_->transform = options_->transform; 137 | 138 | configureDenoise(options_->denoise == "auto" ? "cdn_off" : options_->denoise); 139 | setupCapture(); 140 | 141 | streams_["viewfinder"] = configuration_->at(0).stream(); 142 | 143 | if (options_->verbose) 144 | std::cerr << "Viewfinder setup complete" << std::endl; 145 | } 146 | 147 | void LibcameraApp::Teardown() 148 | { 149 | if (options_->verbose && !options_->help) 150 | std::cerr << "Tearing down requests, buffers and configuration" << std::endl; 151 | 152 | for (auto &iter : mapped_buffers_) 153 | { 154 | // assert(iter.first->planes().size() == iter.second.size()); 155 | // for (unsigned i = 0; i < iter.first->planes().size(); i++) 156 | for (auto &span : iter.second) 157 | munmap(span.data(), span.size()); 158 | } 159 | mapped_buffers_.clear(); 160 | 161 | delete allocator_; 162 | allocator_ = nullptr; 163 | 164 | configuration_.reset(); 165 | 166 | frame_buffers_.clear(); 167 | 168 | streams_.clear(); 169 | } 170 | 171 | void LibcameraApp::StartCamera() 172 | { 173 | // This makes all the Request objects that we shall need. 174 | makeRequests(); 175 | 176 | // Build a list of initial controls that we must set in the camera before starting it. 177 | // We don't overwrite anything the application may have set before calling us. 178 | if (!controls_.get(controls::ScalerCrop) && options_->roi_width != 0 && options_->roi_height != 0) 179 | { 180 | Rectangle sensor_area = *camera_->properties().get(properties::ScalerCropMaximum); 181 | int x = options_->roi_x * sensor_area.width; 182 | int y = options_->roi_y * sensor_area.height; 183 | int w = options_->roi_width * sensor_area.width; 184 | int h = options_->roi_height * sensor_area.height; 185 | Rectangle crop(x, y, w, h); 186 | crop.translateBy(sensor_area.topLeft()); 187 | if (options_->verbose) 188 | std::cerr << "Using crop " << crop.toString() << std::endl; 189 | controls_.set(controls::ScalerCrop, crop); 190 | } 191 | 192 | // Framerate is a bit weird. If it was set programmatically, we go with that, but 193 | // otherwise it applies only to preview/video modes. For stills capture we set it 194 | // as long as possible so that we get whatever the exposure profile wants. 
195 | if (!controls_.get(controls::FrameDurationLimits)) 196 | { 197 | if (StillStream()) 198 | controls_.set(controls::FrameDurationLimits, libcamera::Span({ INT64_C(100), INT64_C(1000000000) })); 199 | else if (options_->framerate > 0) 200 | { 201 | int64_t frame_time = 1000000 / options_->framerate; // in us 202 | controls_.set(controls::FrameDurationLimits, libcamera::Span({ frame_time, frame_time })); 203 | } 204 | } 205 | 206 | if (!controls_.get(controls::ExposureTime) && options_->shutter) 207 | controls_.set(controls::ExposureTime, options_->shutter); 208 | if (!controls_.get(controls::AnalogueGain) && options_->gain) 209 | controls_.set(controls::AnalogueGain, options_->gain); 210 | if (!controls_.get(controls::AeMeteringMode)) 211 | controls_.set(controls::AeMeteringMode, options_->getMeteringMode()); 212 | if (!controls_.get(controls::AeExposureMode)) 213 | controls_.set(controls::AeExposureMode, options_->getExposureMode()); 214 | if (!controls_.get(controls::ExposureValue)) 215 | controls_.set(controls::ExposureValue, options_->ev); 216 | if (!controls_.get(controls::AwbMode)) 217 | controls_.set(controls::AwbMode, options_->getWhiteBalance()); 218 | if (!controls_.get(controls::ColourGains) && options_->awb_gain_r && options_->awb_gain_b) 219 | controls_.set(controls::ColourGains, libcamera::Span({ options_->awb_gain_r, options_->awb_gain_b })); 220 | if (!controls_.get(controls::Brightness)) 221 | controls_.set(controls::Brightness, options_->brightness); 222 | if (!controls_.get(controls::Contrast)) 223 | controls_.set(controls::Contrast, options_->contrast); 224 | if (!controls_.get(controls::Saturation)) 225 | controls_.set(controls::Saturation, options_->saturation); 226 | if (!controls_.get(controls::Sharpness)) 227 | controls_.set(controls::Sharpness, options_->sharpness); 228 | 229 | if (camera_->start(&controls_)) 230 | throw std::runtime_error("failed to start camera"); 231 | controls_.clear(); 232 | camera_started_ = true; 233 | last_timestamp_ = 0; 234 | 235 | camera_->requestCompleted.connect(this, &LibcameraApp::requestComplete); 236 | 237 | for (std::unique_ptr &request : requests_) 238 | { 239 | if (camera_->queueRequest(request.get()) < 0) 240 | throw std::runtime_error("Failed to queue request"); 241 | } 242 | 243 | if (options_->verbose) 244 | std::cerr << "Camera started!" << std::endl; 245 | } 246 | 247 | void LibcameraApp::StopCamera() 248 | { 249 | { 250 | // We don't want QueueRequest to run asynchronously while we stop the camera. 251 | std::lock_guard lock(camera_stop_mutex_); 252 | if (camera_started_) 253 | { 254 | if (camera_->stop()) 255 | throw std::runtime_error("failed to stop camera"); 256 | 257 | camera_started_ = false; 258 | } 259 | } 260 | 261 | if (camera_) 262 | camera_->requestCompleted.disconnect(this, &LibcameraApp::requestComplete); 263 | 264 | // An application might be holding a CompletedRequest, so queueRequest will get 265 | // called to delete it later, but we need to know not to try and re-queue it. 266 | completed_requests_.clear(); 267 | 268 | msg_queue_.Clear(); 269 | 270 | while (!free_requests_.empty()) 271 | free_requests_.pop(); 272 | 273 | requests_.clear(); 274 | 275 | controls_.clear(); // no need for mutex here 276 | 277 | if (options_->verbose && !options_->help) 278 | std::cerr << "Camera stopped!" 
<< std::endl; 279 | } 280 | 281 | void LibcameraApp::ApplyRoiSettings(){ 282 | if (!controls_.get(controls::ScalerCrop) && options_->roi_width != 0 && options_->roi_height != 0) 283 | { 284 | Rectangle sensor_area = *camera_->properties().get(properties::ScalerCropMaximum); 285 | int x = options_->roi_x * sensor_area.width; 286 | int y = options_->roi_y * sensor_area.height; 287 | int w = options_->roi_width * sensor_area.width; 288 | int h = options_->roi_height * sensor_area.height; 289 | Rectangle crop(x, y, w, h); 290 | crop.translateBy(sensor_area.topLeft()); 291 | if (options_->verbose) 292 | std::cerr << "Using crop " << crop.toString() << std::endl; 293 | controls_.set(controls::ScalerCrop, crop); 294 | } 295 | } 296 | 297 | LibcameraApp::Msg LibcameraApp::Wait() 298 | { 299 | return msg_queue_.Wait(); 300 | } 301 | 302 | void LibcameraApp::queueRequest(CompletedRequest *completed_request) 303 | { 304 | BufferMap buffers(std::move(completed_request->buffers)); 305 | 306 | Request *request = completed_request->request; 307 | assert(request); 308 | 309 | // This function may run asynchronously so needs protection from the 310 | // camera stopping at the same time. 311 | std::lock_guard stop_lock(camera_stop_mutex_); 312 | if (!camera_started_) 313 | return; 314 | 315 | // An application could be holding a CompletedRequest while it stops and re-starts 316 | // the camera, after which we don't want to queue another request now. 317 | { 318 | std::lock_guard lock(completed_requests_mutex_); 319 | auto it = completed_requests_.find(completed_request); 320 | delete completed_request; 321 | if (it == completed_requests_.end()) 322 | return; 323 | completed_requests_.erase(it); 324 | } 325 | 326 | for (auto const &p : buffers) 327 | { 328 | if (request->addBuffer(p.first, p.second) < 0) 329 | throw std::runtime_error("failed to add buffer to request in QueueRequest"); 330 | } 331 | 332 | { 333 | std::lock_guard lock(control_mutex_); 334 | request->controls() = std::move(controls_); 335 | } 336 | 337 | if (camera_->queueRequest(request) < 0) 338 | throw std::runtime_error("failed to queue request"); 339 | } 340 | 341 | void LibcameraApp::PostMessage(MsgType &t, MsgPayload &p) 342 | { 343 | msg_queue_.Post(Msg(t, std::move(p))); 344 | } 345 | 346 | libcamera::Stream *LibcameraApp::GetStream(std::string const &name, unsigned int *w, unsigned int *h, 347 | unsigned int *stride) const 348 | { 349 | auto it = streams_.find(name); 350 | if (it == streams_.end()) 351 | return nullptr; 352 | StreamDimensions(it->second, w, h, stride); 353 | return it->second; 354 | } 355 | 356 | libcamera::Stream *LibcameraApp::ViewfinderStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 357 | { 358 | return GetStream("viewfinder", w, h, stride); 359 | } 360 | 361 | libcamera::Stream *LibcameraApp::StillStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 362 | { 363 | return GetStream("still", w, h, stride); 364 | } 365 | 366 | libcamera::Stream *LibcameraApp::RawStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 367 | { 368 | return GetStream("raw", w, h, stride); 369 | } 370 | 371 | libcamera::Stream *LibcameraApp::VideoStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 372 | { 373 | return GetStream("video", w, h, stride); 374 | } 375 | 376 | libcamera::Stream *LibcameraApp::LoresStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 377 | { 378 | return GetStream("lores", w, h, stride); 379 | } 380 | 381 | libcamera::Stream 
*LibcameraApp::GetMainStream() const 382 | { 383 | for (auto &p : streams_) 384 | { 385 | if (p.first == "viewfinder" || p.first == "still" || p.first == "video") 386 | return p.second; 387 | } 388 | 389 | return nullptr; 390 | } 391 | 392 | std::vector> LibcameraApp::Mmap(FrameBuffer *buffer) const 393 | { 394 | auto item = mapped_buffers_.find(buffer); 395 | if (item == mapped_buffers_.end()) 396 | return {}; 397 | return item->second; 398 | } 399 | 400 | void LibcameraApp::SetControls(ControlList &controls) 401 | { 402 | std::lock_guard lock(control_mutex_); 403 | controls_ = std::move(controls); 404 | } 405 | 406 | void LibcameraApp::StreamDimensions(Stream const *stream, unsigned int *w, unsigned int *h, unsigned int *stride) const 407 | { 408 | StreamConfiguration const &cfg = stream->configuration(); 409 | if (w) 410 | *w = cfg.size.width; 411 | if (h) 412 | *h = cfg.size.height; 413 | if (stride) 414 | *stride = cfg.stride; 415 | } 416 | 417 | void LibcameraApp::setupCapture() 418 | { 419 | // First finish setting up the configuration. 420 | 421 | CameraConfiguration::Status validation = configuration_->validate(); 422 | if (validation == CameraConfiguration::Invalid) 423 | throw std::runtime_error("failed to valid stream configurations"); 424 | else if (validation == CameraConfiguration::Adjusted) 425 | std::cerr << "Stream configuration adjusted" << std::endl; 426 | 427 | if (camera_->configure(configuration_.get()) < 0) 428 | throw std::runtime_error("failed to configure streams"); 429 | if (options_->verbose) 430 | std::cerr << "Camera streams configured" << std::endl; 431 | 432 | // Next allocate all the buffers we need, mmap them and store them on a free list. 433 | 434 | allocator_ = new FrameBufferAllocator(camera_); 435 | for (StreamConfiguration &config : *configuration_) 436 | { 437 | Stream *stream = config.stream(); 438 | 439 | if (allocator_->allocate(stream) < 0) 440 | throw std::runtime_error("failed to allocate capture buffers"); 441 | 442 | for (const std::unique_ptr &buffer : allocator_->buffers(stream)) 443 | { 444 | // "Single plane" buffers appear as multi-plane here, but we can spot them because then 445 | // planes all share the same fd. We accumulate them so as to mmap the buffer only once. 446 | size_t buffer_size = 0; 447 | for (unsigned i = 0; i < buffer->planes().size(); i++) 448 | { 449 | const FrameBuffer::Plane &plane = buffer->planes()[i]; 450 | buffer_size += plane.length; 451 | if (i == buffer->planes().size() - 1 || plane.fd.get() != buffer->planes()[i + 1].fd.get()) 452 | { 453 | void *memory = mmap(NULL, buffer_size, PROT_READ | PROT_WRITE, MAP_SHARED, plane.fd.get(), 0); 454 | mapped_buffers_[buffer.get()].push_back( 455 | libcamera::Span(static_cast(memory), buffer_size)); 456 | buffer_size = 0; 457 | } 458 | } 459 | frame_buffers_[stream].push(buffer.get()); 460 | } 461 | } 462 | if (options_->verbose) 463 | std::cerr << "Buffers allocated and mapped" << std::endl; 464 | 465 | // The requests will be made when StartCamera() is called. 
466 | } 467 | 468 | void LibcameraApp::makeRequests() 469 | { 470 | auto free_buffers(frame_buffers_); 471 | while (true) 472 | { 473 | for (StreamConfiguration &config : *configuration_) 474 | { 475 | Stream *stream = config.stream(); 476 | if (stream == configuration_->at(0).stream()) 477 | { 478 | if (free_buffers[stream].empty()) 479 | { 480 | if (options_->verbose) 481 | std::cerr << "Requests created" << std::endl; 482 | return; 483 | } 484 | std::unique_ptr request = camera_->createRequest(); 485 | if (!request) 486 | throw std::runtime_error("failed to make request"); 487 | requests_.push_back(std::move(request)); 488 | } 489 | else if (free_buffers[stream].empty()) 490 | throw std::runtime_error("concurrent streams need matching numbers of buffers"); 491 | 492 | FrameBuffer *buffer = free_buffers[stream].front(); 493 | free_buffers[stream].pop(); 494 | if (requests_.back()->addBuffer(stream, buffer) < 0) 495 | throw std::runtime_error("failed to add buffer to request"); 496 | } 497 | } 498 | } 499 | 500 | void LibcameraApp::requestComplete(Request *request) 501 | { 502 | if (request->status() == Request::RequestCancelled) 503 | return; 504 | 505 | CompletedRequest *r = new CompletedRequest(sequence_++, request); 506 | CompletedRequestPtr payload(r, [this](CompletedRequest *cr) { this->queueRequest(cr); }); 507 | { 508 | std::lock_guard lock(completed_requests_mutex_); 509 | completed_requests_.insert(r); 510 | } 511 | 512 | // We calculate the instantaneous framerate in case anyone wants it. 513 | uint64_t timestamp = payload->buffers.begin()->second->metadata().timestamp; 514 | if (last_timestamp_ == 0 || last_timestamp_ == timestamp) 515 | payload->framerate = 0; 516 | else 517 | payload->framerate = 1e9 / (timestamp - last_timestamp_); 518 | last_timestamp_ = timestamp; 519 | 520 | msg_queue_.Post(Msg(MsgType::RequestComplete, std::move(payload))); 521 | } 522 | 523 | void LibcameraApp::configureDenoise(const std::string &denoise_mode) 524 | { 525 | using namespace libcamera::controls::draft; 526 | 527 | static const std::map denoise_table = { 528 | { "off", NoiseReductionModeOff }, 529 | { "cdn_off", NoiseReductionModeMinimal }, 530 | { "cdn_fast", NoiseReductionModeFast }, 531 | { "cdn_hq", NoiseReductionModeHighQuality } 532 | }; 533 | NoiseReductionModeEnum denoise; 534 | 535 | auto const mode = denoise_table.find(denoise_mode); 536 | if (mode == denoise_table.end()) 537 | throw std::runtime_error("Invalid denoise mode " + denoise_mode); 538 | denoise = mode->second; 539 | 540 | controls_.set(NoiseReductionMode, denoise); 541 | } 542 | -------------------------------------------------------------------------------- /src/libcamera_app_options.cpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020, Raspberry Pi (Trading) Ltd. 
4 | * 5 | * options.cpp - common program options helpers 6 | */ 7 | #include "libcamera_app_options.hpp" 8 | 9 | void Options::Print() const 10 | { 11 | std::cerr << "Options:" << std::endl; 12 | std::cerr << " verbose: " << verbose << std::endl; 13 | std::cerr << " info_text:" << info_text << std::endl; 14 | std::cerr << " timeout: " << timeout << std::endl; 15 | std::cerr << " photo resolution: " << photo_width << " x "<< photo_height << std::endl; 16 | std::cerr << " video resolution: " << video_width << " x " << video_height << std::endl; 17 | std::cerr << " rawfull: " << rawfull << std::endl; 18 | std::cerr << " transform: " << transformToString(transform) << std::endl; 19 | if (roi_width == 0 || roi_height == 0) 20 | std::cerr << " roi: all" << std::endl; 21 | else 22 | std::cerr << " roi: " << roi_x << "," << roi_y << "," << roi_width << "," << roi_height << std::endl; 23 | if (shutter) 24 | std::cerr << " shutter: " << shutter << std::endl; 25 | if (gain) 26 | std::cerr << " gain: " << gain << std::endl; 27 | std::cerr << " metering: " << metering_index << std::endl; 28 | std::cerr << " exposure: " << exposure_index << std::endl; 29 | std::cerr << " ev: " << ev << std::endl; 30 | std::cerr << " awb: " << awb_index << std::endl; 31 | if (awb_gain_r && awb_gain_b) 32 | std::cerr << " awb gains: red " << awb_gain_r << " blue " << awb_gain_b << std::endl; 33 | std::cerr << " brightness: " << brightness << std::endl; 34 | std::cerr << " contrast: " << contrast << std::endl; 35 | std::cerr << " saturation: " << saturation << std::endl; 36 | std::cerr << " sharpness: " << sharpness << std::endl; 37 | std::cerr << " framerate: " << framerate << std::endl; 38 | std::cerr << " denoise: " << denoise << std::endl; 39 | } 40 | --------------------------------------------------------------------------------
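For reference, a minimal downstream CMake setup for a program that links against the installed library, modelled on example/CMakeLists.txt above (the project name `myapp` and source file `main.cpp` are placeholders):

    cmake_minimum_required(VERSION 3.6)
    project(myapp)

    set(CMAKE_CXX_STANDARD 17)

    find_package(OpenCV REQUIRED)
    find_package(PkgConfig REQUIRED)
    pkg_check_modules(LIBCAMERA REQUIRED libcamera)

    # liblccv installs its headers to the system include dir and the
    # library as liblccv.so, so -llccv resolves after `sudo make install`.
    include_directories(${LIBCAMERA_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS})
    add_executable(myapp main.cpp)
    target_link_libraries(myapp -llccv ${OpenCV_LIBS})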