├── Bullseye_32 ├── CMakeLists.txt ├── QR.cbp └── main.cpp ├── Bullseye_64 ├── CMakeLists.txt ├── QR.cbp └── main.cpp ├── Bullseye_64_LCCV ├── CMakeLists.txt ├── QR.cbp ├── QR.layout ├── include │ ├── lccv.hpp │ ├── libcamera_app.hpp │ └── libcamera_app_options.hpp └── src │ ├── lccv.cpp │ ├── libcamera_app.cpp │ ├── libcamera_app_options.cpp │ └── main.cpp ├── Buster_32 ├── CMakeLists.txt ├── QRpi.cbp └── main.cpp ├── Buster_64 ├── CMakeLists.txt ├── QRpi.cbp └── main.cpp ├── LICENSE └── README.md /Bullseye_32/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0 FATAL_ERROR) 2 | 3 | project(QRpi) 4 | 5 | macro(use_cxx11) 6 | if (CMAKE_VERSION VERSION_LESS "3.1") 7 | if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 8 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 9 | endif () 10 | else () 11 | set (CMAKE_CXX_STANDARD 17) 12 | endif () 13 | endmacro(use_cxx11) 14 | 15 | find_package(OpenCV REQUIRED) 16 | 17 | set(SOURCES ./main.cpp) 18 | 19 | set(EXTRA_LIBS ${OpenCV_LIBS} libgobject-2.0.so libgstreamer-1.0.so libgstapp-1.0.so /usr/local/lib/libzbar.so) 20 | 21 | set(EXECUTABLE_OUTPUT_PATH "./") 22 | 23 | link_directories(/usr/lib/arm-linux-gnueabihf/ /usr/local/lib/) 24 | 25 | include_directories(${OpenCV_INCLUDE_DIRS}) 26 | 27 | add_link_options(-fopenmp -s -ldl -lpthread) 28 | 29 | add_executable(QRpi ${SOURCES}) 30 | 31 | target_link_libraries(QRpi ${EXTRA_LIBS}) -------------------------------------------------------------------------------- /Bullseye_32/QR.cbp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 52 | 53 | -------------------------------------------------------------------------------- /Bullseye_32/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | using namespace std; 9 | 10 | // Create zbar scanner 11 | zbar::ImageScanner scanner; 12 | 13 | struct decodedObject 14 | { 15 | string type; 16 | string data; 17 | vector location; 18 | }; 19 | 20 | // Display barcode and QR code location 21 | void display(cv::Mat &im, vector&decodedObjects) 22 | { 23 | // Loop over all decoded objects 24 | for(size_t i = 0; i points = decodedObjects[i].location; 26 | vector hull; 27 | 28 | // If the points do not form a quad, find convex hull 29 | if(points.size() > 4) cv::convexHull(points, hull); 30 | else hull = points; 31 | 32 | // Number of points in the convex hull 33 | size_t n = hull.size(); 34 | 35 | for(size_t j=0; j&decodedObjects, int nb_frames) 42 | { 43 | // Convert image to grayscale 44 | cv::Mat imGray; 45 | 46 | cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY); 47 | 48 | // Wrap image data in a zbar image 49 | zbar::Image image(im.cols, im.rows, "Y800", (uchar*)imGray.data, im.cols*im.rows); 50 | 51 | // Scan the image for barcodes and QRCodes 52 | int res = scanner.scan(image); 53 | 54 | if (res > 0) { 55 | // Print results 56 | for(zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol){ 57 | decodedObject obj; 58 | 59 | obj.type = symbol->get_type_name(); 60 | obj.data = symbol->get_data(); 61 | // Obtain location 62 | 63 | for(int i = 0; i< symbol->get_location_size(); i++){ 64 | obj.location.push_back(cv::Point(symbol->get_location_x(i),symbol->get_location_y(i))); 65 | } 66 | decodedObjects.push_back(obj); 67 | 68 | // debug - print type and data 69 | cout << nb_frames << 
endl; 70 | cout << "Type : " << obj.type << endl; 71 | cout << "Data : " << obj.data << endl << endl; 72 | 73 | } 74 | display(im, decodedObjects); 75 | } 76 | } 77 | /// For the Raspberry Pi 64-bit Bullseye OS 78 | 79 | std::string gstreamer_pipeline(int capture_width, int capture_height, int framerate, int display_width, int display_height) { 80 | return 81 | " libcamerasrc ! video/x-raw, " 82 | " width=(int)" + std::to_string(capture_width) + "," 83 | " height=(int)" + std::to_string(capture_height) + "," 84 | " framerate=(fraction)" + std::to_string(framerate) +"/1 !" 85 | " videoconvert ! videoscale !" 86 | " video/x-raw," 87 | " width=(int)" + std::to_string(display_width) + "," 88 | " height=(int)" + std::to_string(display_height) + " ! appsink"; 89 | } 90 | 91 | int main() 92 | { 93 | int ch=0; 94 | int nb_frames=0; 95 | cv::Mat image; 96 | float f; 97 | float FPS[16]; 98 | int i, Fcnt=0; 99 | chrono::steady_clock::time_point Tbegin, Tend; 100 | 101 | for(i=0;i<16;i++) FPS[i]=0.0; 102 | 103 | //pipeline parameters 104 | //keep this resolution!!! 105 | //it will be cropped to 720x720 106 | int capture_width = 1024; 107 | int capture_height = 768; 108 | int framerate = 15 ; 109 | int display_width = 1024; 110 | int display_height = 768; 111 | 112 | //reset frame average 113 | std::string pipeline = gstreamer_pipeline(capture_width, capture_height, framerate, 114 | display_width, display_height); 115 | std::cout << "Using pipeline: \n\t" << pipeline << "\n\n\n"; 116 | 117 | cv::VideoCapture cap(pipeline, cv::CAP_GSTREAMER); 118 | if(!cap.isOpened()) { 119 | std::cout<<"Failed to open camera."< decodedObjects; 141 | decode(image, decodedObjects, nb_frames++); 142 | 143 | //calculate frame rate (just for your convenience) 144 | Tend = chrono::steady_clock::now(); 145 | f = chrono::duration_cast (Tend - Tbegin).count(); 146 | Tbegin = Tend; 147 | if(f>0.0) FPS[((Fcnt++)&0x0F)]=1000.0/f; 148 | for(f=0.0, i=0;i<16;i++){ f+=FPS[i]; } 149 | putText(image, cv::format("FPS %0.2f", f/16),cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX,0.6, cv::Scalar(0, 0, 255)); 150 | 151 | //show result 152 | cv::imshow("Video",image); 153 | ch=cv::waitKey(10); 154 | } 155 | cap.release(); 156 | cv::destroyWindow("Video"); 157 | return 0; 158 | } 159 | -------------------------------------------------------------------------------- /Bullseye_64/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0 FATAL_ERROR) 2 | 3 | project(QRpi) 4 | 5 | macro(use_cxx11) 6 | if (CMAKE_VERSION VERSION_LESS "3.1") 7 | if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 8 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 9 | endif () 10 | else () 11 | set (CMAKE_CXX_STANDARD 17) 12 | endif () 13 | endmacro(use_cxx11) 14 | 15 | find_package(OpenCV REQUIRED) 16 | 17 | set(SOURCES ./main.cpp) 18 | 19 | set(EXTRA_LIBS ${OpenCV_LIBS} libgobject-2.0.so libgstreamer-1.0.so libgstapp-1.0.so /usr/local/lib/libzbar.so) 20 | 21 | set(EXECUTABLE_OUTPUT_PATH "./") 22 | 23 | link_directories(/usr/lib/aarch64-linux-gnu/ /usr/local/lib/) 24 | 25 | include_directories(${OpenCV_INCLUDE_DIRS}) 26 | 27 | add_link_options(-fopenmp -s -ldl -lpthread) 28 | 29 | add_executable(QRpi ${SOURCES}) 30 | 31 | target_link_libraries(QRpi ${EXTRA_LIBS}) -------------------------------------------------------------------------------- /Bullseye_64/QR.cbp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 52 | 53 | 
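(Editor's note) The decode() routine shared by every main.cpp in this repository wraps a grayscale frame in a zbar::Image, scans it, and walks the detected symbols. For reference, here is the same flow reduced to a single still image; it only assumes OpenCV and ZBar are installed, and the filename "qr_test.png" is a made-up example, not a file from this repository:

#include <opencv2/opencv.hpp>
#include <zbar.h>
#include <iostream>

int main()
{
    // Load a test image (hypothetical filename, replace with your own)
    cv::Mat im = cv::imread("qr_test.png");
    if (im.empty()) { std::cerr << "Could not read image" << std::endl; return 1; }

    // ZBar works on 8-bit grayscale ("Y800") data
    cv::Mat gray;
    cv::cvtColor(im, gray, cv::COLOR_BGR2GRAY);

    // Enable QR codes only, as the camera demos in this repo do
    zbar::ImageScanner scanner;
    scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 0);
    scanner.set_config(zbar::ZBAR_QRCODE, zbar::ZBAR_CFG_ENABLE, 1);

    // Wrap the grayscale buffer (no copy) and scan it
    zbar::Image image(gray.cols, gray.rows, "Y800", gray.data, gray.cols * gray.rows);
    if (scanner.scan(image) > 0) {
        for (zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol)
            std::cout << symbol->get_type_name() << " : " << symbol->get_data() << std::endl;
    }
    return 0;
}

The camera programs below do exactly this once per captured frame, plus drawing the symbol outline and an FPS overlay.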
-------------------------------------------------------------------------------- /Bullseye_64/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | using namespace std; 9 | 10 | // Create zbar scanner 11 | zbar::ImageScanner scanner; 12 | 13 | struct decodedObject 14 | { 15 | string type; 16 | string data; 17 | vector location; 18 | }; 19 | 20 | // Display barcode and QR code location 21 | void display(cv::Mat &im, vector&decodedObjects) 22 | { 23 | // Loop over all decoded objects 24 | for(size_t i = 0; i points = decodedObjects[i].location; 26 | vector hull; 27 | 28 | // If the points do not form a quad, find convex hull 29 | if(points.size() > 4) cv::convexHull(points, hull); 30 | else hull = points; 31 | 32 | // Number of points in the convex hull 33 | size_t n = hull.size(); 34 | 35 | for(size_t j=0; j&decodedObjects, int nb_frames) 42 | { 43 | // Convert image to grayscale 44 | cv::Mat imGray; 45 | 46 | cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY); 47 | 48 | // Wrap image data in a zbar image 49 | zbar::Image image(im.cols, im.rows, "Y800", (uchar*)imGray.data, im.cols*im.rows); 50 | 51 | // Scan the image for barcodes and QRCodes 52 | int res = scanner.scan(image); 53 | 54 | if (res > 0) { 55 | // Print results 56 | for(zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol){ 57 | decodedObject obj; 58 | 59 | obj.type = symbol->get_type_name(); 60 | obj.data = symbol->get_data(); 61 | // Obtain location 62 | 63 | for(int i = 0; i< symbol->get_location_size(); i++){ 64 | obj.location.push_back(cv::Point(symbol->get_location_x(i),symbol->get_location_y(i))); 65 | } 66 | decodedObjects.push_back(obj); 67 | 68 | // debug - print type and data 69 | cout << nb_frames << endl; 70 | cout << "Type : " << obj.type << endl; 71 | cout << "Data : " << obj.data << endl << endl; 72 | 73 | } 74 | display(im, decodedObjects); 75 | } 76 | } 77 | /// For the Raspberry Pi 64-bit Bullseye OS 78 | 79 | std::string gstreamer_pipeline(int capture_width, int capture_height, int framerate, int display_width, int display_height) { 80 | return 81 | " libcamerasrc ! video/x-raw, " 82 | " width=(int)" + std::to_string(capture_width) + "," 83 | " height=(int)" + std::to_string(capture_height) + "," 84 | " framerate=(fraction)" + std::to_string(framerate) +"/1 !" 85 | " videoconvert ! videoscale !" 86 | " video/x-raw," 87 | " width=(int)" + std::to_string(display_width) + "," 88 | " height=(int)" + std::to_string(display_height) + " ! appsink"; 89 | } 90 | 91 | int main() 92 | { 93 | int ch=0; 94 | int nb_frames=0; 95 | cv::Mat image; 96 | float f; 97 | float FPS[16]; 98 | int i, Fcnt=0; 99 | chrono::steady_clock::time_point Tbegin, Tend; 100 | 101 | for(i=0;i<16;i++) FPS[i]=0.0; 102 | 103 | //pipeline parameters 104 | //keep this resolution!!! 
105 | //it will be cropped to 720x720 106 | int capture_width = 1024; 107 | int capture_height = 768; 108 | int framerate = 15 ; 109 | int display_width = 1024; 110 | int display_height = 768; 111 | 112 | //reset frame average 113 | std::string pipeline = gstreamer_pipeline(capture_width, capture_height, framerate, 114 | display_width, display_height); 115 | std::cout << "Using pipeline: \n\t" << pipeline << "\n\n\n"; 116 | 117 | cv::VideoCapture cap(pipeline, cv::CAP_GSTREAMER); 118 | if(!cap.isOpened()) { 119 | std::cout<<"Failed to open camera."< decodedObjects; 141 | decode(image, decodedObjects, nb_frames++); 142 | 143 | //calculate frame rate (just for your convenience) 144 | Tend = chrono::steady_clock::now(); 145 | f = chrono::duration_cast (Tend - Tbegin).count(); 146 | Tbegin = Tend; 147 | if(f>0.0) FPS[((Fcnt++)&0x0F)]=1000.0/f; 148 | for(f=0.0, i=0;i<16;i++){ f+=FPS[i]; } 149 | putText(image, cv::format("FPS %0.2f", f/16),cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX,0.6, cv::Scalar(0, 0, 255)); 150 | 151 | //show result 152 | cv::imshow("Video",image); 153 | ch=cv::waitKey(10); 154 | } 155 | cap.release(); 156 | cv::destroyWindow("Video"); 157 | return 0; 158 | } 159 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0 FATAL_ERROR) 2 | 3 | project(QRpi) 4 | 5 | macro(use_cxx11) 6 | if (CMAKE_VERSION VERSION_LESS "3.1") 7 | if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 8 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 9 | endif () 10 | else () 11 | set (CMAKE_CXX_STANDARD 17) 12 | endif () 13 | endmacro(use_cxx11) 14 | 15 | find_package(OpenCV REQUIRED) 16 | 17 | set(SOURCES src/main.cpp src/lccv.cpp src/libcamera_app.cpp src/libcamera_app_options.cpp) 18 | 19 | set(EXTRA_LIBS ${OpenCV_LIBS} libcamera-base.so libcamera.so /usr/local/lib/libzbar.so) 20 | 21 | set(EXECUTABLE_OUTPUT_PATH "./") 22 | 23 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic -Wfatal-errors -Wextra -Wall -std=gnu++17 -fexceptions -ftree-vectorize -Wno-unused-parameter") 24 | 25 | link_directories(/usr/lib/aarch64-linux-gnu/ /usr/local/lib/) 26 | 27 | include_directories(include ${OpenCV_INCLUDE_DIRS} /usr/include/libcamera) 28 | 29 | add_link_options(-fopenmp -s -ldl -lpthread) 30 | 31 | add_executable(QRpi ${SOURCES}) 32 | 33 | target_link_libraries(QRpi ${EXTRA_LIBS}) -------------------------------------------------------------------------------- /Bullseye_64_LCCV/QR.cbp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 63 | 64 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/QR.layout: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/include/lccv.hpp: -------------------------------------------------------------------------------- 1 | #ifndef LCCV_HPP 2 | #define LCCV_HPP 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | #include "libcamera_app.hpp" 10 | #include "libcamera_app_options.hpp" 11 | 12 | namespace lccv { 13 | 14 | class PiCamera { 15 | public: 16 | PiCamera(); 17 | ~PiCamera(); 18 | 19 | Options *options; 20 | 21 | //Photo mode 22 | bool startPhoto(); 23 | bool capturePhoto(cv::Mat &frame); 24 | bool stopPhoto(); 25 
| 26 | //Video mode 27 | bool startVideo(); 28 | bool getVideoFrame(cv::Mat &frame, unsigned int timeout); 29 | void stopVideo(); 30 | 31 | protected: 32 | void run(); 33 | protected: 34 | LibcameraApp *app; 35 | void getImage(cv::Mat &frame, CompletedRequestPtr &payload); 36 | static void *videoThreadFunc(void *p); 37 | pthread_t videothread; 38 | unsigned int still_flags; 39 | unsigned int vw,vh,vstr; 40 | std::atomic running,frameready; 41 | uint8_t *framebuffer; 42 | std::mutex mtx; 43 | bool camerastarted; 44 | }; 45 | 46 | } 47 | #endif 48 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/include/libcamera_app.hpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020-2021, Raspberry Pi (Trading) Ltd. 4 | * 5 | * libcamera_app.hpp - base class for libcamera apps. 6 | */ 7 | 8 | #pragma once 9 | 10 | #include 11 | 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | 34 | class Options; 35 | struct CompletedRequest; 36 | using CompletedRequestPtr = std::shared_ptr; 37 | 38 | namespace controls = libcamera::controls; 39 | namespace properties = libcamera::properties; 40 | 41 | class LibcameraApp 42 | { 43 | public: 44 | using Stream = libcamera::Stream; 45 | using FrameBuffer = libcamera::FrameBuffer; 46 | using ControlList = libcamera::ControlList; 47 | using Request = libcamera::Request; 48 | using CameraManager = libcamera::CameraManager; 49 | using Camera = libcamera::Camera; 50 | using CameraConfiguration = libcamera::CameraConfiguration; 51 | using FrameBufferAllocator = libcamera::FrameBufferAllocator; 52 | using StreamRole = libcamera::StreamRole; 53 | using StreamRoles = libcamera::StreamRoles; 54 | using PixelFormat = libcamera::PixelFormat; 55 | using StreamConfiguration = libcamera::StreamConfiguration; 56 | using BufferMap = Request::BufferMap; 57 | using Size = libcamera::Size; 58 | using Rectangle = libcamera::Rectangle; 59 | enum class MsgType 60 | { 61 | RequestComplete, 62 | Quit 63 | }; 64 | typedef std::variant MsgPayload; 65 | struct Msg 66 | { 67 | Msg(MsgType const &t) : type(t) {} 68 | template 69 | Msg(MsgType const &t, T p) : type(t), payload(std::forward(p)) 70 | { 71 | } 72 | MsgType type; 73 | MsgPayload payload; 74 | }; 75 | 76 | // Some flags that can be used to give hints to the camera configuration. 
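// Editor's note (illustration, not part of the original header): the still flags are bit flags and
// can be OR-ed together, e.g. ConfigureStill(FLAG_STILL_RGB | FLAG_STILL_DOUBLE_BUFFER) requests
// RGB888 output with a two-buffer still stream; lccv.cpp further down uses FLAG_STILL_RGB alone.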
77 | static constexpr unsigned int FLAG_STILL_NONE = 0; 78 | static constexpr unsigned int FLAG_STILL_BGR = 1; // supply BGR images, not YUV 79 | static constexpr unsigned int FLAG_STILL_RGB = 2; // supply RGB images, not YUV 80 | static constexpr unsigned int FLAG_STILL_RAW = 4; // request raw image stream 81 | static constexpr unsigned int FLAG_STILL_DOUBLE_BUFFER = 8; // double-buffer stream 82 | static constexpr unsigned int FLAG_STILL_TRIPLE_BUFFER = 16; // triple-buffer stream 83 | static constexpr unsigned int FLAG_STILL_BUFFER_MASK = 24; // mask for buffer flags 84 | 85 | static constexpr unsigned int FLAG_VIDEO_NONE = 0; 86 | static constexpr unsigned int FLAG_VIDEO_RAW = 1; // request raw image stream 87 | static constexpr unsigned int FLAG_VIDEO_JPEG_COLOURSPACE = 2; // force JPEG colour space 88 | 89 | LibcameraApp(std::unique_ptr const opts = nullptr); 90 | virtual ~LibcameraApp(); 91 | 92 | Options *GetOptions() const { return options_.get(); } 93 | 94 | std::string const &CameraId() const; 95 | void OpenCamera(); 96 | void CloseCamera(); 97 | 98 | void ConfigureStill(unsigned int flags = FLAG_STILL_NONE); 99 | void ConfigureViewfinder(); 100 | 101 | void Teardown(); 102 | void StartCamera(); 103 | void StopCamera(); 104 | 105 | Msg Wait(); 106 | void PostMessage(MsgType &t, MsgPayload &p); 107 | 108 | Stream *GetStream(std::string const &name, unsigned int *w = nullptr, unsigned int *h = nullptr, 109 | unsigned int *stride = nullptr) const; 110 | Stream *ViewfinderStream(unsigned int *w = nullptr, unsigned int *h = nullptr, 111 | unsigned int *stride = nullptr) const; 112 | Stream *StillStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 113 | Stream *RawStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 114 | Stream *VideoStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 115 | Stream *LoresStream(unsigned int *w = nullptr, unsigned int *h = nullptr, unsigned int *stride = nullptr) const; 116 | Stream *GetMainStream() const; 117 | 118 | std::vector> Mmap(FrameBuffer *buffer) const; 119 | 120 | void SetControls(ControlList &controls); 121 | void StreamDimensions(Stream const *stream, unsigned int *w, unsigned int *h, unsigned int *stride) const; 122 | 123 | protected: 124 | std::unique_ptr options_; 125 | 126 | private: 127 | template 128 | class MessageQueue 129 | { 130 | public: 131 | template 132 | void Post(U &&msg) 133 | { 134 | std::unique_lock lock(mutex_); 135 | queue_.push(std::forward(msg)); 136 | cond_.notify_one(); 137 | } 138 | T Wait() 139 | { 140 | std::unique_lock lock(mutex_); 141 | cond_.wait(lock, [this] { return !queue_.empty(); }); 142 | T msg = std::move(queue_.front()); 143 | queue_.pop(); 144 | return msg; 145 | } 146 | void Clear() 147 | { 148 | std::unique_lock lock(mutex_); 149 | queue_ = {}; 150 | } 151 | 152 | private: 153 | std::queue queue_; 154 | std::mutex mutex_; 155 | std::condition_variable cond_; 156 | }; 157 | 158 | void setupCapture(); 159 | void makeRequests(); 160 | void queueRequest(CompletedRequest *completed_request); 161 | void requestComplete(Request *request); 162 | void configureDenoise(const std::string &denoise_mode); 163 | 164 | std::unique_ptr camera_manager_; 165 | std::shared_ptr camera_; 166 | bool camera_acquired_ = false; 167 | std::unique_ptr configuration_; 168 | std::map>> mapped_buffers_; 169 | std::map streams_; 170 | FrameBufferAllocator *allocator_ = nullptr; 171 | 
std::map> frame_buffers_; 172 | std::queue free_requests_; 173 | std::vector> requests_; 174 | std::mutex completed_requests_mutex_; 175 | std::set completed_requests_; 176 | bool camera_started_ = false; 177 | std::mutex camera_stop_mutex_; 178 | MessageQueue msg_queue_; 179 | // For setting camera controls. 180 | std::mutex control_mutex_; 181 | ControlList controls_; 182 | // Other: 183 | uint64_t last_timestamp_; 184 | uint64_t sequence_ = 0; 185 | }; 186 | 187 | struct FrameInfo 188 | { 189 | FrameInfo(libcamera::ControlList &ctrls) 190 | : exposure_time(0.0), digital_gain(0.0), colour_gains({ { 0.0f, 0.0f } }), focus(0.0), aelock(false) 191 | { 192 | if (ctrls.contains(libcamera::controls::ExposureTime)) 193 | exposure_time = ctrls.get(libcamera::controls::ExposureTime); 194 | 195 | if (ctrls.contains(libcamera::controls::AnalogueGain)) 196 | analogue_gain = ctrls.get(libcamera::controls::AnalogueGain); 197 | 198 | if (ctrls.contains(libcamera::controls::DigitalGain)) 199 | digital_gain = ctrls.get(libcamera::controls::DigitalGain); 200 | 201 | if (ctrls.contains(libcamera::controls::ColourGains)) 202 | { 203 | libcamera::Span gains = ctrls.get(libcamera::controls::ColourGains); 204 | colour_gains[0] = gains[0], colour_gains[1] = gains[1]; 205 | } 206 | 207 | if (ctrls.contains(libcamera::controls::FocusFoM)) 208 | focus = ctrls.get(libcamera::controls::FocusFoM); 209 | 210 | if (ctrls.contains(libcamera::controls::AeLocked)) 211 | aelock = ctrls.get(libcamera::controls::AeLocked); 212 | } 213 | 214 | std::string ToString(std::string &info_string) const 215 | { 216 | std::string parsed(info_string); 217 | 218 | for (auto const &t : tokens) 219 | { 220 | std::size_t pos = parsed.find(t); 221 | if (pos != std::string::npos) 222 | { 223 | std::stringstream value; 224 | value << std::fixed << std::setprecision(2); 225 | 226 | if (t == "%frame") 227 | value << sequence; 228 | else if (t == "%fps") 229 | value << fps; 230 | else if (t == "%exp") 231 | value << exposure_time; 232 | else if (t == "%ag") 233 | value << analogue_gain; 234 | else if (t == "%dg") 235 | value << digital_gain; 236 | else if (t == "%rg") 237 | value << colour_gains[0]; 238 | else if (t == "%bg") 239 | value << colour_gains[1]; 240 | else if (t == "%focus") 241 | value << focus; 242 | else if (t == "%aelock") 243 | value << aelock; 244 | 245 | parsed.replace(pos, t.length(), value.str()); 246 | } 247 | } 248 | 249 | return parsed; 250 | } 251 | 252 | unsigned int sequence; 253 | float exposure_time; 254 | float analogue_gain; 255 | float digital_gain; 256 | std::array colour_gains; 257 | float focus; 258 | float fps; 259 | bool aelock; 260 | 261 | private: 262 | // Info text tokens. 
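// Editor's note (worked example, not part of the original header): ToString() substitutes each token
// with the matching field at two decimals, so applied to the format string "Exp %exp AG %ag" on a
// frame with exposure_time 10000 and analogue_gain 2.0 it returns "Exp 10000.00 AG 2.00".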
263 | inline static const std::string tokens[] = { "%frame", "%fps", "%exp", "%ag", "%dg", 264 | "%rg", "%bg", "%focus", "%aelock" }; 265 | }; 266 | 267 | class Metadata 268 | { 269 | public: 270 | Metadata() = default; 271 | 272 | Metadata(Metadata const &other) 273 | { 274 | std::scoped_lock other_lock(other.mutex_); 275 | data_ = other.data_; 276 | } 277 | 278 | Metadata(Metadata &&other) 279 | { 280 | std::scoped_lock other_lock(other.mutex_); 281 | data_ = std::move(other.data_); 282 | other.data_.clear(); 283 | } 284 | 285 | template 286 | void Set(std::string const &tag, T &&value) 287 | { 288 | std::scoped_lock lock(mutex_); 289 | data_.insert_or_assign(tag, std::forward(value)); 290 | } 291 | 292 | template 293 | int Get(std::string const &tag, T &value) const 294 | { 295 | std::scoped_lock lock(mutex_); 296 | auto it = data_.find(tag); 297 | if (it == data_.end()) 298 | return -1; 299 | value = std::any_cast(it->second); 300 | return 0; 301 | } 302 | 303 | void Clear() 304 | { 305 | std::scoped_lock lock(mutex_); 306 | data_.clear(); 307 | } 308 | 309 | Metadata &operator=(Metadata const &other) 310 | { 311 | std::scoped_lock lock(mutex_, other.mutex_); 312 | data_ = other.data_; 313 | return *this; 314 | } 315 | 316 | Metadata &operator=(Metadata &&other) 317 | { 318 | std::scoped_lock lock(mutex_, other.mutex_); 319 | data_ = std::move(other.data_); 320 | other.data_.clear(); 321 | return *this; 322 | } 323 | 324 | void Merge(Metadata &other) 325 | { 326 | std::scoped_lock lock(mutex_, other.mutex_); 327 | data_.merge(other.data_); 328 | } 329 | 330 | template 331 | T *GetLocked(std::string const &tag) 332 | { 333 | // This allows in-place access to the Metadata contents, 334 | // for which you should be holding the lock. 335 | auto it = data_.find(tag); 336 | if (it == data_.end()) 337 | return nullptr; 338 | return std::any_cast(&it->second); 339 | } 340 | 341 | template 342 | void SetLocked(std::string const &tag, T &&value) 343 | { 344 | // Use this only if you're holding the lock yourself. 345 | data_.insert_or_assign(tag, std::forward(value)); 346 | } 347 | 348 | // Note: use of (lowercase) lock and unlock means you can create scoped 349 | // locks with the standard lock classes. 350 | // e.g. std::lock_guard lock(metadata) 351 | void lock() { mutex_.lock(); } 352 | void unlock() { mutex_.unlock(); } 353 | 354 | private: 355 | mutable std::mutex mutex_; 356 | std::map data_; 357 | }; 358 | 359 | struct CompletedRequest 360 | { 361 | using BufferMap = libcamera::Request::BufferMap; 362 | using ControlList = libcamera::ControlList; 363 | using Request = libcamera::Request; 364 | 365 | CompletedRequest(unsigned int seq, Request *r) 366 | : sequence(seq), buffers(r->buffers()), metadata(r->metadata()), request(r) 367 | { 368 | r->reuse(); 369 | } 370 | unsigned int sequence; 371 | BufferMap buffers; 372 | ControlList metadata; 373 | Request *request; 374 | float framerate; 375 | Metadata post_process_metadata; 376 | }; 377 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/include/libcamera_app_options.hpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020, Raspberry Pi (Trading) Ltd. 
4 | * 5 | * options.hpp - common program options 6 | */ 7 | 8 | #pragma once 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | enum Exposure_Modes { 20 | EXPOSURE_NORMAL = libcamera::controls::ExposureNormal, 21 | EXPOSURE_SHORT = libcamera::controls::ExposureShort, 22 | EXPOSURE_CUSTOM = libcamera::controls::ExposureCustom 23 | }; 24 | 25 | enum Metering_Modes { 26 | METERING_CENTRE = libcamera::controls::MeteringCentreWeighted, 27 | METERING_SPOT = libcamera::controls::MeteringSpot, 28 | METERING_MATRIX = libcamera::controls::MeteringMatrix, 29 | METERING_CUSTOM = libcamera::controls::MeteringCustom 30 | }; 31 | 32 | enum WhiteBalance_Modes { 33 | WB_AUTO = libcamera::controls::AwbAuto, 34 | WB_NORMAL = libcamera::controls::AwbAuto, 35 | WB_INCANDESCENT = libcamera::controls::AwbIncandescent, 36 | WB_TUNGSTEN = libcamera::controls::AwbTungsten, 37 | WB_FLUORESCENT = libcamera::controls::AwbFluorescent, 38 | WB_INDOOR = libcamera::controls::AwbIndoor, 39 | WB_DAYLIGHT = libcamera::controls::AwbDaylight, 40 | WB_CLOUDY = libcamera::controls::AwbCloudy, 41 | WB_CUSTOM = libcamera::controls::AwbAuto 42 | }; 43 | 44 | class Options 45 | { 46 | public: 47 | Options() 48 | { 49 | timeout=1000; 50 | metering_index = Metering_Modes::METERING_CENTRE; 51 | exposure_index=Exposure_Modes::EXPOSURE_NORMAL; 52 | awb_index=WhiteBalance_Modes::WB_AUTO; 53 | saturation=1.0f; 54 | contrast=1.0f; 55 | sharpness=1.0f; 56 | brightness=0.0f; 57 | shutter=0.0f; 58 | gain=0.0f; 59 | ev=0.0f; 60 | roi_x=roi_y=roi_width=roi_height=0; 61 | awb_gain_r=awb_gain_b=0; 62 | denoise="auto"; 63 | verbose=false; 64 | transform=libcamera::Transform::Identity; 65 | camera=0; 66 | } 67 | 68 | virtual ~Options() {} 69 | 70 | virtual void Print() const; 71 | 72 | void setMetering(Metering_Modes meteringmode){metering_index=meteringmode;} 73 | void setWhiteBalance(WhiteBalance_Modes wb){awb_index = wb;} 74 | void setExposureMode(Exposure_Modes exp){exposure_index = exp;} 75 | 76 | int getExposureMode(){return exposure_index;} 77 | int getMeteringMode(){return metering_index;} 78 | int getWhiteBalance(){return awb_index;} 79 | 80 | bool help; 81 | bool version; 82 | bool list_cameras; 83 | bool verbose; 84 | uint64_t timeout; // in ms 85 | unsigned int photo_width, photo_height; 86 | unsigned int video_width, video_height; 87 | bool rawfull; 88 | libcamera::Transform transform; 89 | float roi_x, roi_y, roi_width, roi_height; 90 | float shutter; 91 | float gain; 92 | float ev; 93 | float awb_gain_r; 94 | float awb_gain_b; 95 | float brightness; 96 | float contrast; 97 | float saturation; 98 | float sharpness; 99 | float framerate; 100 | std::string denoise; 101 | std::string info_text; 102 | unsigned int camera; 103 | 104 | protected: 105 | int metering_index; 106 | int exposure_index; 107 | int awb_index; 108 | 109 | private: 110 | }; 111 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/src/lccv.cpp: -------------------------------------------------------------------------------- 1 | #include "lccv.hpp" 2 | #include 3 | #include 4 | 5 | using namespace cv; 6 | using namespace lccv; 7 | 8 | PiCamera::PiCamera() 9 | { 10 | app = new LibcameraApp(std::make_unique()); 11 | options = static_cast(app->GetOptions()); 12 | still_flags = LibcameraApp::FLAG_STILL_NONE; 13 | options->photo_width = 4056; 14 | options->photo_height = 3040; 15 | options->video_width = 640; 16 | options->video_height = 480; 17 | 
options->framerate = 30; 18 | options->denoise = "auto"; 19 | options->timeout = 1000; 20 | options->setMetering(Metering_Modes::METERING_MATRIX); 21 | options->setExposureMode(Exposure_Modes::EXPOSURE_NORMAL); 22 | options->setWhiteBalance(WhiteBalance_Modes::WB_AUTO); 23 | options->contrast = 1.0f; 24 | options->saturation = 1.0f; 25 | still_flags |= LibcameraApp::FLAG_STILL_RGB; 26 | running.store(false, std::memory_order_release);; 27 | frameready.store(false, std::memory_order_release);; 28 | framebuffer=nullptr; 29 | camerastarted=false; 30 | } 31 | 32 | PiCamera::~PiCamera() 33 | { 34 | delete app; 35 | } 36 | 37 | void PiCamera::getImage(cv::Mat &frame, CompletedRequestPtr &payload) 38 | { 39 | unsigned int w, h, stride; 40 | libcamera::Stream *stream = app->StillStream(); 41 | app->StreamDimensions(stream, &w, &h, &stride); 42 | const std::vector> mem = app->Mmap(payload->buffers[stream]); 43 | frame.create(h,w,CV_8UC3); 44 | uint ls = w*3; 45 | uint8_t *ptr = (uint8_t *)mem[0].data(); 46 | for (unsigned int i = 0; i < h; i++, ptr += stride) 47 | { 48 | memcpy(frame.ptr(i),ptr,ls); 49 | } 50 | } 51 | 52 | bool PiCamera::startPhoto() 53 | { 54 | app->OpenCamera(); 55 | app->ConfigureStill(still_flags); 56 | camerastarted=true; 57 | return true; 58 | } 59 | bool PiCamera::stopPhoto() 60 | { 61 | if(camerastarted){ 62 | camerastarted=false; 63 | app->Teardown(); 64 | app->CloseCamera(); 65 | } 66 | return true; 67 | } 68 | 69 | bool PiCamera::capturePhoto(cv::Mat &frame) 70 | { 71 | if(!camerastarted){ 72 | app->OpenCamera(); 73 | app->ConfigureStill(still_flags); 74 | } 75 | app->StartCamera(); 76 | LibcameraApp::Msg msg = app->Wait(); 77 | if (msg.type == LibcameraApp::MsgType::Quit) 78 | return false; 79 | else if (msg.type != LibcameraApp::MsgType::RequestComplete) 80 | return false; 81 | if (app->StillStream()) 82 | { 83 | app->StopCamera(); 84 | getImage(frame, std::get(msg.payload)); 85 | app->Teardown(); 86 | app->CloseCamera(); 87 | } else { 88 | std::cerr<<"Incorrect stream received"<StopCamera(); 91 | if(!camerastarted){ 92 | app->Teardown(); 93 | app->CloseCamera(); 94 | } 95 | } 96 | return true; 97 | } 98 | 99 | bool PiCamera::startVideo() 100 | { 101 | if(camerastarted)stopPhoto(); 102 | if(running.load(std::memory_order_release)){ 103 | std::cerr<<"Video thread already running"; 104 | return false; 105 | } 106 | frameready.store(false, std::memory_order_release); 107 | app->OpenCamera(); 108 | app->ConfigureViewfinder(); 109 | app->StartCamera(); 110 | 111 | int ret = pthread_create(&videothread, NULL, &videoThreadFunc, this); 112 | if (ret != 0) { 113 | std::cerr<<"Error starting video thread"; 114 | return false; 115 | } 116 | return true; 117 | } 118 | 119 | void PiCamera::stopVideo() 120 | { 121 | if(!running)return; 122 | 123 | running.store(false, std::memory_order_release);; 124 | 125 | //join thread 126 | void *status; 127 | int ret = pthread_join(videothread, &status); 128 | if(ret<0) 129 | std::cerr<<"Error joining thread"<StopCamera(); 132 | app->Teardown(); 133 | app->CloseCamera(); 134 | frameready.store(false, std::memory_order_release);; 135 | } 136 | 137 | bool PiCamera::getVideoFrame(cv::Mat &frame, unsigned int timeout) 138 | { 139 | if(!running.load(std::memory_order_acquire))return false; 140 | auto start_time = std::chrono::high_resolution_clock::now(); 141 | bool timeout_reached = false; 142 | timespec req; 143 | req.tv_sec=0; 144 | req.tv_nsec=1000000;//1ms 145 | while((!frameready.load(std::memory_order_acquire))&&(!timeout_reached)){ 146 | 
nanosleep(&req,NULL); 147 | timeout_reached = (std::chrono::high_resolution_clock::now() - start_time > std::chrono::milliseconds(timeout)); 148 | } 149 | if(frameready.load(std::memory_order_acquire)){ 150 | frame.create(vh,vw,CV_8UC3); 151 | uint ls = vw*3; 152 | mtx.lock(); 153 | uint8_t *ptr = framebuffer; 154 | for (unsigned int i = 0; i < vh; i++, ptr += vstr) 155 | memcpy(frame.ptr(i),ptr,ls); 156 | mtx.unlock(); 157 | frameready.store(false, std::memory_order_release);; 158 | return true; 159 | } 160 | else 161 | return false; 162 | } 163 | 164 | void *PiCamera::videoThreadFunc(void *p) 165 | { 166 | PiCamera *t = (PiCamera *)p; 167 | t->running.store(true, std::memory_order_release); 168 | //allocate framebuffer 169 | //unsigned int vw,vh,vstr; 170 | libcamera::Stream *stream = t->app->ViewfinderStream(&t->vw,&t->vh,&t->vstr); 171 | int buffersize=t->vh*t->vstr; 172 | if(t->framebuffer)delete[] t->framebuffer; 173 | t->framebuffer=new uint8_t[buffersize]; 174 | std::vector> mem; 175 | 176 | //main loop 177 | while(t->running.load(std::memory_order_acquire)){ 178 | LibcameraApp::Msg msg = t->app->Wait(); 179 | if (msg.type == LibcameraApp::MsgType::Quit){ 180 | std::cerr<<"Quit message received"<running.store(false,std::memory_order_release); 182 | } 183 | else if (msg.type != LibcameraApp::MsgType::RequestComplete) 184 | throw std::runtime_error("unrecognised message!"); 185 | 186 | 187 | CompletedRequestPtr payload = std::get(msg.payload); 188 | mem = t->app->Mmap(payload->buffers[stream]); 189 | t->mtx.lock(); 190 | memcpy(t->framebuffer,mem[0].data(),buffersize); 191 | t->mtx.unlock(); 192 | t->frameready.store(true, std::memory_order_release); 193 | } 194 | if(t->framebuffer){ 195 | delete[] t->framebuffer; 196 | t->framebuffer=nullptr; 197 | } 198 | return NULL; 199 | } 200 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/src/libcamera_app.cpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2021, Raspberry Pi (Trading) Ltd. 4 | * 5 | * libcamera_app.cpp - base class for libcamera apps. 6 | */ 7 | 8 | #include "libcamera_app.hpp" 9 | #include "libcamera_app_options.hpp" 10 | 11 | LibcameraApp::LibcameraApp(std::unique_ptr opts) 12 | : options_(std::move(opts)), controls_(controls::controls) 13 | 14 | { 15 | if (!options_) 16 | options_ = std::make_unique(); 17 | controls_.clear(); 18 | } 19 | 20 | LibcameraApp::~LibcameraApp() 21 | { 22 | StopCamera(); 23 | Teardown(); 24 | CloseCamera(); 25 | } 26 | 27 | std::string const &LibcameraApp::CameraId() const 28 | { 29 | return camera_->id(); 30 | } 31 | 32 | void LibcameraApp::OpenCamera() 33 | { 34 | 35 | if (options_->verbose) 36 | std::cerr << "Opening camera..." 
<< std::endl; 37 | 38 | camera_manager_ = std::make_unique(); 39 | int ret = camera_manager_->start(); 40 | if (ret) 41 | throw std::runtime_error("camera manager failed to start, code " + std::to_string(-ret)); 42 | 43 | if (camera_manager_->cameras().size() == 0) 44 | throw std::runtime_error("no cameras available"); 45 | if (options_->camera >= camera_manager_->cameras().size()) 46 | throw std::runtime_error("selected camera is not available"); 47 | 48 | std::string const &cam_id = camera_manager_->cameras()[options_->camera]->id(); 49 | camera_ = camera_manager_->get(cam_id); 50 | if (!camera_) 51 | throw std::runtime_error("failed to find camera " + cam_id); 52 | 53 | if (camera_->acquire()) 54 | throw std::runtime_error("failed to acquire camera " + cam_id); 55 | camera_acquired_ = true; 56 | 57 | if (options_->verbose) 58 | std::cerr << "Acquired camera " << cam_id << std::endl; 59 | 60 | } 61 | 62 | void LibcameraApp::CloseCamera() 63 | { 64 | if (camera_acquired_) 65 | camera_->release(); 66 | camera_acquired_ = false; 67 | 68 | camera_.reset(); 69 | 70 | camera_manager_.reset(); 71 | 72 | if (options_->verbose && !options_->help) 73 | std::cerr << "Camera closed" << std::endl; 74 | } 75 | 76 | void LibcameraApp::ConfigureStill(unsigned int flags) 77 | { 78 | if (options_->verbose) 79 | std::cerr << "Configuring still capture..." << std::endl; 80 | 81 | // Always request a raw stream as this forces the full resolution capture mode. 82 | // (options_->mode can override the choice of camera mode, however.) 83 | StreamRoles stream_roles = { StreamRole::StillCapture, StreamRole::Raw }; 84 | configuration_ = camera_->generateConfiguration(stream_roles); 85 | if (!configuration_) 86 | throw std::runtime_error("failed to generate still capture configuration"); 87 | 88 | // Now we get to override any of the default settings from the options_-> 89 | if (flags & FLAG_STILL_BGR) 90 | configuration_->at(0).pixelFormat = libcamera::formats::BGR888; 91 | else if (flags & FLAG_STILL_RGB) 92 | configuration_->at(0).pixelFormat = libcamera::formats::RGB888; 93 | else 94 | configuration_->at(0).pixelFormat = libcamera::formats::YUV420; 95 | if ((flags & FLAG_STILL_BUFFER_MASK) == FLAG_STILL_DOUBLE_BUFFER) 96 | configuration_->at(0).bufferCount = 2; 97 | else if ((flags & FLAG_STILL_BUFFER_MASK) == FLAG_STILL_TRIPLE_BUFFER) 98 | configuration_->at(0).bufferCount = 3; 99 | if (options_->photo_width) 100 | configuration_->at(0).size.width = options_->photo_width; 101 | if (options_->photo_height) 102 | configuration_->at(0).size.height = options_->photo_height; 103 | 104 | configuration_->transform = options_->transform; 105 | 106 | //if (have_raw_stream && !options_->rawfull) 107 | { 108 | configuration_->at(1).size.width = configuration_->at(0).size.width; 109 | configuration_->at(1).size.height = configuration_->at(0).size.height; 110 | } 111 | configuration_->at(1).bufferCount = configuration_->at(0).bufferCount; 112 | 113 | configureDenoise(options_->denoise == "auto" ? "cdn_hq" : options_->denoise); 114 | setupCapture(); 115 | 116 | streams_["still"] = configuration_->at(0).stream(); 117 | streams_["raw"] = configuration_->at(1).stream(); 118 | 119 | if (options_->verbose) 120 | std::cerr << "Still capture setup complete" << std::endl; 121 | } 122 | 123 | void LibcameraApp::ConfigureViewfinder() 124 | { 125 | if (options_->verbose) 126 | std::cerr << "Configuring viewfinder..." 
<< std::endl; 127 | 128 | StreamRoles stream_roles = { StreamRole::Viewfinder }; 129 | configuration_ = camera_->generateConfiguration(stream_roles); 130 | if (!configuration_) 131 | throw std::runtime_error("failed to generate viewfinder configuration"); 132 | 133 | // Now we get to override any of the default settings from the options_-> 134 | configuration_->at(0).pixelFormat = libcamera::formats::RGB888; 135 | configuration_->at(0).size.width = options_->video_width; 136 | configuration_->at(0).size.height = options_->video_height; 137 | configuration_->at(0).bufferCount = 4; 138 | 139 | configuration_->transform = options_->transform; 140 | 141 | configureDenoise(options_->denoise == "auto" ? "cdn_off" : options_->denoise); 142 | setupCapture(); 143 | 144 | streams_["viewfinder"] = configuration_->at(0).stream(); 145 | 146 | if (options_->verbose) 147 | std::cerr << "Viewfinder setup complete" << std::endl; 148 | } 149 | 150 | void LibcameraApp::Teardown() 151 | { 152 | if (options_->verbose && !options_->help) 153 | std::cerr << "Tearing down requests, buffers and configuration" << std::endl; 154 | 155 | for (auto &iter : mapped_buffers_) 156 | { 157 | // assert(iter.first->planes().size() == iter.second.size()); 158 | // for (unsigned i = 0; i < iter.first->planes().size(); i++) 159 | for (auto &span : iter.second) 160 | munmap(span.data(), span.size()); 161 | } 162 | mapped_buffers_.clear(); 163 | 164 | delete allocator_; 165 | allocator_ = nullptr; 166 | 167 | configuration_.reset(); 168 | 169 | frame_buffers_.clear(); 170 | 171 | streams_.clear(); 172 | } 173 | 174 | void LibcameraApp::StartCamera() 175 | { 176 | // This makes all the Request objects that we shall need. 177 | makeRequests(); 178 | 179 | // Build a list of initial controls that we must set in the camera before starting it. 180 | // We don't overwrite anything the application may have set before calling us. 181 | if (!controls_.contains(controls::ScalerCrop) && options_->roi_width != 0 && options_->roi_height != 0) 182 | { 183 | Rectangle sensor_area = camera_->properties().get(properties::ScalerCropMaximum); 184 | int x = options_->roi_x * sensor_area.width; 185 | int y = options_->roi_y * sensor_area.height; 186 | int w = options_->roi_width * sensor_area.width; 187 | int h = options_->roi_height * sensor_area.height; 188 | Rectangle crop(x, y, w, h); 189 | crop.translateBy(sensor_area.topLeft()); 190 | if (options_->verbose) 191 | std::cerr << "Using crop " << crop.toString() << std::endl; 192 | controls_.set(controls::ScalerCrop, crop); 193 | } 194 | 195 | // Framerate is a bit weird. If it was set programmatically, we go with that, but 196 | // otherwise it applies only to preview/video modes. For stills capture we set it 197 | // as long as possible so that we get whatever the exposure profile wants. 
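// Editor's note (worked example, not in the original source): still capture gets the very wide
// FrameDurationLimits range in the first branch below, while preview/video uses
// frame_time = 1000000 / framerate; with the 15 fps the QR demos request that is 66666 us for both
// bounds, holding the sensor at a fixed ~15 fps.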
198 | if (!controls_.contains(controls::FrameDurationLimits)) 199 | { 200 | if (StillStream()) 201 | controls_.set(controls::FrameDurationLimits, { INT64_C(100), INT64_C(1000000000) }); 202 | else if (options_->framerate > 0) 203 | { 204 | int64_t frame_time = 1000000 / options_->framerate; // in us 205 | controls_.set(controls::FrameDurationLimits, { frame_time, frame_time }); 206 | } 207 | } 208 | 209 | if (!controls_.contains(controls::ExposureTime) && options_->shutter) 210 | controls_.set(controls::ExposureTime, options_->shutter); 211 | if (!controls_.contains(controls::AnalogueGain) && options_->gain) 212 | controls_.set(controls::AnalogueGain, options_->gain); 213 | if (!controls_.contains(controls::AeMeteringMode)) 214 | controls_.set(controls::AeMeteringMode, options_->getMeteringMode()); 215 | if (!controls_.contains(controls::AeExposureMode)) 216 | controls_.set(controls::AeExposureMode, options_->getExposureMode()); 217 | if (!controls_.contains(controls::ExposureValue)) 218 | controls_.set(controls::ExposureValue, options_->ev); 219 | if (!controls_.contains(controls::AwbMode)) 220 | controls_.set(controls::AwbMode, options_->getWhiteBalance()); 221 | if (!controls_.contains(controls::ColourGains) && options_->awb_gain_r && options_->awb_gain_b) 222 | controls_.set(controls::ColourGains, { options_->awb_gain_r, options_->awb_gain_b }); 223 | if (!controls_.contains(controls::Brightness)) 224 | controls_.set(controls::Brightness, options_->brightness); 225 | if (!controls_.contains(controls::Contrast)) 226 | controls_.set(controls::Contrast, options_->contrast); 227 | if (!controls_.contains(controls::Saturation)) 228 | controls_.set(controls::Saturation, options_->saturation); 229 | if (!controls_.contains(controls::Sharpness)) 230 | controls_.set(controls::Sharpness, options_->sharpness); 231 | 232 | if (camera_->start(&controls_)) 233 | throw std::runtime_error("failed to start camera"); 234 | controls_.clear(); 235 | camera_started_ = true; 236 | last_timestamp_ = 0; 237 | 238 | camera_->requestCompleted.connect(this, &LibcameraApp::requestComplete); 239 | 240 | for (std::unique_ptr &request : requests_) 241 | { 242 | if (camera_->queueRequest(request.get()) < 0) 243 | throw std::runtime_error("Failed to queue request"); 244 | } 245 | 246 | if (options_->verbose) 247 | std::cerr << "Camera started!" << std::endl; 248 | } 249 | 250 | void LibcameraApp::StopCamera() 251 | { 252 | { 253 | // We don't want QueueRequest to run asynchronously while we stop the camera. 254 | std::lock_guard lock(camera_stop_mutex_); 255 | if (camera_started_) 256 | { 257 | if (camera_->stop()) 258 | throw std::runtime_error("failed to stop camera"); 259 | 260 | camera_started_ = false; 261 | } 262 | } 263 | 264 | if (camera_) 265 | camera_->requestCompleted.disconnect(this, &LibcameraApp::requestComplete); 266 | 267 | // An application might be holding a CompletedRequest, so queueRequest will get 268 | // called to delete it later, but we need to know not to try and re-queue it. 269 | completed_requests_.clear(); 270 | 271 | msg_queue_.Clear(); 272 | 273 | while (!free_requests_.empty()) 274 | free_requests_.pop(); 275 | 276 | requests_.clear(); 277 | 278 | controls_.clear(); // no need for mutex here 279 | 280 | if (options_->verbose && !options_->help) 281 | std::cerr << "Camera stopped!" 
<< std::endl; 282 | } 283 | 284 | LibcameraApp::Msg LibcameraApp::Wait() 285 | { 286 | return msg_queue_.Wait(); 287 | } 288 | 289 | void LibcameraApp::queueRequest(CompletedRequest *completed_request) 290 | { 291 | BufferMap buffers(std::move(completed_request->buffers)); 292 | 293 | Request *request = completed_request->request; 294 | delete completed_request; 295 | assert(request); 296 | 297 | // This function may run asynchronously so needs protection from the 298 | // camera stopping at the same time. 299 | std::lock_guard stop_lock(camera_stop_mutex_); 300 | if (!camera_started_) 301 | return; 302 | 303 | // An application could be holding a CompletedRequest while it stops and re-starts 304 | // the camera, after which we don't want to queue another request now. 305 | { 306 | std::lock_guard lock(completed_requests_mutex_); 307 | auto it = completed_requests_.find(completed_request); 308 | if (it == completed_requests_.end()) 309 | return; 310 | completed_requests_.erase(it); 311 | } 312 | 313 | for (auto const &p : buffers) 314 | { 315 | if (request->addBuffer(p.first, p.second) < 0) 316 | throw std::runtime_error("failed to add buffer to request in QueueRequest"); 317 | } 318 | 319 | { 320 | std::lock_guard lock(control_mutex_); 321 | request->controls() = std::move(controls_); 322 | } 323 | 324 | if (camera_->queueRequest(request) < 0) 325 | throw std::runtime_error("failed to queue request"); 326 | } 327 | 328 | void LibcameraApp::PostMessage(MsgType &t, MsgPayload &p) 329 | { 330 | msg_queue_.Post(Msg(t, std::move(p))); 331 | } 332 | 333 | libcamera::Stream *LibcameraApp::GetStream(std::string const &name, unsigned int *w, unsigned int *h, 334 | unsigned int *stride) const 335 | { 336 | auto it = streams_.find(name); 337 | if (it == streams_.end()) 338 | return nullptr; 339 | StreamDimensions(it->second, w, h, stride); 340 | return it->second; 341 | } 342 | 343 | libcamera::Stream *LibcameraApp::ViewfinderStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 344 | { 345 | return GetStream("viewfinder", w, h, stride); 346 | } 347 | 348 | libcamera::Stream *LibcameraApp::StillStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 349 | { 350 | return GetStream("still", w, h, stride); 351 | } 352 | 353 | libcamera::Stream *LibcameraApp::RawStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 354 | { 355 | return GetStream("raw", w, h, stride); 356 | } 357 | 358 | libcamera::Stream *LibcameraApp::VideoStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 359 | { 360 | return GetStream("video", w, h, stride); 361 | } 362 | 363 | libcamera::Stream *LibcameraApp::LoresStream(unsigned int *w, unsigned int *h, unsigned int *stride) const 364 | { 365 | return GetStream("lores", w, h, stride); 366 | } 367 | 368 | libcamera::Stream *LibcameraApp::GetMainStream() const 369 | { 370 | for (auto &p : streams_) 371 | { 372 | if (p.first == "viewfinder" || p.first == "still" || p.first == "video") 373 | return p.second; 374 | } 375 | 376 | return nullptr; 377 | } 378 | 379 | std::vector> LibcameraApp::Mmap(FrameBuffer *buffer) const 380 | { 381 | auto item = mapped_buffers_.find(buffer); 382 | if (item == mapped_buffers_.end()) 383 | return {}; 384 | return item->second; 385 | } 386 | 387 | void LibcameraApp::SetControls(ControlList &controls) 388 | { 389 | std::lock_guard lock(control_mutex_); 390 | controls_ = std::move(controls); 391 | } 392 | 393 | void LibcameraApp::StreamDimensions(Stream const *stream, unsigned int *w, unsigned 
int *h, unsigned int *stride) const 394 | { 395 | StreamConfiguration const &cfg = stream->configuration(); 396 | if (w) 397 | *w = cfg.size.width; 398 | if (h) 399 | *h = cfg.size.height; 400 | if (stride) 401 | *stride = cfg.stride; 402 | } 403 | 404 | void LibcameraApp::setupCapture() 405 | { 406 | // First finish setting up the configuration. 407 | 408 | CameraConfiguration::Status validation = configuration_->validate(); 409 | if (validation == CameraConfiguration::Invalid) 410 | throw std::runtime_error("failed to valid stream configurations"); 411 | else if (validation == CameraConfiguration::Adjusted) 412 | std::cerr << "Stream configuration adjusted" << std::endl; 413 | 414 | if (camera_->configure(configuration_.get()) < 0) 415 | throw std::runtime_error("failed to configure streams"); 416 | if (options_->verbose) 417 | std::cerr << "Camera streams configured" << std::endl; 418 | 419 | // Next allocate all the buffers we need, mmap them and store them on a free list. 420 | 421 | allocator_ = new FrameBufferAllocator(camera_); 422 | for (StreamConfiguration &config : *configuration_) 423 | { 424 | Stream *stream = config.stream(); 425 | 426 | if (allocator_->allocate(stream) < 0) 427 | throw std::runtime_error("failed to allocate capture buffers"); 428 | 429 | for (const std::unique_ptr &buffer : allocator_->buffers(stream)) 430 | { 431 | // "Single plane" buffers appear as multi-plane here, but we can spot them because then 432 | // planes all share the same fd. We accumulate them so as to mmap the buffer only once. 433 | size_t buffer_size = 0; 434 | for (unsigned i = 0; i < buffer->planes().size(); i++) 435 | { 436 | const FrameBuffer::Plane &plane = buffer->planes()[i]; 437 | buffer_size += plane.length; 438 | if (i == buffer->planes().size() - 1 || plane.fd.get() != buffer->planes()[i + 1].fd.get()) 439 | { 440 | void *memory = mmap(NULL, buffer_size, PROT_READ | PROT_WRITE, MAP_SHARED, plane.fd.get(), 0); 441 | mapped_buffers_[buffer.get()].push_back( 442 | libcamera::Span(static_cast(memory), buffer_size)); 443 | buffer_size = 0; 444 | } 445 | } 446 | frame_buffers_[stream].push(buffer.get()); 447 | } 448 | } 449 | if (options_->verbose) 450 | std::cerr << "Buffers allocated and mapped" << std::endl; 451 | 452 | // The requests will be made when StartCamera() is called. 
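// Editor's note (clarification, not in the original source): the mmap loop above maps each dmabuf
// only once. An RGB888 viewfinder buffer is a single plane and gets one mapping, while a YUV420
// still buffer reports three planes sharing one fd, which are accumulated and mapped together.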
453 | } 454 | 455 | void LibcameraApp::makeRequests() 456 | { 457 | auto free_buffers(frame_buffers_); 458 | while (true) 459 | { 460 | for (StreamConfiguration &config : *configuration_) 461 | { 462 | Stream *stream = config.stream(); 463 | if (stream == configuration_->at(0).stream()) 464 | { 465 | if (free_buffers[stream].empty()) 466 | { 467 | if (options_->verbose) 468 | std::cerr << "Requests created" << std::endl; 469 | return; 470 | } 471 | std::unique_ptr request = camera_->createRequest(); 472 | if (!request) 473 | throw std::runtime_error("failed to make request"); 474 | requests_.push_back(std::move(request)); 475 | } 476 | else if (free_buffers[stream].empty()) 477 | throw std::runtime_error("concurrent streams need matching numbers of buffers"); 478 | 479 | FrameBuffer *buffer = free_buffers[stream].front(); 480 | free_buffers[stream].pop(); 481 | if (requests_.back()->addBuffer(stream, buffer) < 0) 482 | throw std::runtime_error("failed to add buffer to request"); 483 | } 484 | } 485 | } 486 | 487 | void LibcameraApp::requestComplete(Request *request) 488 | { 489 | if (request->status() == Request::RequestCancelled) 490 | return; 491 | 492 | CompletedRequest *r = new CompletedRequest(sequence_++, request); 493 | CompletedRequestPtr payload(r, [this](CompletedRequest *cr) { this->queueRequest(cr); }); 494 | { 495 | std::lock_guard lock(completed_requests_mutex_); 496 | completed_requests_.insert(r); 497 | } 498 | 499 | // We calculate the instantaneous framerate in case anyone wants it. 500 | uint64_t timestamp = payload->buffers.begin()->second->metadata().timestamp; 501 | if (last_timestamp_ == 0 || last_timestamp_ == timestamp) 502 | payload->framerate = 0; 503 | else 504 | payload->framerate = 1e9 / (timestamp - last_timestamp_); 505 | last_timestamp_ = timestamp; 506 | 507 | msg_queue_.Post(Msg(MsgType::RequestComplete, std::move(payload))); 508 | } 509 | 510 | void LibcameraApp::configureDenoise(const std::string &denoise_mode) 511 | { 512 | using namespace libcamera::controls::draft; 513 | 514 | static const std::map denoise_table = { 515 | { "off", NoiseReductionModeOff }, 516 | { "cdn_off", NoiseReductionModeMinimal }, 517 | { "cdn_fast", NoiseReductionModeFast }, 518 | { "cdn_hq", NoiseReductionModeHighQuality } 519 | }; 520 | NoiseReductionModeEnum denoise; 521 | 522 | auto const mode = denoise_table.find(denoise_mode); 523 | if (mode == denoise_table.end()) 524 | throw std::runtime_error("Invalid denoise mode " + denoise_mode); 525 | denoise = mode->second; 526 | 527 | controls_.set(NoiseReductionMode, denoise); 528 | } 529 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/src/libcamera_app_options.cpp: -------------------------------------------------------------------------------- 1 | /* SPDX-License-Identifier: BSD-2-Clause */ 2 | /* 3 | * Copyright (C) 2020, Raspberry Pi (Trading) Ltd. 
4 | * 5 | * options.cpp - common program options helpers 6 | */ 7 | #include "libcamera_app_options.hpp" 8 | 9 | void Options::Print() const 10 | { 11 | std::cerr << "Options:" << std::endl; 12 | std::cerr << " verbose: " << verbose << std::endl; 13 | std::cerr << " info_text:" << info_text << std::endl; 14 | std::cerr << " timeout: " << timeout << std::endl; 15 | std::cerr << " photo resolution: " << photo_width << " x "<< photo_height << std::endl; 16 | std::cerr << " video resolution: " << video_width << " x " << video_height << std::endl; 17 | std::cerr << " rawfull: " << rawfull << std::endl; 18 | std::cerr << " transform: " << transformToString(transform) << std::endl; 19 | if (roi_width == 0 || roi_height == 0) 20 | std::cerr << " roi: all" << std::endl; 21 | else 22 | std::cerr << " roi: " << roi_x << "," << roi_y << "," << roi_width << "," << roi_height << std::endl; 23 | if (shutter) 24 | std::cerr << " shutter: " << shutter << std::endl; 25 | if (gain) 26 | std::cerr << " gain: " << gain << std::endl; 27 | std::cerr << " metering: " << metering_index << std::endl; 28 | std::cerr << " exposure: " << exposure_index << std::endl; 29 | std::cerr << " ev: " << ev << std::endl; 30 | std::cerr << " awb: " << awb_index << std::endl; 31 | if (awb_gain_r && awb_gain_b) 32 | std::cerr << " awb gains: red " << awb_gain_r << " blue " << awb_gain_b << std::endl; 33 | std::cerr << " brightness: " << brightness << std::endl; 34 | std::cerr << " contrast: " << contrast << std::endl; 35 | std::cerr << " saturation: " << saturation << std::endl; 36 | std::cerr << " sharpness: " << sharpness << std::endl; 37 | std::cerr << " framerate: " << framerate << std::endl; 38 | std::cerr << " denoise: " << denoise << std::endl; 39 | } 40 | -------------------------------------------------------------------------------- /Bullseye_64_LCCV/src/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | using namespace std; 10 | 11 | // Create zbar scanner 12 | zbar::ImageScanner scanner; 13 | 14 | struct decodedObject 15 | { 16 | string type; 17 | string data; 18 | vector location; 19 | }; 20 | 21 | // Display barcode and QR code location 22 | void display(cv::Mat &im, vector&decodedObjects) 23 | { 24 | // Loop over all decoded objects 25 | for(size_t i = 0; i points = decodedObjects[i].location; 27 | vector hull; 28 | 29 | // If the points do not form a quad, find convex hull 30 | if(points.size() > 4) cv::convexHull(points, hull); 31 | else hull = points; 32 | 33 | // Number of points in the convex hull 34 | size_t n = hull.size(); 35 | 36 | for(size_t j=0; j&decodedObjects, int nb_frames) 43 | { 44 | // Convert image to grayscale 45 | cv::Mat imGray; 46 | 47 | cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY); 48 | 49 | // Wrap image data in a zbar image 50 | zbar::Image image(im.cols, im.rows, "Y800", (uchar*)imGray.data, im.cols*im.rows); 51 | 52 | // Scan the image for barcodes and QRCodes 53 | int res = scanner.scan(image); 54 | 55 | if (res > 0) { 56 | // Print results 57 | for(zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol){ 58 | decodedObject obj; 59 | 60 | obj.type = symbol->get_type_name(); 61 | obj.data = symbol->get_data(); 62 | // Obtain location 63 | 64 | for(int i = 0; i< symbol->get_location_size(); i++){ 65 | obj.location.push_back(cv::Point(symbol->get_location_x(i),symbol->get_location_y(i))); 66 | } 67 | 
decodedObjects.push_back(obj); 68 | 69 | // debug - print type and data 70 | cout << nb_frames << endl; 71 | cout << "Type : " << obj.type << endl; 72 | cout << "Data : " << obj.data << endl << endl; 73 | 74 | } 75 | display(im, decodedObjects); 76 | } 77 | } 78 | 79 | int main() 80 | { 81 | int ch=0; 82 | int nb_frames=0; 83 | cv::Mat image; 84 | lccv::PiCamera cam; 85 | float f; 86 | float FPS[16]; 87 | int i, Fcnt=0; 88 | chrono::steady_clock::time_point Tbegin, Tend; 89 | 90 | for(i=0;i<16;i++) FPS[i]=0.0; 91 | 92 | cam.options->video_width= 1024; //keep this resolution!!! 93 | cam.options->video_height= 768; //it will be cropped to 720x720 94 | cam.options->framerate=15; 95 | cam.options->verbose=true; 96 | cam.startVideo(); 97 | 98 | // Configure scanner 99 | // see: http://zbar.sourceforge.net/api/zbar_8h.html#f7818ad6458f9f40362eecda97acdcb0 100 | scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 0); 101 | scanner.set_config(zbar::ZBAR_QRCODE, zbar::ZBAR_CFG_ENABLE, 1); 102 | 103 | std::cout<<"Sample program for scanning QR codes"< decodedObjects; 116 | decode(image, decodedObjects, nb_frames++); 117 | 118 | //calculate frame rate (just for your convenience) 119 | Tend = chrono::steady_clock::now(); 120 | f = chrono::duration_cast (Tend - Tbegin).count(); 121 | Tbegin = Tend; 122 | if(f>0.0) FPS[((Fcnt++)&0x0F)]=1000.0/f; 123 | for(f=0.0, i=0;i<16;i++){ f+=FPS[i]; } 124 | putText(image, cv::format("FPS %0.2f", f/16),cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX,0.6, cv::Scalar(0, 0, 255)); 125 | 126 | //show result 127 | cv::imshow("Video",image); 128 | ch=cv::waitKey(10); 129 | } 130 | } 131 | cam.stopVideo(); 132 | cv::destroyWindow("Video"); 133 | return 0; 134 | } 135 | -------------------------------------------------------------------------------- /Buster_32/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0 FATAL_ERROR) 2 | 3 | project(QRpi) 4 | 5 | macro(use_cxx11) 6 | if (CMAKE_VERSION VERSION_LESS "3.1") 7 | if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 8 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 9 | endif () 10 | else () 11 | set (CMAKE_CXX_STANDARD 17) 12 | endif () 13 | endmacro(use_cxx11) 14 | 15 | find_package(OpenCV REQUIRED) 16 | 17 | set(SOURCES ./main.cpp) 18 | 19 | set(EXTRA_LIBS ${OpenCV_LIBS} libgobject-2.0.so libgstreamer-1.0.so libgstapp-1.0.so) 20 | 21 | set(EXECUTABLE_OUTPUT_PATH "./") 22 | 23 | link_directories(/usr/lib/arm-linux-gnueabihf/ /usr/local/lib/) 24 | 25 | include_directories(${OpenCV_INCLUDE_DIRS}) 26 | 27 | add_link_options(-fopenmp -s -ldl -lpthread -lzbar) 28 | 29 | add_executable(QRpi ${SOURCES}) 30 | 31 | target_link_libraries(QRpi ${EXTRA_LIBS}) -------------------------------------------------------------------------------- /Buster_32/QRpi.cbp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 51 | 52 | -------------------------------------------------------------------------------- /Buster_32/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | using namespace std; 9 | 10 | // Create zbar scanner 11 | zbar::ImageScanner scanner; 12 | 13 | struct decodedObject 14 | { 15 | string type; 16 | string data; 17 | vector location; 18 | }; 19 | 20 | // Display barcode and QR code location 21 | void display(cv::Mat &im, vector&decodedObjects) 22 | { 23 | // Loop over all 
decoded objects 24 | for(size_t i = 0; i points = decodedObjects[i].location; 26 | vector hull; 27 | 28 | // If the points do not form a quad, find convex hull 29 | if(points.size() > 4) cv::convexHull(points, hull); 30 | else hull = points; 31 | 32 | // Number of points in the convex hull 33 | size_t n = hull.size(); 34 | 35 | for(size_t j=0; j&decodedObjects, int nb_frames) 42 | { 43 | // Convert image to grayscale 44 | cv::Mat imGray; 45 | 46 | cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY); 47 | 48 | // Wrap image data in a zbar image 49 | zbar::Image image(im.cols, im.rows, "Y800", (uchar*)imGray.data, im.cols*im.rows); 50 | 51 | // Scan the image for barcodes and QRCodes 52 | int res = scanner.scan(image); 53 | 54 | if (res > 0) { 55 | // Print results 56 | for(zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol){ 57 | decodedObject obj; 58 | 59 | obj.type = symbol->get_type_name(); 60 | obj.data = symbol->get_data(); 61 | // Obtain location 62 | 63 | for(int i = 0; i< symbol->get_location_size(); i++){ 64 | obj.location.push_back(cv::Point(symbol->get_location_x(i),symbol->get_location_y(i))); 65 | } 66 | decodedObjects.push_back(obj); 67 | 68 | // debug - print type and data 69 | cout << nb_frames << endl; 70 | cout << "Type : " << obj.type << endl; 71 | cout << "Data : " << obj.data << endl << endl; 72 | 73 | } 74 | display(im, decodedObjects); 75 | } 76 | } 77 | 78 | std::string gstreamer_pipeline(int capture_width, int capture_height, int framerate, int display_width, int display_height) { 79 | return 80 | " v4l2src device=/dev/video0 ! video/x-raw, " 81 | " width=(int)" + std::to_string(capture_width) + "," 82 | " height=(int)" + std::to_string(capture_height) + "," 83 | " framerate=(fraction)" + std::to_string(framerate) +"/1 !" 84 | " videoconvert ! videoscale !" 85 | " video/x-raw," 86 | " width=(int)" + std::to_string(display_width) + "," 87 | " height=(int)" + std::to_string(display_height) + " ! appsink"; 88 | } 89 | 90 | int main() 91 | { 92 | int ch=0; 93 | int nb_frames=0; 94 | cv::Mat image; 95 | float f; 96 | float FPS[16]; 97 | int i, Fcnt=0; 98 | chrono::steady_clock::time_point Tbegin, Tend; 99 | 100 | for(i=0;i<16;i++) FPS[i]=0.0; 101 | 102 | //pipeline parameters 103 | //keep this resolution!!! 
104 | //it will be cropped to 720x720 105 | int capture_width = 1024; 106 | int capture_height = 768; 107 | int framerate = 15 ; 108 | int display_width = 1024; 109 | int display_height = 768; 110 | 111 | //reset frame average 112 | std::string pipeline = gstreamer_pipeline(capture_width, capture_height, framerate, 113 | display_width, display_height); 114 | std::cout << "Using pipeline: \n\t" << pipeline << "\n\n\n"; 115 | 116 | cv::VideoCapture cap(pipeline, cv::CAP_GSTREAMER); 117 | if(!cap.isOpened()) { 118 | std::cout<<"Failed to open camera."< decodedObjects; 142 | decode(image, decodedObjects, nb_frames++); 143 | 144 | //calculate frame rate (just for your convenience) 145 | Tend = chrono::steady_clock::now(); 146 | f = chrono::duration_cast (Tend - Tbegin).count(); 147 | Tbegin = Tend; 148 | if(f>0.0) FPS[((Fcnt++)&0x0F)]=1000.0/f; 149 | for(f=0.0, i=0;i<16;i++){ f+=FPS[i]; } 150 | putText(image, cv::format("FPS %0.2f", f/16),cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX,0.6, cv::Scalar(0, 0, 255)); 151 | 152 | //show result 153 | cv::imshow("Video",image); 154 | ch=cv::waitKey(10); 155 | } 156 | cap.release(); 157 | cv::destroyWindow("Video"); 158 | return 0; 159 | } 160 | -------------------------------------------------------------------------------- /Buster_64/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0 FATAL_ERROR) 2 | 3 | project(QRpi) 4 | 5 | macro(use_cxx11) 6 | if (CMAKE_VERSION VERSION_LESS "3.1") 7 | if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 8 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 9 | endif () 10 | else () 11 | set (CMAKE_CXX_STANDARD 17) 12 | endif () 13 | endmacro(use_cxx11) 14 | 15 | find_package(OpenCV REQUIRED) 16 | 17 | set(SOURCES ./main.cpp) 18 | 19 | set(EXTRA_LIBS ${OpenCV_LIBS} libgobject-2.0.so libgstreamer-1.0.so libgstapp-1.0.so) 20 | 21 | set(EXECUTABLE_OUTPUT_PATH "./") 22 | 23 | link_directories(/usr/lib/aarch64-linux-gnu/ /usr/local/lib/) 24 | 25 | include_directories(${OpenCV_INCLUDE_DIRS}) 26 | 27 | add_link_options(-fopenmp -s -ldl -lpthread -lzbar) 28 | 29 | add_executable(QRpi ${SOURCES}) 30 | 31 | target_link_libraries(QRpi ${EXTRA_LIBS}) -------------------------------------------------------------------------------- /Buster_64/QRpi.cbp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 51 | 52 | -------------------------------------------------------------------------------- /Buster_64/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | using namespace std; 9 | 10 | // Create zbar scanner 11 | zbar::ImageScanner scanner; 12 | 13 | struct decodedObject 14 | { 15 | string type; 16 | string data; 17 | vector location; 18 | }; 19 | 20 | // Display barcode and QR code location 21 | void display(cv::Mat &im, vector&decodedObjects) 22 | { 23 | // Loop over all decoded objects 24 | for(size_t i = 0; i points = decodedObjects[i].location; 26 | vector hull; 27 | 28 | // If the points do not form a quad, find convex hull 29 | if(points.size() > 4) cv::convexHull(points, hull); 30 | else hull = points; 31 | 32 | // Number of points in the convex hull 33 | size_t n = hull.size(); 34 | 35 | for(size_t j=0; j&decodedObjects, int nb_frames) 42 | { 43 | // Convert image to grayscale 44 | cv::Mat imGray; 45 | 46 | cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY); 47 | 48 | // Wrap image 
data in a zbar image 49 | zbar::Image image(im.cols, im.rows, "Y800", (uchar*)imGray.data, im.cols*im.rows); 50 | 51 | // Scan the image for barcodes and QRCodes 52 | int res = scanner.scan(image); 53 | 54 | if (res > 0) { 55 | // Print results 56 | for(zbar::Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol){ 57 | decodedObject obj; 58 | 59 | obj.type = symbol->get_type_name(); 60 | obj.data = symbol->get_data(); 61 | // Obtain location 62 | 63 | for(int i = 0; i< symbol->get_location_size(); i++){ 64 | obj.location.push_back(cv::Point(symbol->get_location_x(i),symbol->get_location_y(i))); 65 | } 66 | decodedObjects.push_back(obj); 67 | 68 | // debug - print type and data 69 | cout << nb_frames << endl; 70 | cout << "Type : " << obj.type << endl; 71 | cout << "Data : " << obj.data << endl << endl; 72 | 73 | } 74 | display(im, decodedObjects); 75 | } 76 | } 77 | 78 | std::string gstreamer_pipeline(int capture_width, int capture_height, int framerate, int display_width, int display_height) { 79 | return 80 | " v4l2src device=/dev/video0 ! video/x-raw, " 81 | " width=(int)" + std::to_string(capture_width) + "," 82 | " height=(int)" + std::to_string(capture_height) + "," 83 | " framerate=(fraction)" + std::to_string(framerate) +"/1 !" 84 | " videoconvert ! videoscale !" 85 | " video/x-raw," 86 | " width=(int)" + std::to_string(display_width) + "," 87 | " height=(int)" + std::to_string(display_height) + " ! appsink"; 88 | } 89 | 90 | int main() 91 | { 92 | int ch=0; 93 | int nb_frames=0; 94 | cv::Mat image; 95 | float f; 96 | float FPS[16]; 97 | int i, Fcnt=0; 98 | chrono::steady_clock::time_point Tbegin, Tend; 99 | 100 | for(i=0;i<16;i++) FPS[i]=0.0; 101 | 102 | //pipeline parameters 103 | //keep this resolution!!! 104 | //it will be cropped to 720x720 105 | int capture_width = 1024; 106 | int capture_height = 768; 107 | int framerate = 15 ; 108 | int display_width = 1024; 109 | int display_height = 768; 110 | 111 | //reset frame average 112 | std::string pipeline = gstreamer_pipeline(capture_width, capture_height, framerate, 113 | display_width, display_height); 114 | std::cout << "Using pipeline: \n\t" << pipeline << "\n\n\n"; 115 | 116 | cv::VideoCapture cap(pipeline, cv::CAP_GSTREAMER); 117 | if(!cap.isOpened()) { 118 | std::cout<<"Failed to open camera."< decodedObjects; 142 | decode(image, decodedObjects, nb_frames++); 143 | 144 | //calculate frame rate (just for your convenience) 145 | Tend = chrono::steady_clock::now(); 146 | f = chrono::duration_cast (Tend - Tbegin).count(); 147 | Tbegin = Tend; 148 | if(f>0.0) FPS[((Fcnt++)&0x0F)]=1000.0/f; 149 | for(f=0.0, i=0;i<16;i++){ f+=FPS[i]; } 150 | putText(image, cv::format("FPS %0.2f", f/16),cv::Point(10,20),cv::FONT_HERSHEY_SIMPLEX,0.6, cv::Scalar(0, 0, 255)); 151 | 152 | //show result 153 | cv::imshow("Video",image); 154 | ch=cv::waitKey(10); 155 | } 156 | cap.release(); 157 | cv::destroyWindow("Video"); 158 | return 0; 159 | } 160 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, Q-engineering 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. 
Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # QR scanner Raspberry Pi 2 | ![output image]( https://qengineering.eu/images/QR.webp ) 3 | ## QR and barcode scanner for the Raspberry Pi 3 and 4 (32 and 64 bit OS).
4 | [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)

5 | Specially made for a bare Raspberry Pi; see [Q-engineering computer vision](https://qengineering.eu/computer-vision-with-raspberry-pi-and-alternatives.html). 6 | 7 | 8 | ------------ 9 | 10 | ## Benchmark. 11 | | Operating system | FPS | CPU load | 12 | | ------------- | :-----: | :-----: | 13 | | Bullseye 64-bit LCCV | 13.5 | 34% | 14 | | Bullseye 64-bit | 12.0 | 46% | 15 | | Bullseye 32-bit | 10.0 | 40% | 16 | | Buster 64-bit | 8.7 | 34% | 17 | | Buster 32-bit | 8.0 | 32% | 18 | 19 | All versions work with GStreamer due to its very low latency.
20 | There is also a Bullseye 64-bit version that works with our [LCCV](https://github.com/Qengineering/LCCV).
21 | It outperforms GStreamer in terms of FPS and CPU load. 22 | 23 | ------------ 24 | 25 | ## Dependencies. 26 | To run the application, you need: 27 | - A Raspberry Pi 4 with a 32 or 64-bit operating system, running either Buster (Debian 10) or Bullseye (Debian 11).
28 | - ZBar installed. 29 | - OpenCV installed. [Install OpenCV 4.5](https://qengineering.eu/install-opencv-4.5-on-raspberry-64-os.html)
30 | - A working Raspicam. 31 | 32 | ------------ 33 | 34 | ## Installing ZBar. 35 | You have to install ZBar first. It is a perfect library for scanning QR and barcodes. Much better and faster than the comparable OpenCV module. 36 | ``` 37 | $ sudo apt install autopoint build-essential libv4l-dev libtool 38 | $ git clone -b 0.23.92 https://github.com/mchehab/zbar.git 39 | $ cd zbar 40 | $ autoreconf -vfi 41 | $ ./configure 42 | ``` 43 | ![output image]( https://qengineering.eu/images/QR_build.webp ) 44 | ``` 45 | $ make -j4 46 | $ sudo make install 47 | $ sudo ldconfig 48 | ``` 49 | Note that ZBar tries to work with /dev/video0, which the Bullseye operating system doesn't support (yet).
50 | Since we're only using the decoding part of ZBar and not the ability to capture images, it won't affect our project.
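If you want to check the ZBar installation before building the full app, a minimal stand-alone decoder along these lines should work. It is only a sketch: the test image `qr.png`, the file name `zbar_test.cpp` and the compile command are examples, not part of this repository.
```
// zbar_test.cpp - minimal sketch to verify the ZBar + OpenCV installation
// typical build: g++ zbar_test.cpp -o zbar_test $(pkg-config --cflags --libs opencv4) -lzbar
#include <opencv2/opencv.hpp>
#include <zbar.h>
#include <iostream>

int main()
{
    cv::Mat im = cv::imread("qr.png");               // any picture containing a QR code
    if(im.empty()){ std::cout << "image not found" << std::endl; return 1; }

    cv::Mat gray;
    cv::cvtColor(im, gray, cv::COLOR_BGR2GRAY);

    zbar::ImageScanner scanner;
    scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1);   // enable every symbology

    // wrap the grayscale pixels in a zbar image and scan it
    zbar::Image image(gray.cols, gray.rows, "Y800", (uchar*)gray.data, gray.cols*gray.rows);
    if(scanner.scan(image) > 0){
        for(zbar::Image::SymbolIterator s = image.symbol_begin(); s != image.symbol_end(); ++s)
            std::cout << s->get_type_name() << " : " << s->get_data() << std::endl;
    }
    else std::cout << "no code found" << std::endl;
    return 0;
}
```
If this prints the content of your test code, the decoding part of ZBar works and the camera warning above can be ignored.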
51 | 52 | ### Scanning QR and/or barcodes 53 | In the code you can configure which codes ZBar tries to decode. 54 | You can enable all possible codes with a single line: 55 | ``` 56 | scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1); 57 | ``` 58 | Or, if you want to scan only a specific code, disable everything and then enable the one you want: 59 | ``` 60 | scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 0); 61 | scanner.set_config(zbar::ZBAR_QRCODE, zbar::ZBAR_CFG_ENABLE, 1); 62 | ``` 63 | More info at [ZBar](http://zbar.sourceforge.net/api/zbar_8h.html#f7818ad6458f9f40362eecda97acdcb0). 64 | 65 | ------------ 66 | 67 | ## Installing the app. 68 | 69 | - Make sure you have OpenCV up and running on your system.
70 | - Choose the folder with your operating system (Buster or Bullseye, 32 or 64 bit); an example command sequence follows this list.
71 | - Download the files.
72 | - You can either build the app with Code::Blocks (`$ sudo apt-get install codeblocks`) or use CMake.
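For example, with placeholders instead of the real repository address (pick the folder that matches your OS; `Bullseye_64` is only used here as an illustration):
```
$ git clone <repository-url>
$ cd <repository-folder>/Bullseye_64
```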
73 | ### Code::Blocks 74 | Load the project file `QR.cbp` and run the app with F9.
75 | For more info on how to work with the Code::Blocks IDE see our [tutorial](https://qengineering.eu/opencv-c-examples-on-raspberry-pi.html).
76 | ### CMake 77 | ``` 78 | mkdir build 79 | cd build 80 | cmake .. 81 | make 82 | ``` 83 | ![output image]( https://qengineering.eu/images/QRpi_CMake.png )
84 | After the build, you will find the QRpi app in the build folder.
85 | .
86 | ├── build
87 | │   ├── CMakeCache.txt
88 | │   ├── CMakeFiles
89 | │   ├── cmake_install.cmake
90 | │   ├── Makefile
91 | │   └── **_QRpi_**
92 | ├── CMakeLists.txt
93 | ├── main.cpp
94 | └── QRpi.cbp
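A typical way to start the freshly built app (assuming the CMake build above; the binary expects no command-line arguments and needs a desktop session, since the result is shown in an OpenCV window):
```
$ cd build
$ ./QRpi
```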

95 | 96 | ------------ 97 | 98 | ## Final remarks. 99 | 100 | - Keep the camera resolution at 1024x768, as the images will be cropped to the required ZBar size of 720x720 (a minimal crop sketch is shown below). 101 | - The standard Raspicam is not the best choice when it comes to scanning small QR codes. With its fixed focus, the camera cannot zoom in very close. If you need to scan tiny QR codes, consider a variable-focus camera such as [ArduCam's 8Mp](https://www.uctronics.com/arducam-8-mp-sony-imx219-camera-module-with-m12-lens-ls40136-for-raspberry-pi.html). ![output image]( https://qengineering.eu/images/Arducam_M12.png )
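As a sketch of that first remark, a 720x720 center crop of the 1024x768 frame can be taken with a simple OpenCV ROI. This only illustrates the idea and is not a verbatim copy of the app's internal code; `frame` stands for the image grabbed from the camera in the main loop.
```
// take the middle 720x720 part of the 1024x768 camera frame
cv::Rect roi((frame.cols - 720) / 2, (frame.rows - 720) / 2, 720, 720);
cv::Mat square = frame(roi).clone();   // clone() keeps the pixel data contiguous for ZBar
```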

102 | 103 | ![output image]( https://qengineering.eu/images/QRsucces.png ) 104 | 105 | --------------------------------------------------------------------------------