├── README.md ├── src ├── imageprocess.h ├── main.cpp ├── settingdialog.h ├── mainwindow.h ├── imageprocess.cpp ├── yolov5.h ├── mainwindow.ui ├── mainwindow.cpp ├── settingdialog.ui ├── settingdialog.cpp └── yolov5.cpp ├── test └── test.cpp ├── camera ├── .gitignore ├── usbhotplug.h ├── jpegwrap.h ├── strings.hpp ├── usbhotplug.cpp ├── jpegwrap.cpp ├── camera.h └── camera.cpp ├── LICENSE └── CMakeLists.txt /README.md: -------------------------------------------------------------------------------- 1 | # V4L2 CAMERA 2 | 3 | ## Requriements 4 | - libyuv 5 | - opencv 6 | - ncnn 7 | ## capabilities 8 | - adjust camera paramemter (brightness, exposure, saturation, white balance etc) 9 | - image processing with opencv 10 | - object detection with yolov5 11 | -------------------------------------------------------------------------------- /src/imageprocess.h: -------------------------------------------------------------------------------- 1 | #ifndef IMAGEPROCESS_H 2 | #define IMAGEPROCESS_H 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include "yolov5.h" 9 | 10 | class Imageprocess 11 | { 12 | public: 13 | static void canny(int height, int width, unsigned char* data); 14 | static void laplace(int height, int width, unsigned char* data); 15 | static void yolov5(int height, int width, unsigned char* data); 16 | }; 17 | 18 | #endif // IMAGEPROCESS_H 19 | -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include 3 | #include "yolov5.h" 4 | #include 5 | #include 6 | 7 | int main(int argc, char *argv[]) 8 | { 9 | QApplication a(argc, argv); 10 | /* load model */ 11 | QtConcurrent::run([](){ 12 | bool ret = Yolov5::instance().load("/home/galois/MySpace/model/yolov5s_6.0"); 13 | if (ret == false) { 14 | QMessageBox::warning(nullptr, "Notice", "Failed to load model", QMessageBox::Ok); 15 | } 16 | 
}); 17 | MainWindow w; 18 | w.show(); 19 | return a.exec(); 20 | } 21 | -------------------------------------------------------------------------------- /src/settingdialog.h: -------------------------------------------------------------------------------- 1 | #ifndef SETTINGDIALOG_H 2 | #define SETTINGDIALOG_H 3 | 4 | #include 5 | #include "camera/camera.h" 6 | 7 | namespace Ui { 8 | class SettingDialog; 9 | } 10 | 11 | class SettingDialog : public QDialog 12 | { 13 | Q_OBJECT 14 | 15 | public: 16 | explicit SettingDialog(Camera::Device *camera_, QWidget *parent = nullptr); 17 | ~SettingDialog(); 18 | void saveParams(const QString &fileName); 19 | bool loadParams(const QString &fileName); 20 | void updateParam(); 21 | void setDefault(); 22 | void dumpParam(); 23 | private: 24 | Ui::SettingDialog *ui; 25 | Camera::Device *camera; 26 | }; 27 | 28 | #endif // SETTINGDIALOG_H 29 | -------------------------------------------------------------------------------- /test/test.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | void test_usbhotplug() 6 | { 7 | UsbHotplug hotplug; 8 | hotplug.registerDevice("1C4F:0043", [](int action) { 9 | if (action == UsbHotplug::ACTION_DEVICE_ATTACHED) { 10 | std::cout<<"device arrived."< 5 | #include 6 | #include 7 | #include "camera/camera.h" 8 | #include "imageprocess.h" 9 | #include "settingdialog.h" 10 | 11 | namespace Ui { 12 | class MainWindow; 13 | } 14 | 15 | class MainWindow : public QMainWindow 16 | { 17 | Q_OBJECT 18 | 19 | public: 20 | explicit MainWindow(QWidget *parent = 0); 21 | ~MainWindow(); 22 | signals: 23 | void sendImage(const QImage &img); 24 | public slots: 25 | void updateImage(const QImage &img); 26 | void enumerateDevice(); 27 | void onDeviceChanged(const QString &path); 28 | void onPixelFormatChanged(const QString &format); 29 | void onResolutionChanged(const QString &res); 30 | protected: 31 | void closeEvent(QCloseEvent *ev) 
override; 32 | private: 33 | Ui::MainWindow *ui; 34 | Camera::Device *camera; 35 | SettingDialog *dialog; 36 | QString methodName; 37 | }; 38 | 39 | #endif // MAINWINDOW_H 40 | -------------------------------------------------------------------------------- /camera/.gitignore: -------------------------------------------------------------------------------- 1 | # This file is used to ignore files which are generated 2 | # ---------------------------------------------------------------------------- 3 | 4 | *~ 5 | *.autosave 6 | *.a 7 | *.core 8 | *.moc 9 | *.o 10 | *.obj 11 | *.orig 12 | *.rej 13 | *.so 14 | *.so.* 15 | *_pch.h.cpp 16 | *_resource.rc 17 | *.qm 18 | .#* 19 | *.*# 20 | core 21 | !core/ 22 | tags 23 | .DS_Store 24 | .directory 25 | *.debug 26 | Makefile* 27 | *.prl 28 | *.app 29 | moc_*.cpp 30 | ui_*.h 31 | qrc_*.cpp 32 | Thumbs.db 33 | *.res 34 | *.rc 35 | /.qmake.cache 36 | /.qmake.stash 37 | 38 | # qtcreator generated files 39 | *.pro.user* 40 | 41 | # xemacs temporary files 42 | *.flc 43 | 44 | # Vim temporary files 45 | .*.swp 46 | 47 | # Visual Studio generated files 48 | *.ib_pdb_index 49 | *.idb 50 | *.ilk 51 | *.pdb 52 | *.sln 53 | *.suo 54 | *.vcproj 55 | *vcproj.*.*.user 56 | *.ncb 57 | *.sdf 58 | *.opensdf 59 | *.vcxproj 60 | *vcxproj.* 61 | 62 | # MinGW generated files 63 | *.Debug 64 | *.Release 65 | 66 | # Python byte code 67 | *.pyc 68 | 69 | # Binaries 70 | # -------- 71 | *.dll 72 | *.exe 73 | 74 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 WorldEditor50 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /camera/usbhotplug.h: -------------------------------------------------------------------------------- 1 | #ifndef USBHOTPLUG_H 2 | #define USBHOTPLUG_H 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #define MESSAGE_BUFFER_SIZE 2048 11 | 12 | class UsbHotplug 13 | { 14 | public: 15 | enum State { 16 | STATE_NONE = 0, 17 | STATE_RUN, 18 | STATE_TERMINATE 19 | }; 20 | enum Action { 21 | ACTION_NONE = 0, 22 | ACTION_DEVICE_ATTACHED, 23 | ACTION_DEVICE_DETACHED 24 | }; 25 | using FnNotify = std::function; 26 | struct Device { 27 | int flag; 28 | std::string token; 29 | FnNotify notify; 30 | }; 31 | 32 | protected: 33 | int fd; 34 | int state; 35 | std::map deviceMap; 36 | std::mutex mutex; 37 | std::condition_variable condit; 38 | std::thread listenThread; 39 | protected: 40 | void run(); 41 | public: 42 | UsbHotplug(); 43 | /* vidpid: 093A:2510 */ 44 | void registerDevice(const std::string& vidpid, const FnNotify &func); 45 | int start(); 46 | void stop(); 47 | }; 48 | 49 | #endif // USBHOTPLUG_H 50 | 
-------------------------------------------------------------------------------- /src/imageprocess.cpp: -------------------------------------------------------------------------------- 1 | #include "imageprocess.h" 2 | #include 3 | 4 | void Imageprocess::canny(int height, int width, unsigned char *data) 5 | { 6 | cv::Mat img(height, width, CV_8UC3, data); 7 | cv::Mat gray; 8 | cv::cvtColor(img, gray, cv::COLOR_RGB2GRAY); 9 | cv::blur(gray, gray, cv::Size(3, 3)); 10 | cv::Canny(gray, gray, 60, 120); 11 | cv::cvtColor(gray, img, cv::COLOR_GRAY2RGB); 12 | return; 13 | } 14 | 15 | void Imageprocess::laplace(int height, int width, unsigned char *data) 16 | { 17 | cv::Mat img(height, width, CV_8UC3, data); 18 | cv::Mat gray; 19 | cv::cvtColor(img, gray, cv::COLOR_RGB2GRAY); 20 | cv::GaussianBlur(gray, gray, cv::Size(3, 3), 0); 21 | cv::Mat filterImg; 22 | cv::Laplacian(gray, filterImg, CV_16S, 3); 23 | cv::Mat scaleImg; 24 | cv::convertScaleAbs(filterImg, scaleImg); 25 | cv::cvtColor(scaleImg, img, cv::COLOR_GRAY2RGB); 26 | return ; 27 | } 28 | 29 | void Imageprocess::yolov5(int height, int width, unsigned char *data) 30 | { 31 | cv::Mat img(height, width, CV_8UC3, data); 32 | { 33 | ncnn::MutexLockGuard guard(Yolov5::instance().lock); 34 | std::vector objects; 35 | Yolov5::instance().detect(img, objects); 36 | Yolov5::instance().draw(img, objects); 37 | } 38 | return; 39 | } 40 | 41 | 42 | -------------------------------------------------------------------------------- /camera/jpegwrap.h: -------------------------------------------------------------------------------- 1 | #ifndef JPEGWRAP_H 2 | #define JPEGWRAP_H 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | /* 12 | wrapper of libjpeg examples 13 | origin: 14 | http://www.ijg.org/files/ 15 | 16 | */ 17 | 18 | 19 | class Jpeg 20 | { 21 | public: 22 | struct Error { 23 | struct jpeg_error_mgr pub; 24 | jmp_buf setjmp_buffer; 25 | }; 26 | enum Align { 27 | ALIGN_0 = 0, 
28 | ALIGN_4 29 | }; 30 | enum Scale { 31 | SCALE_D1 = 1, 32 | SCALE_D2 = 2, 33 | SCALE_D4 = 4, 34 | SCALE_D8 = 8 35 | }; 36 | public: 37 | static void errorNotify(j_common_ptr cinfo); 38 | static inline int align4(int width, int channel) {return (width*channel+3)/4*4;} 39 | static int encode(uint8_t*& jpeg, std::size_t &totalsize, 40 | uint8_t* img, int w, int h, int rowstride, int quality=90); 41 | static int decode(uint8_t* &rgb, int &w, int &h, 42 | uint8_t *jpeg, std::size_t totalsize, int scale = SCALE_D1, int align=ALIGN_4); 43 | static int load(const char* filename, std::shared_ptr& img, int &h, int &w, int &c); 44 | static int save(const char* filename, uint8_t* img, int h, int w, int c, int quality=90); 45 | }; 46 | 47 | #endif // JPEGWRAP_H 48 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) 2 | 3 | project(camera LANGUAGES CXX) 4 | 5 | set(CMAKE_INCLUDE_CURRENT_DIR ON) 6 | 7 | set(CMAKE_AUTOUIC ON) 8 | set(CMAKE_AUTOMOC ON) 9 | set(CMAKE_AUTORCC ON) 10 | 11 | set(CMAKE_CXX_STANDARD 11) 12 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 13 | 14 | # QtCreator supports the following variables for Android, which are identical to qmake Android variables. 15 | # Check http://doc.qt.io/qt-5/deployment-android.html for more information. 16 | # They need to be set before the find_package(Qt5 ...) call. 
17 | 18 | find_package(Qt5 COMPONENTS Xml Concurrent Widgets REQUIRED) 19 | 20 | set(SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src) 21 | set(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/test) 22 | set(CAMERA_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera) 23 | set(LIBRARIES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/3rdParty) 24 | # src 25 | file(GLOB SRC_FILES 26 | ${SRC_DIR}/*.h 27 | ${SRC_DIR}/*.hpp 28 | ${SRC_DIR}/*.cpp 29 | ${SRC_DIR}/*.ui) 30 | # test 31 | file(GLOB TEST_FILES 32 | ${CAMERA_DIR}/usbhotplug.h 33 | ${CAMERA_DIR}/usbhotplug.cpp 34 | ${TEST_DIR}/*.h 35 | ${TEST_DIR}/*.hpp 36 | ${TEST_DIR}/*.cpp) 37 | # camera 38 | file(GLOB CAMERA_FILES 39 | ${CAMERA_DIR}/*.h 40 | ${CAMERA_DIR}/*.hpp 41 | ${CAMERA_DIR}/*.cpp) 42 | list(APPEND SRC_FILES ${CAMERA_FILES}) 43 | # opencv 44 | set(OpenCV_DIR ${LIBRARIES_DIR}/opencv47/lib/cmake/opencv4) 45 | find_package(OpenCV REQUIRED) 46 | if (OpenCV_FOUND) 47 | include_directories(${OpenCV_INCLUDE_DIRS}) 48 | endif() 49 | 50 | # libyuv 51 | include_directories(${LIBRARIES_DIR}/libyuv/include) 52 | include_directories(${LIBRARIES_DIR}/libjpeg/include) 53 | set(LIBYUV_LIBS 54 | ${LIBRARIES_DIR}/libyuv/lib/libyuv.a 55 | ${LIBRARIES_DIR}/libjpeg/lib/libjpeg.a) 56 | # ncnn 57 | set(NCNN_DIR ${LIBRARIES_DIR}/ncnn) 58 | include_directories(${NCNN_DIR}/include) 59 | set(NCNN_STATIC ${NCNN_DIR}/lib/libncnn.a -fopenmp) 60 | # app 61 | add_executable(camera ${SRC_FILES}) 62 | target_link_libraries(camera PRIVATE 63 | Qt5::Widgets 64 | Qt5::Xml 65 | Qt5::Concurrent 66 | ${OpenCV_LIBS} 67 | ${LIBYUV_LIBS} 68 | ${NCNN_STATIC}) 69 | # test 70 | add_executable(test ${TEST_FILES}) 71 | -------------------------------------------------------------------------------- /src/yolov5.h: -------------------------------------------------------------------------------- 1 | #ifndef YOLOV5_H 2 | #define YOLOV5_H 3 | 4 | #include "ncnn/layer.h" 5 | #include "ncnn/net.h" 6 | 7 | #if defined(USE_NCNN_SIMPLEOCV) 8 | #include "simpleocv.h" 9 | #else 10 | #include 11 | #include 12 | 
#include 13 | #endif 14 | #include 15 | #include 16 | #include 17 | #include 18 | #define YOLOV5_V60 1 //YOLOv5 v6.0 19 | #define MAX_STRIDE 64 20 | 21 | // original pretrained model from https://github.com/ultralytics/yolov5 22 | // the ncnn model https://github.com/nihui/ncnn-assets/tree/master/models 23 | class Yolov5 24 | { 25 | public: 26 | struct Object { 27 | cv::Rect_ rect; 28 | int label; 29 | float prob; 30 | }; 31 | public: 32 | std::vector labels; 33 | ncnn::Mutex lock; 34 | int target_size; 35 | float prob_threshold; 36 | float nms_threshold; 37 | private: 38 | ncnn::Net yolov5; 39 | ncnn::UnlockedPoolAllocator blob_pool_allocator; 40 | ncnn::PoolAllocator workspace_pool_allocator; 41 | public: 42 | static Yolov5& instance() 43 | { 44 | static Yolov5 yolov5; 45 | return yolov5; 46 | } 47 | bool load(const std::string &modelType); 48 | int detect(const cv::Mat& bgr, std::vector& objects); 49 | void draw(cv::Mat &bgr, const std::vector& objects); 50 | private: 51 | static inline float sigmoid(float x) 52 | { 53 | return static_cast(1.f / (1.f + exp(-x))); 54 | } 55 | static inline float intersection_area(const Object& a, const Object& b) 56 | { 57 | cv::Rect_ inter = a.rect & b.rect; 58 | return inter.area(); 59 | } 60 | static void qsort_descent_inplace(std::vector& objects, int left, int right); 61 | static void qsort_descent_inplace(std::vector& objects); 62 | static void generate_proposals(const ncnn::Mat& anchors, 63 | int stride, 64 | const ncnn::Mat& in_pad, 65 | const ncnn::Mat& feat_blob, 66 | float prob_threshold, 67 | std::vector& objects); 68 | static void nms_sorted_bboxes(const std::vector& objects, 69 | std::vector& picked, 70 | float nms_threshold); 71 | private: 72 | Yolov5(); 73 | }; 74 | 75 | #endif // YOLOV5_H 76 | -------------------------------------------------------------------------------- /camera/strings.hpp: -------------------------------------------------------------------------------- 1 | #ifndef STRINGS_HPP 2 | #define 
/* Small header-only string helpers.
 * Fixes vs. previous revision:
 *  - lower()/upper() implementations were swapped (lower() uppercased and
 *    vice versa); callers such as usbhotplug.cpp's checkVideo() call
 *    Strings::upper() and match against uppercase vid:pid tokens.
 *  - format() used unbounded ::vsprintf into a const char* cast (potential
 *    buffer overflow) and returned a NUL-padded string of length totalsize;
 *    it now uses vsnprintf and trims to the formatted length.
 *  - split() stored std::string::find results in int; now uses size_type/npos.
 */
struct Strings {

    /* printf-style formatting into a std::string.
     * totalsize is the maximum buffer size (output is truncated to fit);
     * the returned string is trimmed to the actual formatted length. */
    static std::string format(int totalsize, const char *formatText, ...)
    {
        if (totalsize <= 0) {
            return std::string();
        }
        std::string text(totalsize, '\0');
        va_list ap;
        va_start(ap, formatText);
        int len = ::vsnprintf(&text[0], (size_t)totalsize, formatText, ap);
        va_end(ap);
        if (len < 0) {
            /* encoding error: return empty rather than garbage */
            return std::string();
        }
        /* vsnprintf returns the would-be length; clamp on truncation */
        text.resize(len < totalsize ? (size_t)len : (size_t)totalsize - 1);
        return text;
    }

    /* Return a copy of s_ with ASCII A-Z folded to a-z. */
    static std::string lower(const std::string &s_)
    {
        std::string s(s_);
        for (size_t i = 0; i < s.size(); i++) {
            s[i] = (char)std::tolower((unsigned char)s[i]);
        }
        return s;
    }

    /* Return a copy of s_ with ASCII a-z folded to A-Z. */
    static std::string upper(const std::string &s_)
    {
        std::string s(s_);
        for (size_t i = 0; i < s.size(); i++) {
            s[i] = (char)std::toupper((unsigned char)s[i]);
        }
        return s;
    }

    /* Split src on every occurrence of delim.
     * Empty delim yields an empty result; adjacent delimiters yield empty
     * elements; a trailing delimiter does NOT append a trailing empty
     * element (preserves historical behavior). */
    static std::vector<std::string> split(const std::string& src, const std::string& delim)
    {
        std::vector<std::string> elems;
        std::size_t pos = 0;
        std::size_t len = src.length();
        std::size_t delim_len = delim.length();
        if (delim_len == 0) {
            return elems;
        }
        while (pos < len) {
            std::string::size_type find_pos = src.find(delim, pos);
            if (find_pos == std::string::npos) {
                elems.push_back(src.substr(pos, len - pos));
                break;
            }
            elems.push_back(src.substr(pos, find_pos - pos));
            pos = find_pos + delim_len;
        }
        return elems;
    }

    /* Parse a decimal integer; returns 0 on failure (atoi semantics). */
    static int stringToInt(const std::string &text)
    {
        return std::atoi(text.c_str());
    }

    /* Map one hex digit to its value 0-15.
     * NOTE: historically accepts any letter a-z/A-Z (g→16, ...) and returns
     * 0 for all other characters; behavior preserved for compatibility. */
    static char hexCharToInt4(char c)
    {
        char x = 0;
        if (c >= 'A' && c <= 'Z') {
            x = c - 'A' + 10;
        } else if (c >= 'a' && c <= 'z') {
            x = c - 'a' + 10;
        } else if (c >= '0' && c <= '9') {
            x = c - '0';
        }
        return x;
    }

    /* Parse exactly two hex characters, most significant first.
     * Caller must supply at least 2 readable chars. */
    static unsigned char hexStringToInt8(const char* hex)
    {
        unsigned char x0 = hexCharToInt4(hex[1]);
        unsigned char x1 = hexCharToInt4(hex[0]);
        return (x1 << 4) + x0;
    }

    /* Parse exactly four hex characters, most significant first.
     * Caller must supply a string of length >= 4. */
    static unsigned short hexStringToInt16(const std::string &hex)
    {
        unsigned char x0 = hexCharToInt4(hex[3]);
        unsigned char x1 = hexCharToInt4(hex[2]);
        unsigned char x2 = hexCharToInt4(hex[1]);
        unsigned char x3 = hexCharToInt4(hex[0]);
        return (x3 << 12) + (x2 << 8) + (x1 << 4) + x0;
    }

};
ui->setupUi(this); 12 | setWindowFlag(Qt::WindowMinMaxButtonsHint); 13 | /* device */ 14 | std::vector devices = Camera::Device::enumerate(); 15 | if (devices.empty()) { 16 | QMessageBox::warning(this, "Notice", "no device"); 17 | return; 18 | } 19 | for (std::size_t i = 0; i < devices.size(); i++) { 20 | ui->deviceComboBox->addItem(QString::fromStdString(devices[i].path)); 21 | } 22 | QString devPath = QString::fromStdString(devices[0].path); 23 | ui->deviceComboBox->setCurrentText(devPath); 24 | 25 | connect(ui->deviceComboBox, &QComboBox::currentTextChanged, 26 | this, &MainWindow::onDeviceChanged); 27 | /* pixel format */ 28 | std::vector pixelFormatList = Camera::Device::getPixelFormatList(devices[0].path); 29 | for (std::size_t i = 0; i < pixelFormatList.size(); i++) { 30 | ui->formatComboBox->addItem(QString::fromStdString(pixelFormatList[i].formatString)); 31 | } 32 | ui->formatComboBox->setCurrentText(CAMERA_PIXELFORMAT_JPEG); 33 | connect(ui->formatComboBox, &QComboBox::currentTextChanged, 34 | this, &MainWindow::onPixelFormatChanged); 35 | /* resolution */ 36 | std::vector res = Camera::Device::getResolutionList(devices[0].path, CAMERA_PIXELFORMAT_JPEG); 37 | for (std::size_t i = 0; i < res.size(); i++) { 38 | ui->resolutionComboBox->addItem(QString::fromStdString(res[i])); 39 | } 40 | connect(ui->resolutionComboBox, &QComboBox::currentTextChanged, 41 | this, &MainWindow::onResolutionChanged); 42 | 43 | connect(ui->enumDeviceBtn, &QPushButton::clicked, 44 | this, &MainWindow::enumerateDevice); 45 | 46 | /* process */ 47 | ui->methodComboBox->addItems(QStringList{"none", "canny", "laplace", "yolov5"}); 48 | connect(ui->methodComboBox, &QComboBox::currentTextChanged, this, [=](const QString &name){ 49 | methodName = name; 50 | }); 51 | methodName = "none"; 52 | ui->methodComboBox->setCurrentText(methodName); 53 | 54 | camera = new Camera::Device(Camera::Decode_SYNC, [this](int h, int w, int c, unsigned char* data){ 55 | if (c == 3) { 56 | if (methodName == 
"canny") { 57 | Imageprocess::canny(h, w, data); 58 | } else if (methodName == "laplace") { 59 | Imageprocess::laplace(h, w, data); 60 | } else if (methodName == "yolov5") { 61 | Imageprocess::yolov5(h, w, data); 62 | } 63 | emit sendImage(QImage(data, w, h, QImage::Format_RGB888)); 64 | } else if (c == 4) { 65 | emit sendImage(QImage(data, w, h, QImage::Format_ARGB32)); 66 | } 67 | 68 | }); 69 | camera->start(devices[0].path, CAMERA_PIXELFORMAT_JPEG, res[0]); 70 | connect(this, &MainWindow::sendImage, 71 | this, &MainWindow::updateImage, Qt::QueuedConnection); 72 | dialog = new SettingDialog(camera, this); 73 | connect(ui->settingBtn, &QPushButton::clicked, dialog, &SettingDialog::show); 74 | } 75 | 76 | MainWindow::~MainWindow() 77 | { 78 | if (camera != nullptr) { 79 | delete camera; 80 | camera = nullptr; 81 | } 82 | delete ui; 83 | } 84 | 85 | void MainWindow::updateImage(const QImage &img) 86 | { 87 | if (img.isNull()) { 88 | qDebug()<<"invalid image"; 89 | return; 90 | } 91 | QPixmap pixmap = QPixmap::fromImage(img.scaled(ui->cameralabel->size())); 92 | ui->cameralabel->setPixmap(pixmap); 93 | return; 94 | } 95 | 96 | void MainWindow::enumerateDevice() 97 | { 98 | disconnect(ui->deviceComboBox, &QComboBox::currentTextChanged, 99 | this, &MainWindow::onDeviceChanged); 100 | 101 | std::vector devices = Camera::Device::enumerate(); 102 | if (devices.empty()) { 103 | QMessageBox::warning(this, "Notice", "no device"); 104 | return; 105 | } 106 | for (std::size_t i = 0; i < devices.size(); i++) { 107 | ui->deviceComboBox->addItem(QString::fromStdString(devices[i].path)); 108 | } 109 | QString devPath = QString::fromStdString(devices[0].path); 110 | ui->deviceComboBox->setCurrentText(devPath); 111 | 112 | connect(ui->deviceComboBox, &QComboBox::currentTextChanged, 113 | this, &MainWindow::onDeviceChanged); 114 | return; 115 | } 116 | 117 | void MainWindow::onDeviceChanged(const QString &path) 118 | { 119 | camera->stop(); 120 | camera->clear(); 121 | /* pixel 
format */ 122 | disconnect(ui->formatComboBox, &QComboBox::currentTextChanged, 123 | this, &MainWindow::onPixelFormatChanged); 124 | 125 | ui->formatComboBox->clear(); 126 | std::vector pixelFormatList = Camera::Device::getPixelFormatList(path.toStdString()); 127 | for (std::size_t i = 0; i < pixelFormatList.size(); i++) { 128 | ui->formatComboBox->addItem(QString::fromStdString(pixelFormatList[i].formatString)); 129 | } 130 | connect(ui->formatComboBox, &QComboBox::currentTextChanged, 131 | this, &MainWindow::onPixelFormatChanged); 132 | /* resolution */ 133 | ui->resolutionComboBox->clear(); 134 | disconnect(ui->resolutionComboBox, &QComboBox::currentTextChanged, 135 | this, &MainWindow::onResolutionChanged); 136 | 137 | std::vector res = Camera::Device::getResolutionList(path.toStdString(), 138 | CAMERA_PIXELFORMAT_JPEG); 139 | for (std::size_t i = 0; i < res.size(); i++) { 140 | ui->resolutionComboBox->addItem(QString::fromStdString(res[i])); 141 | } 142 | connect(ui->resolutionComboBox, &QComboBox::currentTextChanged, 143 | this, &MainWindow::onResolutionChanged); 144 | 145 | /* open camera */ 146 | camera->start(path.toStdString(), 147 | ui->formatComboBox->currentText().toStdString(), 148 | ui->resolutionComboBox->currentText().toStdString()); 149 | return; 150 | } 151 | 152 | void MainWindow::onPixelFormatChanged(const QString &format) 153 | { 154 | camera->restart(format.toStdString(), 155 | ui->resolutionComboBox->currentText().toStdString()); 156 | return; 157 | } 158 | 159 | void MainWindow::onResolutionChanged(const QString &res) 160 | { 161 | camera->restart(ui->formatComboBox->currentText().toStdString(), 162 | res.toStdString()); 163 | return; 164 | } 165 | 166 | void MainWindow::closeEvent(QCloseEvent *ev) 167 | { 168 | if (camera != nullptr) { 169 | camera->stop(); 170 | } 171 | return; 172 | } 173 | -------------------------------------------------------------------------------- /camera/usbhotplug.cpp: 
/* Run a shell command via popen and return the LAST line of its stdout
 * with the trailing newline stripped (preserves historical "keep last
 * line" behavior used by checkVideo() to read modalias files).
 *
 * Fixes vs. previous revision:
 *  - popen() result was passed to fgets() BEFORE the NULL check, crashing
 *    when the command could not be started; now checked first.
 *  - result.erase(result.find('\n')) threw std::out_of_range whenever the
 *    output contained no newline (erase(npos)); now guarded. */
std::string shellExecute(const std::string& command)
{
    std::string result;
    FILE *fpRead = popen(command.c_str(), "r");
    if (fpRead == NULL) {
        return result;
    }
    char buf[1024];
    memset(buf, '\0', sizeof(buf));
    while (fgets(buf, sizeof(buf) - 1, fpRead) != NULL) {
        result = buf;
    }
    pclose(fpRead);
    std::string::size_type it = result.find('\n');
    if (it != std::string::npos) {
        result.erase(it);
    }
    return result;
}
== STATE_RUN || state == STATE_TERMINATE; 76 | }); 77 | if (state == STATE_TERMINATE) { 78 | state = STATE_NONE; 79 | break; 80 | } 81 | } 82 | 83 | /* recv message*/ 84 | char buf[MESSAGE_BUFFER_SIZE] = {0}; 85 | ssize_t len = recv(fd, &buf, sizeof(buf), 0); 86 | if (len <= 0) { 87 | continue; 88 | } 89 | /* parse message */ 90 | #if 0 91 | printf("%s\n", buf); 92 | #endif 93 | std::string message(buf); 94 | int action = ACTION_NONE; 95 | if (message.find("remove@") != std::string::npos) { 96 | std::string::size_type pos = message.find("remove@"); 97 | std::string token = message.substr(7); 98 | for (auto& dev : deviceMap) { 99 | if (dev.second.token == token) { 100 | action = ACTION_DEVICE_DETACHED; 101 | dev.second.flag = action; 102 | dev.second.notify(action); 103 | break; 104 | } 105 | } 106 | } else if (message.find("add@") != std::string::npos) { 107 | std::string vidpid; 108 | std::string::size_type pos = message.find("add@"); 109 | std::string token = message.substr(4); 110 | action = ACTION_DEVICE_ATTACHED; 111 | pos = message.find("video4linux/video"); 112 | if (pos != std::string::npos) { 113 | int ret = checkVideo(message, vidpid); 114 | if (ret != 0) { 115 | continue; 116 | } 117 | auto it = deviceMap.find(vidpid); 118 | if (it != deviceMap.end()) { 119 | it->second.token = token; 120 | if (it->second.flag != action) { 121 | it->second.flag = action; 122 | it->second.notify(action); 123 | } 124 | } 125 | } else { 126 | for (auto& dev : deviceMap) { 127 | if (message.find(dev.first) != std::string::npos) { 128 | dev.second.token = token; 129 | if (dev.second.flag != action) { 130 | dev.second.flag = action; 131 | dev.second.notify(action); 132 | } 133 | break; 134 | } 135 | } 136 | } 137 | } 138 | } 139 | std::cout<<"leave listen thread."<(vidpid, dev)); 156 | return; 157 | } 158 | 159 | int UsbHotplug::start() 160 | { 161 | if (state != STATE_NONE) { 162 | return 0; 163 | } 164 | const int buffersize = 1024; 165 | fd = socket(PF_NETLINK, SOCK_DGRAM, 
NETLINK_KOBJECT_UEVENT); 166 | if (fd == -1) { 167 | perror("socket"); 168 | return -1; 169 | } 170 | setsockopt(fd, SOL_SOCKET, SO_RCVBUF, &buffersize, sizeof(buffersize)); 171 | /* set nonblock */ 172 | int flags = fcntl(fd, F_GETFL, 0); 173 | if (flags < 0) { 174 | perror("fcntl(F_GETFL)"); 175 | return -2; 176 | } 177 | 178 | flags |= O_NONBLOCK; 179 | if (fcntl(fd, F_SETFL, flags) < 0) { 180 | perror("fcntl(F_SETFL)"); 181 | return -2; 182 | } 183 | 184 | struct sockaddr_nl snl; 185 | bzero(&snl, sizeof(struct sockaddr_nl)); 186 | snl.nl_family = AF_NETLINK; 187 | snl.nl_pid = getpid(); 188 | snl.nl_groups = 1; 189 | int ret = bind(fd, (struct sockaddr *)&snl, sizeof(struct sockaddr_nl)); 190 | if (ret < 0) { 191 | perror("bind"); 192 | close(fd); 193 | return -3; 194 | } 195 | state = STATE_RUN; 196 | listenThread = std::thread(&UsbHotplug::run, this); 197 | return 0; 198 | } 199 | 200 | void UsbHotplug::stop() 201 | { 202 | if (state == STATE_NONE) { 203 | return; 204 | } 205 | 206 | while (state != STATE_NONE) { 207 | std::unique_lock locker(mutex); 208 | state = STATE_TERMINATE; 209 | condit.notify_all(); 210 | condit.wait_for(locker, std::chrono::milliseconds(500), [=]()->bool{ 211 | return state == STATE_NONE; 212 | }); 213 | } 214 | listenThread.join(); 215 | if (fd != -1) { 216 | close(fd); 217 | } 218 | return; 219 | } 220 | -------------------------------------------------------------------------------- /src/settingdialog.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | SettingDialog 4 | 5 | 6 | 7 | 0 8 | 0 9 | 390 10 | 382 11 | 12 | 13 | 14 | Dialog 15 | 16 | 17 | 18 | 19 | 20 | 21 | 0 22 | 0 23 | 24 | 25 | 26 | 27 | 350 28 | 0 29 | 30 | 31 | 32 | 33 | 34 | 35 | hue: 36 | 37 | 38 | 39 | 40 | 41 | 42 | frequency 43 | 44 | 45 | 46 | 47 | 48 | 49 | Qt::Horizontal 50 | 51 | 52 | 53 | 54 | 55 | 56 | gamma 57 | 58 | 59 | 60 | 61 | 62 | 63 | default 64 | 65 | 66 | 67 | 68 | 69 | 70 | Qt::Horizontal 71 | 72 | 
73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | Qt::Horizontal 81 | 82 | 83 | 84 | 85 | 86 | 87 | Qt::Horizontal 88 | 89 | 90 | 91 | 92 | 93 | 94 | contrast: 95 | 96 | 97 | 98 | 99 | 100 | 101 | Qt::Horizontal 102 | 103 | 104 | 105 | 106 | 107 | 108 | Qt::Horizontal 109 | 110 | 111 | 112 | 113 | 114 | 115 | Qt::Horizontal 116 | 117 | 118 | 119 | 120 | 121 | 122 | brightness: 123 | 124 | 125 | 126 | 127 | 128 | 129 | exposure 130 | 131 | 132 | 133 | 134 | 135 | 136 | white balance: 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | saturation: 147 | 148 | 149 | 150 | 151 | 152 | 153 | sharpness: 154 | 155 | 156 | 157 | 158 | 159 | 160 | backlight compensation 161 | 162 | 163 | 164 | 165 | 166 | 167 | Qt::Horizontal 168 | 169 | 170 | 171 | 172 | 173 | 174 | Qt::Horizontal 175 | 176 | 177 | 178 | 179 | 180 | 181 | Qt::Horizontal 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | gain 192 | 193 | 194 | 195 | 196 | 197 | 198 | exposure absolute 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | Qt::Horizontal 209 | 210 | 211 | 212 | 40 213 | 20 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | -------------------------------------------------------------------------------- /src/settingdialog.cpp: -------------------------------------------------------------------------------- 1 | #include "settingdialog.h" 2 | #include "ui_settingdialog.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | SettingDialog::SettingDialog(Camera::Device *camera_, QWidget *parent) : 13 | QDialog(parent), 14 | ui(new Ui::SettingDialog), 15 | camera(camera_) 16 | { 17 | ui->setupUi(this); 18 | setWindowTitle("camera parameters"); 19 | /* brightness */ 20 | ui->brightnessSlider->setRange(-64, 64); 21 | connect(ui->brightnessSlider, &QSlider::valueChanged, this, [=](int value){ 22 | camera->setBrightness(value); 23 | }); 24 | /* white balance */ 25 | ui->whiteBalanceSlider->setRange(0, 6500); 26 | 
connect(ui->whiteBalanceSlider, &QSlider::valueChanged, this, [=](int value){ 27 | camera->setWhiteBalanceTemperature(value); 28 | }); 29 | ui->whiteBalanceComboBox->addItems(QStringList{"MANUAL","AUTO","INCANDESCENT", 30 | "FLUORESCENT","FLUORESCENT_H", 31 | "HORIZON","DAYLIGHT","FLASH","CLOUDY","SHADE"}); 32 | connect(ui->whiteBalanceComboBox, 33 | QOverload::of(&QComboBox::currentIndexChanged), this, [=](int value){ 34 | camera->setWhiteBalanceMode(value); 35 | }); 36 | /* contrast */ 37 | ui->contrastSlider->setRange(0, 64); 38 | connect(ui->contrastSlider, &QSlider::valueChanged, this, [=](int value){ 39 | camera->setContrast(value); 40 | }); 41 | /* saturation */ 42 | ui->saturationSlider->setRange(0, 128); 43 | connect(ui->saturationSlider, &QSlider::valueChanged, this, [=](int value){ 44 | camera->setSaturation(value); 45 | }); 46 | /* hue */ 47 | ui->hueSlider->setRange(-64, 64); 48 | connect(ui->hueSlider, &QSlider::valueChanged, this, [=](int value){ 49 | camera->setHue(value); 50 | }); 51 | /* sharpness */ 52 | ui->sharpnessSlider->setRange(0, 64); 53 | connect(ui->sharpnessSlider, &QSlider::valueChanged, this, [=](int value){ 54 | camera->setSharpness(value); 55 | }); 56 | /* backlight compensation */ 57 | ui->backlightCompensationSlider->setRange(0, 6000); 58 | connect(ui->backlightCompensationSlider, &QSlider::valueChanged, this, [=](int value){ 59 | camera->setBacklightCompensation(value); 60 | }); 61 | /* gamma */ 62 | ui->gammaSlider->setRange(0, 1000); 63 | connect(ui->gammaSlider, &QSlider::valueChanged, this, [=](int value){ 64 | camera->setGamma(value); 65 | }); 66 | /* exposure */ 67 | ui->exposureComboBox->addItems(QStringList{"AUTO", "MANUAL", "SHUTTER_PRIORITY", "APERTURE_PRIORITY"}); 68 | connect(ui->exposureComboBox, 69 | QOverload::of(&QComboBox::currentIndexChanged), this, [=](int value){ 70 | camera->setExposureMode(value); 71 | }); 72 | ui->exposureAbsoluteSlider->setRange(0, 6400); 73 | connect(ui->exposureAbsoluteSlider, 
&QSlider::valueChanged, this, [=](int value){ 74 | camera->setExposureAbsolute(value); 75 | }); 76 | /* gain */ 77 | ui->gainSlider->setRange(0, 100); 78 | connect(ui->gainSlider, &QSlider::valueChanged, this, [=](int value){ 79 | camera->setGain(value); 80 | }); 81 | /* frequency */ 82 | ui->frequenceComboBox->addItems(QStringList{"DISABLED", "50HZ", "60HZ", "AUTO"}); 83 | connect(ui->frequenceComboBox, 84 | QOverload::of(&QComboBox::currentIndexChanged), this, [=](int value){ 85 | camera->setPowerLineFrequence(value); 86 | }); 87 | /* set default */ 88 | connect(ui->defaultBtn, &QPushButton::clicked, 89 | this, &SettingDialog::setDefault); 90 | /* dump default parameters */ 91 | if (QDir(".").exists("camera_default_params.xml") == false) { 92 | saveParams("camera_default_params.xml"); 93 | } 94 | } 95 | 96 | void SettingDialog::saveParams(const QString &fileName) 97 | { 98 | /* open file */ 99 | QFile file(fileName); 100 | if (!file.open(QFile::WriteOnly|QFile::Truncate)) { 101 | return; 102 | } 103 | QDomDocument doc; 104 | /* instruction */ 105 | QDomProcessingInstruction instruction; 106 | instruction=doc.createProcessingInstruction("xml","version=\"1.0\" encoding=\"UTF-8\""); 107 | doc.appendChild(instruction); 108 | /* root */ 109 | QDomElement root = doc.createElement("Camera"); 110 | doc.appendChild(root); 111 | auto insertElement = [&doc, this](const QString &key, const QString &value, QDomElement &parentElement) { 112 | QDomElement element = doc.createElement(key); 113 | element.appendChild(doc.createTextNode(value)); 114 | parentElement.appendChild(element); 115 | }; 116 | /* add node and element */ 117 | QDomElement param = doc.createElement("Parameter"); 118 | insertElement("WhiteBalanceMode", 119 | QString::number(camera->getWhiteBalanceMode()), param); 120 | insertElement("WhiteBalanceTemperature", 121 | QString::number(camera->getWhiteBalanceTemperature()), param); 122 | insertElement("BrightnessMode", 123 | 
QString::number(camera->getBrightnessMode()), param); 124 | insertElement("Brightness", 125 | QString::number(camera->getBrightness()), param); 126 | insertElement("Contrast", 127 | QString::number(camera->getContrast()), param); 128 | insertElement("Saturation", 129 | QString::number(camera->getSaturation()), param); 130 | insertElement("Hue", 131 | QString::number(camera->getHue()), param); 132 | insertElement("Sharpness", 133 | QString::number(camera->getSharpness()), param); 134 | insertElement("BacklightCompensation", 135 | QString::number(camera->getBacklightCompensation()), param); 136 | insertElement("Gamma", 137 | QString::number(camera->getGamma()), param); 138 | insertElement("ExposureMode", 139 | QString::number(camera->getExposureMode()), param); 140 | insertElement("ExposureAbsolute", 141 | QString::number(camera->getExposureAbsolute()), param); 142 | insertElement("AutoGain", 143 | QString::number(camera->getAutoGain()), param); 144 | insertElement("Gain", 145 | QString::number(camera->getGain()), param); 146 | insertElement("PowerLineFrequence", 147 | QString::number(camera->getFrequency()), param); 148 | /* add node */ 149 | root.appendChild(param); 150 | /* output */ 151 | QTextStream out(&file); 152 | doc.save(out, 4); 153 | file.close(); 154 | return; 155 | } 156 | 157 | bool SettingDialog::loadParams(const QString &fileName) 158 | { 159 | QFile file(fileName); 160 | if (!file.open(QFile::ReadOnly)) { 161 | return false; 162 | } 163 | QDomDocument doc; 164 | if (!doc.setContent(&file)) { 165 | file.close(); 166 | return false; 167 | } 168 | file.close(); 169 | 170 | /* root */ 171 | QDomElement root = doc.documentElement(); 172 | QDomNode node = root.firstChild(); 173 | while (!node.isNull()) { 174 | if (node.isElement()) { 175 | QDomElement e = node.toElement(); 176 | if (e.tagName() == "Parameter") { 177 | QDomNodeList list=e.childNodes(); 178 | for (int i = 0; i < list.count(); i++) { 179 | QDomNode n = list.at(i); 180 | if (n.isElement() == 
true) { 181 | QString param = n.nodeName(); 182 | int value = n.toElement().text().toInt(); 183 | if (param == "WhiteBalanceMode") { 184 | camera->setWhiteBalanceMode(value); 185 | } else if (param == "WhiteBalanceTemperature") { 186 | camera->setWhiteBalanceTemperature(value); 187 | } else if (param == "BrightnessMode") { 188 | camera->setBrightnessMode(value); 189 | } else if (param == "Brightness") { 190 | camera->setBrightness(value); 191 | } else if (param == "Contrast") { 192 | camera->setContrast(value); 193 | } else if (param == "Saturation") { 194 | camera->setSaturation(value); 195 | } else if (param == "Hue") { 196 | camera->setHue(value); 197 | } else if (param == "Sharpness") { 198 | camera->setSharpness(value); 199 | } else if (param == "BacklightCompensation") { 200 | camera->setBacklightCompensation(value); 201 | } else if (param == "Gamma") { 202 | camera->setGamma(value); 203 | } else if (param == "ExposureMode") { 204 | camera->setExposureMode(value); 205 | } else if (param == "ExposureAbsolute") { 206 | camera->setExposureAbsolute(value); 207 | } else if (param == "AutoGain") { 208 | camera->setAutoGain(value); 209 | } else if (param == "Gain") { 210 | camera->setGain(value); 211 | } else if (param == "PowerLineFrequence") { 212 | camera->setPowerLineFrequence(value); 213 | } 214 | 215 | } 216 | } 217 | } 218 | } 219 | node = node.nextSibling(); 220 | } 221 | return true; 222 | } 223 | 224 | SettingDialog::~SettingDialog() 225 | { 226 | delete ui; 227 | } 228 | 229 | void SettingDialog::setDefault() 230 | { 231 | loadParams("camera_default_params.xml"); 232 | updateParam(); 233 | return; 234 | } 235 | 236 | void SettingDialog::updateParam() 237 | { 238 | /* brightness */ 239 | ui->brightnessSlider->setValue(camera->getBrightness()); 240 | /* white balance */ 241 | ui->whiteBalanceSlider->setValue(camera->getWhiteBalanceTemperature()); 242 | ui->whiteBalanceComboBox->setCurrentIndex(camera->getWhiteBalanceMode()); 243 | /* contrast */ 244 | 
ui->contrastSlider->setValue(camera->getContrast()); 245 | /* saturation */ 246 | ui->saturationSlider->setValue(camera->getSaturation()); 247 | /* hue */ 248 | ui->hueSlider->setValue(camera->getHue()); 249 | /* sharpness */ 250 | ui->sharpnessSlider->setValue(camera->getSharpness()); 251 | /* backlight compensation */ 252 | ui->backlightCompensationSlider->setValue(camera->getBacklightCompensation()); 253 | /* gamma */ 254 | ui->gammaSlider->setValue(camera->getGamma()); 255 | /* exposure */ 256 | ui->exposureComboBox->setCurrentIndex(camera->getExposureMode()); 257 | ui->exposureAbsoluteSlider->setValue(camera->getExposureAbsolute()); 258 | /* gain */ 259 | ui->gainSlider->setValue(camera->getGain()); 260 | /* frequency */ 261 | ui->frequenceComboBox->setCurrentIndex(camera->getFrequency()); 262 | return; 263 | } 264 | -------------------------------------------------------------------------------- /src/yolov5.cpp: -------------------------------------------------------------------------------- 1 | #include "yolov5.h" 2 | #include "ncnn/cpu.h" 3 | 4 | Yolov5::Yolov5() 5 | { 6 | labels = { 7 | "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 8 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 9 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 10 | "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 11 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 12 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 13 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 14 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 15 | "hair drier", 
"toothbrush" 16 | }; 17 | target_size = 640; 18 | prob_threshold = 0.25f; 19 | nms_threshold = 0.45f; 20 | /* memory pool */ 21 | blob_pool_allocator.set_size_compare_ratio(0.f); 22 | workspace_pool_allocator.set_size_compare_ratio(0.f); 23 | blob_pool_allocator.clear(); 24 | workspace_pool_allocator.clear(); 25 | yolov5.opt.blob_allocator = &blob_pool_allocator; 26 | yolov5.opt.workspace_allocator = &workspace_pool_allocator; 27 | /* optimization */ 28 | yolov5.opt.use_vulkan_compute = false; 29 | ncnn::set_cpu_powersave(2); 30 | ncnn::set_omp_num_threads(ncnn::get_big_cpu_count()); 31 | yolov5.opt = ncnn::Option(); 32 | yolov5.opt.num_threads = ncnn::get_big_cpu_count(); 33 | } 34 | 35 | bool Yolov5::load(const std::string &modelType) 36 | { 37 | if (modelType.empty()) { 38 | return false; 39 | } 40 | std::string paramFile = modelType + ".param"; 41 | std::string modelFile = modelType + ".bin"; 42 | /* load model */ 43 | int ret = yolov5.load_param(paramFile.c_str()); 44 | if (ret != 0) { 45 | return false; 46 | } 47 | ret = yolov5.load_model(modelFile.c_str()); 48 | if (ret != 0) { 49 | return false; 50 | } 51 | return true; 52 | } 53 | 54 | int Yolov5::detect(const cv::Mat &image, std::vector &objects) 55 | { 56 | int img_w = image.cols; 57 | int img_h = image.rows; 58 | 59 | // letterbox pad to multiple of MAX_STRIDE 60 | int w = img_w; 61 | int h = img_h; 62 | float scale = 1.f; 63 | if (w > h) { 64 | scale = (float)target_size / w; 65 | w = target_size; 66 | h = h * scale; 67 | } else { 68 | scale = (float)target_size / h; 69 | h = target_size; 70 | w = w * scale; 71 | } 72 | 73 | ncnn::Mat in = ncnn::Mat::from_pixels_resize(image.data, 74 | ncnn::Mat::PIXEL_RGB, 75 | img_w, img_h, w, h); 76 | 77 | // pad to target_size rectangle 78 | // yolov5/utils/datasets.py letterbox 79 | int wpad = (w + MAX_STRIDE - 1) / MAX_STRIDE * MAX_STRIDE - w; 80 | int hpad = (h + MAX_STRIDE - 1) / MAX_STRIDE * MAX_STRIDE - h; 81 | ncnn::Mat in_pad; 82 | 
ncnn::copy_make_border(in, in_pad, 83 | hpad / 2, hpad - hpad / 2, 84 | wpad / 2, wpad - wpad / 2, 85 | ncnn::BORDER_CONSTANT, 114.f); 86 | 87 | //const float mean[3] = {0.485*255, 0.456*255, 0.406*255}; 88 | //const float std[3] = {1.f/(0.229*255), 1.f/(0.224*255), 1.f/(0.225*255)}; 89 | const float mean_vals[3] = {0, 0, 0}; 90 | const float norm_vals[3] = {1.f/255, 1.f/255, 1.f/255}; 91 | in_pad.substract_mean_normalize(mean_vals, norm_vals); 92 | ncnn::Extractor ex = yolov5.create_extractor(); 93 | 94 | ex.input("images", in_pad); 95 | 96 | std::vector proposals; 97 | 98 | // anchor setting from yolov5/models/yolov5s.yaml 99 | 100 | // stride 8 101 | { 102 | ncnn::Mat out; 103 | ex.extract("output", out); 104 | 105 | ncnn::Mat anchors(6); 106 | anchors[0] = 10.f; 107 | anchors[1] = 13.f; 108 | anchors[2] = 16.f; 109 | anchors[3] = 30.f; 110 | anchors[4] = 33.f; 111 | anchors[5] = 23.f; 112 | 113 | std::vector objects8; 114 | generate_proposals(anchors, 8, in_pad, out, prob_threshold, objects8); 115 | 116 | proposals.insert(proposals.end(), objects8.begin(), objects8.end()); 117 | } 118 | 119 | // stride 16 120 | { 121 | ncnn::Mat out; 122 | #if YOLOV5_V60 123 | ex.extract("376", out); 124 | #else 125 | ex.extract("781", out); 126 | #endif 127 | 128 | ncnn::Mat anchors(6); 129 | anchors[0] = 30.f; 130 | anchors[1] = 61.f; 131 | anchors[2] = 62.f; 132 | anchors[3] = 45.f; 133 | anchors[4] = 59.f; 134 | anchors[5] = 119.f; 135 | 136 | std::vector objects16; 137 | generate_proposals(anchors, 16, in_pad, out, prob_threshold, objects16); 138 | 139 | proposals.insert(proposals.end(), objects16.begin(), objects16.end()); 140 | } 141 | 142 | // stride 32 143 | { 144 | ncnn::Mat out; 145 | #if YOLOV5_V60 146 | ex.extract("401", out); 147 | #else 148 | ex.extract("801", out); 149 | #endif 150 | ncnn::Mat anchors(6); 151 | anchors[0] = 116.f; 152 | anchors[1] = 90.f; 153 | anchors[2] = 156.f; 154 | anchors[3] = 198.f; 155 | anchors[4] = 373.f; 156 | anchors[5] = 326.f; 157 
| 158 | std::vector objects32; 159 | generate_proposals(anchors, 32, in_pad, out, prob_threshold, objects32); 160 | 161 | proposals.insert(proposals.end(), objects32.begin(), objects32.end()); 162 | } 163 | 164 | // sort all proposals by score from highest to lowest 165 | qsort_descent_inplace(proposals); 166 | 167 | // apply nms with nms_threshold 168 | std::vector picked; 169 | nms_sorted_bboxes(proposals, picked, nms_threshold); 170 | 171 | int count = picked.size(); 172 | 173 | objects.resize(count); 174 | for (int i = 0; i < count; i++) { 175 | 176 | objects[i] = proposals[picked[i]]; 177 | 178 | // adjust offset to original unpadded 179 | float x0 = (objects[i].rect.x - (wpad / 2)) / scale; 180 | float y0 = (objects[i].rect.y - (hpad / 2)) / scale; 181 | float x1 = (objects[i].rect.x + objects[i].rect.width - (wpad / 2)) / scale; 182 | float y1 = (objects[i].rect.y + objects[i].rect.height - (hpad / 2)) / scale; 183 | 184 | // clip 185 | x0 = std::max(std::min(x0, (float)(img_w - 1)), 0.f); 186 | y0 = std::max(std::min(y0, (float)(img_h - 1)), 0.f); 187 | x1 = std::max(std::min(x1, (float)(img_w - 1)), 0.f); 188 | y1 = std::max(std::min(y1, (float)(img_h - 1)), 0.f); 189 | 190 | objects[i].rect.x = x0; 191 | objects[i].rect.y = y0; 192 | objects[i].rect.width = x1 - x0; 193 | objects[i].rect.height = y1 - y0; 194 | } 195 | return 0; 196 | } 197 | 198 | void Yolov5::draw(cv::Mat &image, const std::vector &objects) 199 | { 200 | for (size_t i = 0; i < objects.size(); i++) { 201 | const Yolov5::Object& obj = objects[i]; 202 | 203 | fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob, 204 | obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height); 205 | 206 | cv::rectangle(image, obj.rect, cv::Scalar(0, 255, 0), 2); 207 | 208 | char text[256]; 209 | sprintf(text, "%s %.1f%%", Yolov5::labels[obj.label].c_str(), obj.prob * 100); 210 | 211 | int baseLine = 0; 212 | cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 1, 1, 
&baseLine); 213 | 214 | int x = obj.rect.x; 215 | int y = obj.rect.y - label_size.height - baseLine; 216 | if (y < 0) { 217 | y = 0; 218 | } 219 | if (x + label_size.width > image.cols) { 220 | x = image.cols - label_size.width; 221 | } 222 | 223 | // cv::rectangle(image, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)), 224 | // cv::Scalar(0, 255, 0), 2); 225 | 226 | cv::putText(image, text, cv::Point(x, y + label_size.height), 227 | cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(255, 255, 255), 2); 228 | } 229 | return; 230 | } 231 | 232 | void Yolov5::qsort_descent_inplace(std::vector& objects, int left, int right) 233 | { 234 | int i = left; 235 | int j = right; 236 | float p = objects[(left + right) / 2].prob; 237 | 238 | while (i <= j) { 239 | while (objects[i].prob > p) 240 | i++; 241 | 242 | while (objects[j].prob < p) 243 | j--; 244 | 245 | if (i <= j) { 246 | // swap 247 | std::swap(objects[i], objects[j]); 248 | i++; 249 | j--; 250 | } 251 | } 252 | 253 | #pragma omp parallel sections 254 | { 255 | #pragma omp section 256 | { 257 | if (left < j) qsort_descent_inplace(objects, left, j); 258 | } 259 | #pragma omp section 260 | { 261 | if (i < right) qsort_descent_inplace(objects, i, right); 262 | } 263 | } 264 | return; 265 | } 266 | 267 | void Yolov5::qsort_descent_inplace(std::vector& objects) 268 | { 269 | if (objects.empty()) { 270 | return; 271 | } 272 | qsort_descent_inplace(objects, 0, objects.size() - 1); 273 | return; 274 | } 275 | 276 | void Yolov5::generate_proposals(const ncnn::Mat &anchors, int stride, const ncnn::Mat &in_pad, const ncnn::Mat &feat_blob, float prob_threshold, std::vector &objects) 277 | { 278 | const int num_grid = feat_blob.h; 279 | 280 | int num_grid_x; 281 | int num_grid_y; 282 | if (in_pad.w > in_pad.h) { 283 | num_grid_x = in_pad.w / stride; 284 | num_grid_y = num_grid / num_grid_x; 285 | } else { 286 | num_grid_y = in_pad.h / stride; 287 | num_grid_x = num_grid / num_grid_y; 288 | } 289 | 290 
| const int num_class = feat_blob.w - 5; 291 | 292 | const int num_anchors = anchors.w / 2; 293 | 294 | for (int q = 0; q < num_anchors; q++) { 295 | const float anchor_w = anchors[q * 2]; 296 | const float anchor_h = anchors[q * 2 + 1]; 297 | 298 | const ncnn::Mat feat = feat_blob.channel(q); 299 | 300 | for (int i = 0; i < num_grid_y; i++) { 301 | for (int j = 0; j < num_grid_x; j++) { 302 | const float* featptr = feat.row(i * num_grid_x + j); 303 | 304 | // find class index with max class score 305 | int class_index = 0; 306 | float class_score = -FLT_MAX; 307 | for (int k = 0; k < num_class; k++) { 308 | float score = featptr[5 + k]; 309 | if (score > class_score) { 310 | class_index = k; 311 | class_score = score; 312 | } 313 | } 314 | 315 | float box_score = featptr[4]; 316 | 317 | float confidence = sigmoid(box_score) * sigmoid(class_score); 318 | 319 | if (confidence >= prob_threshold) { 320 | // yolov5/models/yolo.py Detect forward 321 | // y = x[i].sigmoid() 322 | // y[..., 0:2] = (y[..., 0:2] * 2. 
- 0.5 + self.grid[i].to(x[i].device)) * self.stride[i] # xy 323 | // y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh 324 | 325 | float dx = sigmoid(featptr[0]); 326 | float dy = sigmoid(featptr[1]); 327 | float dw = sigmoid(featptr[2]); 328 | float dh = sigmoid(featptr[3]); 329 | 330 | float pb_cx = (dx * 2.f - 0.5f + j) * stride; 331 | float pb_cy = (dy * 2.f - 0.5f + i) * stride; 332 | 333 | float pb_w = pow(dw * 2.f, 2) * anchor_w; 334 | float pb_h = pow(dh * 2.f, 2) * anchor_h; 335 | 336 | float x0 = pb_cx - pb_w * 0.5f; 337 | float y0 = pb_cy - pb_h * 0.5f; 338 | float x1 = pb_cx + pb_w * 0.5f; 339 | float y1 = pb_cy + pb_h * 0.5f; 340 | 341 | Object obj; 342 | obj.rect.x = x0; 343 | obj.rect.y = y0; 344 | obj.rect.width = x1 - x0; 345 | obj.rect.height = y1 - y0; 346 | obj.label = class_index; 347 | obj.prob = confidence; 348 | 349 | objects.push_back(obj); 350 | } 351 | } 352 | } 353 | } 354 | return; 355 | } 356 | 357 | void Yolov5::nms_sorted_bboxes(const std::vector &objects, std::vector &picked, float nms_threshold) 358 | { 359 | picked.clear(); 360 | 361 | const int n = objects.size(); 362 | 363 | std::vector areas(n); 364 | for (int i = 0; i < n; i++) { 365 | areas[i] = objects[i].rect.area(); 366 | } 367 | 368 | for (int i = 0; i < n; i++) { 369 | const Object& a = objects[i]; 370 | 371 | int keep = 1; 372 | for (int j = 0; j < (int)picked.size(); j++) { 373 | const Object& b = objects[picked[j]]; 374 | 375 | // intersection over union 376 | float inter_area = intersection_area(a, b); 377 | float union_area = areas[i] + areas[picked[j]] - inter_area; 378 | // float IoU = inter_area / union_area 379 | if (inter_area / union_area > nms_threshold) 380 | keep = 0; 381 | } 382 | 383 | if (keep) 384 | picked.push_back(i); 385 | } 386 | return; 387 | } 388 | -------------------------------------------------------------------------------- /camera/jpegwrap.cpp: -------------------------------------------------------------------------------- 1 | 
#include "jpegwrap.h" 2 | 3 | void Jpeg::errorNotify(j_common_ptr cinfo) 4 | { 5 | Jpeg::Error *error = (Jpeg::Error*) cinfo->err; 6 | (*cinfo->err->output_message) (cinfo); 7 | longjmp(error->setjmp_buffer, 1); 8 | return; 9 | } 10 | 11 | int Jpeg::encode(uint8_t*& jpeg, std::size_t &totalsize, 12 | uint8_t* rgb, int w, int h, int rowstride, int quality) 13 | { 14 | struct jpeg_compress_struct cinfo; 15 | Jpeg::Error jpegError; 16 | JSAMPROW row_pointer[1]; 17 | cinfo.err = jpeg_std_error(&jpegError.pub); 18 | jpegError.pub.error_exit = errorNotify; 19 | if (setjmp(jpegError.setjmp_buffer)) { 20 | jpeg_destroy_compress(&cinfo); 21 | return -1; 22 | } 23 | jpeg_create_compress(&cinfo); 24 | unsigned long size = 0; 25 | jpeg_mem_dest(&cinfo, &jpeg, &size); 26 | totalsize = size; 27 | cinfo.image_width = w; 28 | cinfo.image_height = h; 29 | cinfo.input_components = 3; 30 | cinfo.in_color_space = JCS_RGB; 31 | jpeg_set_defaults(&cinfo); 32 | jpeg_set_quality(&cinfo, quality, TRUE); 33 | jpeg_start_compress(&cinfo,TRUE); 34 | while (cinfo.next_scanline < cinfo.image_height) { 35 | row_pointer[0] = &rgb[cinfo.next_scanline*rowstride]; 36 | (void)jpeg_write_scanlines(&cinfo, row_pointer, 1); 37 | } 38 | jpeg_finish_compress(&cinfo); 39 | jpeg_destroy_compress(&cinfo); 40 | return 0; 41 | } 42 | 43 | int Jpeg::decode(uint8_t* &rgb, int &w, int &h, 44 | uint8_t *jpeg, std::size_t totalsize, int scale, int align) 45 | { 46 | if (jpeg == nullptr || totalsize == 0) { 47 | return -1; 48 | } 49 | struct jpeg_decompress_struct cinfo; 50 | Jpeg::Error jpegError; 51 | cinfo.err = jpeg_std_error(&jpegError.pub); 52 | jpegError.pub.error_exit = errorNotify; 53 | if (setjmp(jpegError.setjmp_buffer)) { 54 | jpeg_destroy_decompress(&cinfo); 55 | return -2; 56 | } 57 | jpeg_create_decompress(&cinfo); 58 | jpeg_mem_src(&cinfo, jpeg, totalsize); 59 | jpeg_read_header(&cinfo, TRUE); 60 | cinfo.scale_num = 1; 61 | cinfo.scale_denom = scale; 62 | if (!jpeg_start_decompress(&cinfo)) { 63 | 
return -3; 64 | } 65 | int rowstride = cinfo.output_width * cinfo.output_components; 66 | if (align == ALIGN_4) { 67 | rowstride = Jpeg::align4(cinfo.output_width, cinfo.output_components); 68 | } 69 | w = cinfo.output_width; 70 | h = cinfo.output_height; 71 | JSAMPARRAY buffer = (*cinfo.mem->alloc_sarray)((j_common_ptr) &cinfo, JPOOL_IMAGE, rowstride, 1); 72 | if (buffer == nullptr) { 73 | jpeg_finish_decompress(&cinfo); 74 | jpeg_destroy_decompress(&cinfo); 75 | return -4; 76 | } 77 | unsigned long pos = 0; 78 | while (cinfo.output_scanline < cinfo.output_height) { 79 | /* jpeg_read_scanlines expects an array of pointers to scanlines. 80 | * Here the array is only one element long, but you could ask for 81 | * more than one scanline at a time if that's more convenient. 82 | */ 83 | (void) jpeg_read_scanlines(&cinfo, buffer, 1); 84 | /* Assume put_scanline_someplace wants a pointer and sample count. */ 85 | 86 | //put_scanline_someplace(buffer[0], row_stride); 87 | memcpy(rgb + pos, buffer[0], rowstride); 88 | pos += rowstride; 89 | } 90 | jpeg_finish_decompress(&cinfo); 91 | jpeg_destroy_decompress(&cinfo); 92 | return 0; 93 | } 94 | 95 | int Jpeg::load(const char *filename, std::shared_ptr &img, int &h, int &w, int &c) 96 | { 97 | /* This struct contains the JPEG decompression parameters and pointers to 98 | * working space (which is allocated as needed by the JPEG library). 99 | */ 100 | struct jpeg_decompress_struct cinfo; 101 | /* We use our private extension JPEG error handler. 102 | * Note that this struct must live as long as the main JPEG parameter 103 | * struct, to avoid dangling-pointer problems. 
104 | */ 105 | Jpeg::Error jerr; 106 | /* More stuff */ 107 | FILE * infile; /* source file */ 108 | JSAMPARRAY buffer; /* Output row buffer */ 109 | int row_stride; /* physical row width in output buffer */ 110 | 111 | /* In this example we want to open the input file before doing anything else, 112 | * so that the setjmp() error recovery below can assume the file is open. 113 | * VERY IMPORTANT: use "b" option to fopen() if you are on a machine that 114 | * requires it in order to read binary files. 115 | */ 116 | 117 | if ((infile = fopen(filename, "rb")) == NULL) { 118 | fprintf(stderr, "can't open %s\n", filename); 119 | return -1; 120 | } 121 | 122 | /* Step 1: allocate and initialize JPEG decompression object */ 123 | 124 | /* We set up the normal JPEG error routines, then override error_exit. */ 125 | cinfo.err = jpeg_std_error(&jerr.pub); 126 | jerr.pub.error_exit = errorNotify; 127 | /* Establish the setjmp return context for my_error_exit to use. */ 128 | if (setjmp(jerr.setjmp_buffer)) { 129 | /* If we get here, the JPEG code has signaled an error. 130 | * We need to clean up the JPEG object, close the input file, and return. 131 | */ 132 | jpeg_destroy_decompress(&cinfo); 133 | fclose(infile); 134 | return -1; 135 | } 136 | /* Now we can initialize the JPEG decompression object. */ 137 | jpeg_create_decompress(&cinfo); 138 | 139 | /* Step 2: specify data source (eg, a file) */ 140 | 141 | jpeg_stdio_src(&cinfo, infile); 142 | 143 | /* Step 3: read file parameters with jpeg_read_header() */ 144 | 145 | (void) jpeg_read_header(&cinfo, TRUE); 146 | /* We can ignore the return value from jpeg_read_header since 147 | * (a) suspension is not possible with the stdio data source, and 148 | * (b) we passed TRUE to reject a tables-only JPEG file as an error. 149 | * See libjpeg.txt for more info. 
150 | */ 151 | 152 | /* Step 4: set parameters for decompression */ 153 | 154 | /* In this example, we don't need to change any of the defaults set by 155 | * jpeg_read_header(), so we do nothing here. 156 | */ 157 | 158 | /* Step 5: Start decompressor */ 159 | 160 | (void) jpeg_start_decompress(&cinfo); 161 | /* We can ignore the return value since suspension is not possible 162 | * with the stdio data source. 163 | */ 164 | 165 | /* We may need to do some setup of our own at this point before reading 166 | * the data. After jpeg_start_decompress() we have the correct scaled 167 | * output image dimensions available, as well as the output colormap 168 | * if we asked for color quantization. 169 | * In this example, we need to make an output work buffer of the right size. 170 | */ 171 | /* JSAMPLEs per row in output buffer */ 172 | row_stride = cinfo.output_width * cinfo.output_components; 173 | /* Make a one-row-high sample array that will go away when done with image */ 174 | buffer = (*cinfo.mem->alloc_sarray) 175 | ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); 176 | 177 | /* Step 6: while (scan lines remain to be read) */ 178 | /* jpeg_read_scanlines(...); */ 179 | 180 | /* Here we use the library's state variable cinfo.output_scanline as the 181 | * loop counter, so that we don't have to keep track ourselves. 182 | */ 183 | w = cinfo.output_width; 184 | h = cinfo.output_height; 185 | c = cinfo.output_components; 186 | img = std::shared_ptr(new uint8_t[row_stride * h]); 187 | int pos = 0; 188 | uint8_t* ptr = img.get(); 189 | while (cinfo.output_scanline < cinfo.output_height) { 190 | /* jpeg_read_scanlines expects an array of pointers to scanlines. 191 | * Here the array is only one element long, but you could ask for 192 | * more than one scanline at a time if that's more convenient. 193 | */ 194 | (void) jpeg_read_scanlines(&cinfo, buffer, 1); 195 | /* Assume put_scanline_someplace wants a pointer and sample count. 
*/ 196 | 197 | //put_scanline_someplace(buffer[0], row_stride); 198 | memcpy(ptr + pos, buffer[0], row_stride); 199 | pos += row_stride; 200 | } 201 | 202 | /* Step 7: Finish decompression */ 203 | 204 | (void) jpeg_finish_decompress(&cinfo); 205 | /* We can ignore the return value since suspension is not possible 206 | * with the stdio data source. 207 | */ 208 | 209 | /* Step 8: Release JPEG decompression object */ 210 | 211 | /* This is an important step since it will release a good deal of memory. */ 212 | jpeg_destroy_decompress(&cinfo); 213 | 214 | /* After finish_decompress, we can close the input file. 215 | * Here we postpone it until after no more JPEG errors are possible, 216 | * so as to simplify the setjmp error logic above. (Actually, I don't 217 | * think that jpeg_destroy can do an error exit, but why assume anything...) 218 | */ 219 | fclose(infile); 220 | 221 | /* At this point you may want to check to see whether any corrupt-data 222 | * warnings occurred (test whether jerr.pub.num_warnings is nonzero). 223 | */ 224 | /* And we're done! */ 225 | return 0; 226 | } 227 | 228 | int Jpeg::save(const char *filename, uint8_t *img, int h, int w, int c, int quality) 229 | { 230 | /* This struct contains the JPEG compression parameters and pointers to 231 | * working space (which is allocated as needed by the JPEG library). 232 | * It is possible to have several such structures, representing multiple 233 | * compression/decompression processes, in existence at once. We refer 234 | * to any one struct (and its associated working data) as a "JPEG object". 235 | */ 236 | struct jpeg_compress_struct cinfo; 237 | /* This struct represents a JPEG error handler. It is declared separately 238 | * because applications often want to supply a specialized error handler 239 | * (see the second half of this file for an example). 
But here we just 240 | * take the easy way out and use the standard error handler, which will 241 | * print a message on stderr and call exit() if compression fails. 242 | * Note that this struct must live as long as the main JPEG parameter 243 | * struct, to avoid dangling-pointer problems. 244 | */ 245 | struct jpeg_error_mgr jerr; 246 | /* More stuff */ 247 | FILE * outfile; /* target file */ 248 | JSAMPROW row_pointer[1]; /* pointer to JSAMPLE row[s] */ 249 | int row_stride; /* physical row width in image buffer */ 250 | 251 | /* Step 1: allocate and initialize JPEG compression object */ 252 | 253 | /* We have to set up the error handler first, in case the initialization 254 | * step fails. (Unlikely, but it could happen if you are out of memory.) 255 | * This routine fills in the contents of struct jerr, and returns jerr's 256 | * address which we place into the link field in cinfo. 257 | */ 258 | cinfo.err = jpeg_std_error(&jerr); 259 | /* Now we can initialize the JPEG compression object. */ 260 | jpeg_create_compress(&cinfo); 261 | 262 | /* Step 2: specify data destination (eg, a file) */ 263 | /* Note: steps 2 and 3 can be done in either order. */ 264 | 265 | /* Here we use the library-supplied code to send compressed data to a 266 | * stdio stream. You can also write your own code to do something else. 267 | * VERY IMPORTANT: use "b" option to fopen() if you are on a machine that 268 | * requires it in order to write binary files. 269 | */ 270 | if ((outfile = fopen(filename, "wb")) == NULL) { 271 | fprintf(stderr, "can't open %s\n", filename); 272 | return -1; 273 | } 274 | jpeg_stdio_dest(&cinfo, outfile); 275 | 276 | /* Step 3: set parameters for compression */ 277 | 278 | /* First we supply a description of the input image. 
279 | * Four fields of the cinfo struct must be filled in: 280 | */ 281 | cinfo.image_width = w; /* image width and height, in pixels */ 282 | cinfo.image_height = h; 283 | cinfo.input_components = c; /* # of color components per pixel */ 284 | cinfo.in_color_space = JCS_RGB; /* colorspace of input image */ 285 | /* Now use the library's routine to set default compression parameters. 286 | * (You must set at least cinfo.in_color_space before calling this, 287 | * since the defaults depend on the source color space.) 288 | */ 289 | jpeg_set_defaults(&cinfo); 290 | /* Now you can set any non-default parameters you wish to. 291 | * Here we just illustrate the use of quality (quantization table) scaling: 292 | */ 293 | jpeg_set_quality(&cinfo, quality, TRUE /* limit to baseline-JPEG values */); 294 | 295 | /* Step 4: Start compressor */ 296 | 297 | /* TRUE ensures that we will write a complete interchange-JPEG file. 298 | * Pass TRUE unless you are very sure of what you're doing. 299 | */ 300 | jpeg_start_compress(&cinfo, TRUE); 301 | 302 | /* Step 5: while (scan lines remain to be written) */ 303 | /* jpeg_write_scanlines(...); */ 304 | 305 | /* Here we use the library's state variable cinfo.next_scanline as the 306 | * loop counter, so that we don't have to keep track ourselves. 307 | * To keep things simple, we pass one scanline per call; you can pass 308 | * more if you wish, though. 309 | */ 310 | row_stride = w * 3; /* JSAMPLEs per row in image_buffer */ 311 | 312 | while (cinfo.next_scanline < cinfo.image_height) { 313 | /* jpeg_write_scanlines expects an array of pointers to scanlines. 314 | * Here the array is only one element long, but you could pass 315 | * more than one scanline at a time if that's more convenient. 
316 | */ 317 | row_pointer[0] = & img[cinfo.next_scanline * row_stride]; 318 | (void) jpeg_write_scanlines(&cinfo, row_pointer, 1); 319 | } 320 | 321 | /* Step 6: Finish compression */ 322 | 323 | jpeg_finish_compress(&cinfo); 324 | /* After finish_compress, we can close the output file. */ 325 | fclose(outfile); 326 | 327 | /* Step 7: release JPEG compression object */ 328 | 329 | /* This is an important step since it will release a good deal of memory. */ 330 | jpeg_destroy_compress(&cinfo); 331 | 332 | /* And we're done! */ 333 | return 0; 334 | } 335 | -------------------------------------------------------------------------------- /camera/camera.h: -------------------------------------------------------------------------------- 1 | #ifndef CAMERA_H 2 | #define CAMERA_H 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include "libyuv.h" 29 | #include "jpegwrap.h" 30 | #include "strings.hpp" 31 | 32 | #define CAMERA_PIXELFORMAT_YUYV "YUYV" 33 | #define CAMERA_PIXELFORMAT_JPEG "JPEG" 34 | namespace Camera { 35 | 36 | enum Code { 37 | CODE_OK = 0, 38 | CODE_DEV_EMPTY = -1, 39 | CODE_DEV_NOTFOUND = -2, 40 | CODE_DEV_OPENFAILED = -3 41 | }; 42 | 43 | enum DecodeType { 44 | Decode_SYNC = 0, 45 | Decode_ASYNC, 46 | Decode_PINGPONG 47 | }; 48 | 49 | enum ParamFlag { 50 | Param_Auto = 0, 51 | Param_Manual 52 | }; 53 | 54 | struct Param { 55 | int minVal; 56 | int maxVal; 57 | int value; 58 | int defaultVal; 59 | int step; 60 | int flag; 61 | }; 62 | 63 | struct Params { 64 | Param whiteBalance; 65 | Param brightness; 66 | Param contrast; 67 | Param saturation; 68 | Param hue; 69 | Param sharpness; 70 | Param backlightCompensation; 71 | Param gamma; 72 | Param exposure; 73 | 
Param gain; 74 | Param powerLineFrequence; 75 | 76 | }; 77 | 78 | struct DeviceParam { 79 | int whiteBalanceMode; 80 | int whiteBalanceTemperature; 81 | int brightnessMode; 82 | int brightness; 83 | int contrast; 84 | int saturation; 85 | int hue; 86 | int sharpness; 87 | int backlightCompensation; 88 | int gamma; 89 | int exposureMode; 90 | int exposureAbsolute; 91 | int autoGain; 92 | int gain; 93 | int powerLineFrequence; 94 | }; 95 | struct Property { 96 | std::string path; 97 | unsigned short vendorID; 98 | unsigned short productID; 99 | }; 100 | 101 | enum PixelType { 102 | Pixel_MJPEG = 0, 103 | Pixel_YUYV 104 | }; 105 | 106 | struct PixelFormat { 107 | std::string formatString; 108 | unsigned int formatInt; 109 | }; 110 | 111 | 112 | class Frame 113 | { 114 | public: 115 | unsigned char* data; 116 | unsigned long length; 117 | unsigned long capacity; 118 | public: 119 | static unsigned long align(unsigned long s) 120 | { 121 | unsigned long size = s; 122 | if (size&0x3ff) { 123 | size = ((size >> 10) + 1)<<10; 124 | } 125 | return size; 126 | } 127 | Frame():data(nullptr), length(0), capacity(0){} 128 | ~Frame(){} 129 | void allocate(unsigned long size) 130 | { 131 | if (data == nullptr) { 132 | capacity = align(size); 133 | length = size; 134 | data = new unsigned char[capacity]; 135 | } else { 136 | if (size > capacity) { 137 | delete [] data; 138 | capacity = align(size); 139 | length = size; 140 | data = new unsigned char[capacity]; 141 | } else { 142 | length = size; 143 | } 144 | } 145 | return; 146 | } 147 | void copy(unsigned char *d, unsigned long s) 148 | { 149 | allocate(s); 150 | memcpy(data, d, length); 151 | return; 152 | } 153 | void clear() 154 | { 155 | if (data) { 156 | delete [] data; 157 | data = nullptr; 158 | } 159 | length = 0; 160 | capacity = 0; 161 | return; 162 | } 163 | }; 164 | 165 | using FnProcessImage = std::function; 166 | 167 | class IDecoder 168 | { 169 | protected: 170 | int width; 171 | int height; 172 | std::string 
formatString; 173 | FnProcessImage processImage; 174 | public: 175 | IDecoder(){} 176 | explicit IDecoder(const FnProcessImage &func):processImage(func){} 177 | virtual ~IDecoder(){} 178 | 179 | virtual void setFormat(int w, int h, const std::string &format){} 180 | 181 | virtual void sample(unsigned char* data, unsigned long length){} 182 | 183 | virtual void run(){} 184 | 185 | virtual void start(){} 186 | 187 | virtual void stop(){} 188 | }; 189 | 190 | class Decoder : public IDecoder 191 | { 192 | private: 193 | int index; 194 | Frame outputFrame[4]; 195 | public: 196 | Decoder():index(0){} 197 | explicit Decoder(const FnProcessImage &func) 198 | :IDecoder(func),index(0){} 199 | ~Decoder() 200 | { 201 | for (int i = 0; i < 4; i++) { 202 | outputFrame[i].clear(); 203 | } 204 | } 205 | virtual void setFormat(int w, int h, const std::string &format) override 206 | { 207 | width = w; 208 | height = h; 209 | formatString = format; 210 | unsigned long length = width * height * 4; 211 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 212 | length = Jpeg::align4(width, 3)*height; 213 | } else if (formatString == CAMERA_PIXELFORMAT_YUYV) { 214 | length = width * height * 4; 215 | } 216 | for (int i = 0; i < 4; i++) { 217 | outputFrame[i].allocate(length); 218 | } 219 | return; 220 | } 221 | 222 | virtual void sample(unsigned char* data, unsigned long length) 223 | { 224 | Frame& frame = outputFrame[index]; 225 | index = (index + 1)%4; 226 | /* set format */ 227 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 228 | Jpeg::decode(frame.data, width, height, data, length, Jpeg::ALIGN_4); 229 | /* process */ 230 | processImage(height, width, 3, frame.data); 231 | } else if(formatString == CAMERA_PIXELFORMAT_YUYV) { 232 | int alignedWidth = (width + 1) & ~1; 233 | libyuv::YUY2ToARGB(data, alignedWidth * 2, 234 | frame.data, width * 4, 235 | width, height); 236 | processImage(height, width, 4, frame.data); 237 | } else { 238 | printf("decode failed. 
format: %s", formatString.c_str()); 239 | } 240 | return; 241 | } 242 | }; 243 | 244 | class AsyncDecoder : public IDecoder 245 | { 246 | public: 247 | enum State { 248 | STATE_NONE = 0, 249 | STATE_PREPENDING, 250 | STATE_READY, 251 | STATE_PROCESSING, 252 | STATE_TERMINATE 253 | }; 254 | private: 255 | int index; 256 | int state; 257 | std::condition_variable condit; 258 | std::mutex mutex; 259 | std::thread processThread; 260 | Frame frameBuffer; 261 | Frame outputFrame[4]; 262 | protected: 263 | void run() 264 | { 265 | printf("enter process function.\n"); 266 | while (1) { 267 | std::unique_lock locker(mutex); 268 | condit.wait_for(locker, std::chrono::milliseconds(1000), [this]()->bool{ 269 | return state == STATE_TERMINATE || state == STATE_READY; 270 | }); 271 | if (state == STATE_TERMINATE) { 272 | state = STATE_NONE; 273 | break; 274 | } else if (state == STATE_PREPENDING) { 275 | continue; 276 | } 277 | 278 | state = STATE_PROCESSING; 279 | 280 | Frame& inputFrame = frameBuffer; 281 | if (inputFrame.data == nullptr) { 282 | continue; 283 | } 284 | Frame& frame = outputFrame[index]; 285 | index = (index + 1)%4; 286 | /* set format */ 287 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 288 | Jpeg::decode(frame.data, width, height, inputFrame.data, inputFrame.length, Jpeg::ALIGN_4); 289 | /* process */ 290 | processImage(height, width, 3, frame.data); 291 | } else if(formatString == CAMERA_PIXELFORMAT_YUYV) { 292 | int alignedWidth = (width + 1) & ~1; 293 | libyuv::YUY2ToARGB(inputFrame.data, alignedWidth * 2, 294 | frame.data, width * 4, 295 | width, height); 296 | processImage(height, width, 4, frame.data); 297 | } else { 298 | printf("decode failed. 
format: %s", formatString.c_str()); 299 | } 300 | 301 | if (state != STATE_TERMINATE) { 302 | state = STATE_PREPENDING; 303 | condit.notify_all(); 304 | } 305 | 306 | } 307 | printf("leave process function.\n"); 308 | return; 309 | } 310 | public: 311 | AsyncDecoder():index(0),state(STATE_NONE){} 312 | explicit AsyncDecoder(const FnProcessImage &func) 313 | :IDecoder(func),index(0),state(STATE_NONE){} 314 | ~AsyncDecoder() 315 | { 316 | frameBuffer.clear(); 317 | for (int i = 0; i < 4; i++) { 318 | outputFrame[i].clear(); 319 | } 320 | } 321 | 322 | virtual void setFormat(int w, int h, const std::string &format) override 323 | { 324 | width = w; 325 | height = h; 326 | formatString = format; 327 | unsigned long length = width * height * 4; 328 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 329 | length = Jpeg::align4(width, 3)*height; 330 | } else if (formatString == CAMERA_PIXELFORMAT_YUYV) { 331 | length = width * height * 4; 332 | } 333 | std::unique_lock locker(mutex); 334 | for (int i = 0; i < 4; i++) { 335 | outputFrame[i].allocate(length); 336 | } 337 | return; 338 | } 339 | 340 | virtual void sample(unsigned char* data, unsigned long length) override 341 | { 342 | if (state == STATE_PREPENDING) { 343 | std::unique_lock locker(mutex); 344 | frameBuffer.copy(data, length); 345 | state = STATE_READY; 346 | condit.notify_all(); 347 | } 348 | return; 349 | } 350 | 351 | virtual void start() override 352 | { 353 | if (state != STATE_NONE) { 354 | return; 355 | } 356 | state = STATE_PREPENDING; 357 | processThread = std::thread(&AsyncDecoder::run, this); 358 | return; 359 | } 360 | 361 | virtual void stop() override 362 | { 363 | if (state == STATE_NONE) { 364 | return; 365 | } 366 | while (state != STATE_NONE) { 367 | std::unique_lock locker(mutex); 368 | state = STATE_TERMINATE; 369 | condit.notify_all(); 370 | condit.wait_for(locker, std::chrono::milliseconds(500), [=]()->bool{ 371 | return state == STATE_NONE; 372 | }); 373 | } 374 | processThread.join(); 
375 | return; 376 | } 377 | 378 | }; 379 | 380 | class PingPongDecoder : public IDecoder 381 | { 382 | public: 383 | constexpr static int max_buffer_len = 8; 384 | private: 385 | int in; 386 | int out; 387 | std::atomic isRunning; 388 | std::thread processThread; 389 | Frame frameBuffer[8]; 390 | Frame outputFrame[8]; 391 | protected: 392 | virtual void run() override 393 | { 394 | printf("enter process function.\n"); 395 | while (isRunning.load()) { 396 | int index = out; 397 | Frame& inputFrame = frameBuffer[index]; 398 | if (inputFrame.data == nullptr) { 399 | continue; 400 | } 401 | Frame& frame = outputFrame[index]; 402 | /* set format */ 403 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 404 | Jpeg::decode(frame.data, width, height, inputFrame.data, inputFrame.length, Jpeg::ALIGN_4); 405 | /* process */ 406 | processImage(height, width, 3, frame.data); 407 | } else if(formatString == CAMERA_PIXELFORMAT_YUYV) { 408 | int alignedWidth = (width + 1) & ~1; 409 | libyuv::YUY2ToARGB(inputFrame.data, alignedWidth * 2, 410 | frame.data, width * 4, 411 | width, height); 412 | processImage(height, width, 4, frame.data); 413 | } else { 414 | printf("decode failed. 
format: %s", formatString.c_str()); 415 | } 416 | 417 | } 418 | printf("leave process function.\n"); 419 | return; 420 | } 421 | public: 422 | PingPongDecoder():in(0),out(0),isRunning(false){} 423 | explicit PingPongDecoder(const FnProcessImage &func) 424 | :IDecoder(func),in(0),out(0),isRunning(false){} 425 | ~PingPongDecoder() 426 | { 427 | for (int i = 0; i < 8; i++) { 428 | frameBuffer[i].clear(); 429 | outputFrame[i].clear(); 430 | } 431 | } 432 | 433 | virtual void setFormat(int w, int h, const std::string &format) override 434 | { 435 | width = w; 436 | height = h; 437 | formatString = format; 438 | unsigned long length = width * height * 4; 439 | if (formatString == CAMERA_PIXELFORMAT_JPEG) { 440 | length = Jpeg::align4(width, 3)*height; 441 | } else if (formatString == CAMERA_PIXELFORMAT_YUYV) { 442 | length = width * height * 4; 443 | } 444 | for (int i = 0; i < 8; i++) { 445 | outputFrame[i].allocate(length); 446 | } 447 | return; 448 | } 449 | 450 | virtual void sample(unsigned char* data, unsigned long length) override 451 | { 452 | frameBuffer[in].copy(data, length); 453 | out = in; 454 | in = (in + 1)%8; 455 | return; 456 | } 457 | 458 | virtual void start() override 459 | { 460 | if (isRunning.load()) { 461 | return; 462 | } 463 | isRunning.store(true); 464 | processThread = std::thread(&PingPongDecoder::run, this); 465 | return; 466 | } 467 | 468 | virtual void stop() override 469 | { 470 | if (isRunning.load()) { 471 | isRunning.store(false); 472 | processThread.join(); 473 | } 474 | return; 475 | } 476 | }; 477 | 478 | class Device 479 | { 480 | public: 481 | static constexpr int mmapBlockCount = 4; 482 | protected: 483 | /* device */ 484 | int fd; 485 | std::string devPath; 486 | IDecoder *decoder; 487 | /* sample */ 488 | int sampleTimeout; 489 | std::atomic isRunning; 490 | Frame sharedMem[mmapBlockCount]; 491 | std::thread sampleThread; 492 | /* camera property */ 493 | std::vector formatList; 494 | std::map > resolutionMap; 495 | protected: 
496 | static std::string shellExecute(const std::string& command); 497 | static unsigned short getVendorID(const char* name); 498 | static unsigned short getProductID(const char* name); 499 | static int openDevice(const std::string &path); 500 | void onSample(); 501 | /* shared memory */ 502 | bool attachSharedMemory(); 503 | void dettachSharedMemory(); 504 | bool checkCapability(); 505 | bool setFormat(int w, int h, const std::string &format); 506 | int openPath(const std::string &path, const std::string &format, const std::string &res); 507 | void closeDevice(); 508 | public: 509 | explicit Device(int decodeType, const FnProcessImage &func); 510 | ~Device(); 511 | static std::vector enumerate(); 512 | static std::vector getPixelFormatList(const std::string &path); 513 | static std::vector getResolutionList(const std::string &path, const std::string &pixelFormat); 514 | /* start - stop */ 515 | int start(const std::string &path, const std::string &format, const std::string &res); 516 | int start(unsigned short vid, unsigned short pid, const std::string &pixelFormat, int resIndex=0); 517 | void stop(); 518 | bool startSample(); 519 | bool stopSample(); 520 | void clear(); 521 | void restart(const std::string &format, const std::string &res); 522 | /* parameter */ 523 | void setParam(unsigned int controlID, int value); 524 | int getParamRange(unsigned int controlID, int modeID, Param ¶m); 525 | int getParam(unsigned int controlID); 526 | /* white balance */ 527 | void setWhiteBalanceMode(int value = V4L2_WHITE_BALANCE_MANUAL); 528 | int getWhiteBalanceMode(); 529 | void setWhiteBalanceTemperature(int value); 530 | int getWhiteBalanceTemperature(); 531 | /* brightness */ 532 | void setBrightnessMode(int value); 533 | int getBrightnessMode(); 534 | void setBrightness(int value); 535 | int getBrightness(); 536 | /* contrast */ 537 | void setContrast(int value); 538 | int getContrast(); 539 | /* saturation */ 540 | void setSaturation(int value); 541 | int 
getSaturation(); 542 | /* hue */ 543 | void setHue(int value); 544 | int getHue(); 545 | /* sharpness */ 546 | void setSharpness(int value); 547 | int getSharpness(); 548 | /* backlight compensation */ 549 | void setBacklightCompensation(int value); 550 | int getBacklightCompensation(); 551 | /* gamma */ 552 | void setGamma(int value); 553 | int getGamma(); 554 | /* exposure */ 555 | void setExposureMode(int value = V4L2_EXPOSURE_MANUAL); 556 | int getExposureMode(); 557 | void setExposure(int value); 558 | int getExposure(); 559 | void setExposureAbsolute(int value); 560 | int getExposureAbsolute(); 561 | /* gain */ 562 | void setAutoGain(int value); 563 | int getAutoGain(); 564 | void setGain(int value); 565 | int getGain(); 566 | /* frequency */ 567 | void setPowerLineFrequence(int value); 568 | int getFrequency(); 569 | /* default paramter */ 570 | void setDefaultParam(); 571 | void setParam(const DeviceParam ¶m); 572 | 573 | }; 574 | 575 | } 576 | #endif // CAMERA_H 577 | -------------------------------------------------------------------------------- /camera/camera.cpp: -------------------------------------------------------------------------------- 1 | #include "camera.h" 2 | #include 3 | #include 4 | 5 | Camera::Device::Device(int decodeType, const Camera::FnProcessImage &func) 6 | :fd(-1),sampleTimeout(5),isRunning(0),decoder(nullptr) 7 | { 8 | if (decodeType == Camera::Decode_ASYNC) { 9 | decoder = new AsyncDecoder(func); 10 | } else if (decodeType == Camera::Decode_PINGPONG) { 11 | decoder = new PingPongDecoder(func); 12 | } else { 13 | decoder = new Decoder(func); 14 | } 15 | } 16 | 17 | Camera::Device::~Device() 18 | { 19 | if (decoder) { 20 | delete decoder; 21 | decoder = nullptr; 22 | } 23 | } 24 | 25 | void Camera::Device::onSample() 26 | { 27 | printf("enter sampling function.\n"); 28 | while (isRunning.load()) { 29 | fd_set fds; 30 | FD_ZERO(&fds); 31 | FD_SET(fd, &fds); 32 | /*Timeout*/ 33 | struct timeval tv; 34 | tv.tv_sec = sampleTimeout; 35 
| tv.tv_usec = 0; 36 | int ret = select(fd + 1, &fds, NULL, NULL, &tv); 37 | if (ret == -1) { 38 | if (EINTR == errno) { 39 | continue; 40 | } 41 | perror("Fail to select"); 42 | continue; 43 | } 44 | if (ret == 0) { 45 | fprintf(stderr,"select Timeout\n"); 46 | continue; 47 | } 48 | struct v4l2_buffer buf; 49 | memset(&buf, 0, sizeof(buf)); 50 | buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 51 | buf.memory = V4L2_MEMORY_MMAP; 52 | // put cache from queue 53 | if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) { 54 | perror("0 Fail to ioctl 'VIDIOC_DQBUF'"); 55 | continue; 56 | } 57 | /* copy */ 58 | decoder->sample(sharedMem[buf.index].data, buf.bytesused); 59 | /* dequeue */ 60 | if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) { 61 | perror("0 Fail to ioctl 'VIDIOC_QBUF'"); 62 | } 63 | } 64 | printf("leave sampling function.\n"); 65 | return; 66 | } 67 | 68 | int Camera::Device::openDevice(const std::string &path) 69 | { 70 | int fd = open(path.c_str(), O_RDWR, 0); 71 | if (fd < 0) { 72 | perror("at Camera::Device::openDevice, fail to open device, error"); 73 | return -1; 74 | }; 75 | /* input */ 76 | struct v4l2_input input; 77 | input.index = 0; 78 | if (ioctl(fd, VIDIOC_S_INPUT, &input) == -1) { 79 | perror("Failed to ioctl VIDIOC_S_INPUT"); 80 | close(fd); 81 | return -2; 82 | } 83 | /* frame */ 84 | v4l2_streamparm streamParam; 85 | memset(&streamParam, 0, sizeof(struct v4l2_streamparm)); 86 | streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 87 | streamParam.parm.capture.timeperframe.numerator = 1; 88 | streamParam.parm.capture.timeperframe.denominator = 2; 89 | if (ioctl(fd, VIDIOC_S_PARM, &streamParam) == -1) { 90 | perror("failed to set frame"); 91 | } 92 | return fd; 93 | } 94 | 95 | bool Camera::Device::checkCapability() 96 | { 97 | /* check video decive driver capability */ 98 | struct v4l2_capability cap; 99 | if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { 100 | fprintf(stderr, "fail to ioctl VIDEO_QUERYCAP \n"); 101 | close(fd); 102 | fd = -1; 103 | return false; 104 | } 105 | 
106 | if (!(cap.capabilities & V4L2_BUF_TYPE_VIDEO_CAPTURE)) { 107 | fprintf(stderr, "The Current device is not a video capture device \n"); 108 | close(fd); 109 | fd = -1; 110 | return false; 111 | } 112 | 113 | if (!(cap.capabilities & V4L2_CAP_STREAMING)) { 114 | printf("The Current device does not support streaming i/o\n"); 115 | close(fd); 116 | fd = -1; 117 | return false; 118 | } 119 | return true; 120 | } 121 | 122 | bool Camera::Device::setFormat(int w, int h, const std::string &format) 123 | { 124 | if (format.empty()) { 125 | perror("farmat is empty"); 126 | return false; 127 | } 128 | /* set format */ 129 | struct v4l2_format fmt; 130 | fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 131 | fmt.fmt.pix.width = w; 132 | fmt.fmt.pix.height = h; 133 | if (format == CAMERA_PIXELFORMAT_JPEG) { 134 | fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; 135 | } else if (format == CAMERA_PIXELFORMAT_YUYV) { 136 | fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; 137 | } 138 | fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; 139 | if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { 140 | perror("VIDIOC_S_FMT set err"); 141 | return false; 142 | } 143 | /* allocate memory for image */ 144 | decoder->setFormat(w, h, format); 145 | return true; 146 | } 147 | 148 | bool Camera::Device::attachSharedMemory() 149 | { 150 | struct v4l2_requestbuffers reqbufs; 151 | memset(&reqbufs, 0, sizeof(reqbufs)); 152 | reqbufs.count = mmapBlockCount; 153 | reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 154 | reqbufs.memory = V4L2_MEMORY_MMAP; 155 | if (ioctl(fd, VIDIOC_REQBUFS, &reqbufs) == -1) { 156 | perror("Fail to ioctl 'VIDIOC_REQBUFS'"); 157 | close(fd); 158 | fd = -1; 159 | return false; 160 | } 161 | /* map kernel cache to user process */ 162 | for (std::size_t i = 0; i < mmapBlockCount; i++) { 163 | //stand for a frame 164 | struct v4l2_buffer buf; 165 | memset(&buf, 0, sizeof(buf)); 166 | buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 167 | buf.memory = V4L2_MEMORY_MMAP; 168 | buf.index = i; 169 | /*check the 
information of the kernel cache requested*/ 170 | if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) { 171 | perror("Fail to ioctl : VIDIOC_QUERYBUF"); 172 | close(fd); 173 | fd = -1; 174 | return false; 175 | } 176 | sharedMem[i].length = buf.length; 177 | sharedMem[i].data = (unsigned char*)mmap(NULL, buf.length, 178 | PROT_READ | PROT_WRITE, MAP_SHARED, 179 | fd, buf.m.offset); 180 | if (sharedMem[i].data == MAP_FAILED) { 181 | perror("Fail to mmap"); 182 | close(fd); 183 | fd = -1; 184 | return false; 185 | } 186 | } 187 | /* put the kernel cache to a queue */ 188 | for (std::size_t i = 0; i < mmapBlockCount; i++) { 189 | struct v4l2_buffer buf; 190 | memset(&buf, 0, sizeof(buf)); 191 | buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 192 | buf.memory = V4L2_MEMORY_MMAP; 193 | buf.index = i; 194 | if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) { 195 | perror("Fail to ioctl 'VIDIOC_QBUF'"); 196 | closeDevice(); 197 | return false; 198 | } 199 | } 200 | return true; 201 | } 202 | 203 | void Camera::Device::dettachSharedMemory() 204 | { 205 | for (std::size_t i = 0; i < mmapBlockCount; i++) { 206 | if (munmap(sharedMem[i].data, sharedMem[i].length) == -1) { 207 | perror("Fail to munmap"); 208 | } 209 | } 210 | return; 211 | } 212 | 213 | bool Camera::Device::startSample() 214 | { 215 | if (isRunning.load()) { 216 | return true; 217 | } 218 | v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 219 | if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) { 220 | perror("VIDIOC_STREAMON"); 221 | closeDevice(); 222 | return false; 223 | } 224 | /* start thread */ 225 | isRunning.store(1); 226 | sampleThread = std::thread(&Camera::Device::onSample, this); 227 | decoder->start(); 228 | return true; 229 | } 230 | 231 | bool Camera::Device::stopSample() 232 | { 233 | if (isRunning.load()) { 234 | isRunning.store(0); 235 | v4l2_buf_type type; 236 | type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 237 | if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1) { 238 | perror("Fail to ioctl 'VIDIOC_STREAMOFF'"); 239 | } 240 | 
sampleThread.join(); 241 | decoder->stop(); 242 | } 243 | return true; 244 | } 245 | 246 | void Camera::Device::closeDevice() 247 | { 248 | /* dettach shared memory */ 249 | dettachSharedMemory(); 250 | /* close device */ 251 | if (fd != -1) { 252 | close(fd); 253 | fd = -1; 254 | } 255 | return; 256 | } 257 | 258 | std::string Camera::Device::shellExecute(const std::string& command) 259 | { 260 | std::string result = ""; 261 | FILE *fpRead = popen(command.c_str(), "r"); 262 | char buf[1024]; 263 | memset(buf,'\0',sizeof(buf)); 264 | while (fgets(buf, 1024-1, fpRead)!=NULL) { 265 | result = buf; 266 | } 267 | if (fpRead != NULL) { 268 | pclose(fpRead); 269 | } 270 | auto it = result.find('\n'); 271 | result.erase(it); 272 | return result; 273 | } 274 | 275 | unsigned short Camera::Device::getVendorID(const char *name) 276 | { 277 | std::string cmd = Strings::format(1024, "cat /sys/class/video4linux/%s/device/modalias", name); 278 | /* usb:v2B16p6689d0100dcEFdsc02dp01ic0Eisc01ip00in00 */ 279 | std::string result = shellExecute(cmd); 280 | int i = result.find('v'); 281 | std::string vid = result.substr(i + 1, 4); 282 | return Strings::hexStringToInt16(vid); 283 | } 284 | unsigned short Camera::Device::getProductID(const char *name) 285 | { 286 | std::string cmd = Strings::format(1024, "cat /sys/class/video4linux/%s/device/modalias", name); 287 | /* usb:v2B16p6689d0100dcEFdsc02dp01ic0Eisc01ip00in00 */ 288 | std::string result = shellExecute(cmd); 289 | int i = result.find('p'); 290 | std::string pid = result.substr(i + 1, 4); 291 | return Strings::hexStringToInt16(pid); 292 | } 293 | 294 | std::vector Camera::Device::enumerate() 295 | { 296 | std::vector devPathList; 297 | DIR *dir; 298 | if ((dir = opendir("/dev")) == nullptr) { 299 | printf("failed to open /dev/\n"); 300 | return devPathList; 301 | } 302 | struct dirent *ptr = nullptr; 303 | while ((ptr=readdir(dir)) != nullptr) { 304 | if (ptr->d_type != DT_CHR) { 305 | continue; 306 | } 307 | if 
(std::string(ptr->d_name).find("video") == std::string::npos) { 308 | continue; 309 | } 310 | Camera::Property property; 311 | property.vendorID = Camera::Device::getVendorID((char*)ptr->d_name); 312 | property.productID = Camera::Device::getProductID((char*)ptr->d_name); 313 | 314 | std::string devPath = Strings::format(32, "/dev/%s", (char*)ptr->d_name); 315 | int index = devPath.find('\0'); 316 | property.path = devPath.substr(0, index); 317 | int fd = open(property.path.c_str(), O_RDWR | O_NONBLOCK, 0); 318 | /* check capability */ 319 | struct v4l2_fmtdesc fmtdesc; 320 | memset(&fmtdesc,0, sizeof(fmtdesc)); 321 | fmtdesc.index = 0; 322 | fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 323 | if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)<=-1) { 324 | continue; 325 | } 326 | devPathList.push_back(property); 327 | close(fd); 328 | } 329 | return devPathList; 330 | } 331 | 332 | std::vector Camera::Device::getPixelFormatList(const std::string &path) 333 | { 334 | std::vector formatList; 335 | int fd = open(path.c_str(), O_RDWR | O_NONBLOCK, 0); 336 | if (fd < 0) { 337 | printf("fail to open device. 
fd = %d\n", fd); 338 | return formatList; 339 | } 340 | 341 | struct v4l2_capability cap; 342 | memset(&cap, 0, sizeof(cap)); 343 | if (ioctl(fd, VIDIOC_QUERYCAP, &cap)<0) { 344 | perror("VIDIOC_QUERYCAP fail"); 345 | close(fd); 346 | return formatList; 347 | } 348 | if (!(cap.capabilities & V4L2_BUF_TYPE_VIDEO_CAPTURE)) { 349 | perror("not V4L2_BUF_TYPE_VIDEO_CAPTURE"); 350 | close(fd); 351 | return formatList; 352 | } 353 | if (!(cap.capabilities & V4L2_CAP_STREAMING)) { 354 | perror("not V4L2_CAP_STREAMING"); 355 | close(fd); 356 | return formatList; 357 | } 358 | struct v4l2_fmtdesc fmtdesc; 359 | memset(&fmtdesc,0, sizeof(fmtdesc)); 360 | fmtdesc.index = 0; 361 | fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 362 | while (1) { 363 | int ret = ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc); 364 | if (ret == -1) { 365 | printf("%s\n",strerror(errno)); 366 | break; 367 | } 368 | std::string description = std::string((char*)fmtdesc.description); 369 | PixelFormat pixelFormat; 370 | if (description.find(CAMERA_PIXELFORMAT_JPEG) != std::string::npos) { 371 | pixelFormat.formatString = CAMERA_PIXELFORMAT_JPEG; 372 | } else if (description.find(CAMERA_PIXELFORMAT_YUYV) != std::string::npos) { 373 | pixelFormat.formatString = CAMERA_PIXELFORMAT_YUYV; 374 | } else { 375 | pixelFormat.formatString = description; 376 | } 377 | pixelFormat.formatInt = fmtdesc.pixelformat; 378 | formatList.push_back(pixelFormat); 379 | fmtdesc.index++; 380 | } 381 | close(fd); 382 | return formatList; 383 | } 384 | 385 | std::vector Camera::Device::getResolutionList(const std::string &path, const std::string &pixelFormat) 386 | { 387 | std::vector resList; 388 | /* get pixel format list */ 389 | std::vector pixelFormatList = Camera::Device::getPixelFormatList(path); 390 | if (pixelFormatList.empty()) { 391 | return resList; 392 | } 393 | 394 | bool hasPixelFormat = false; 395 | unsigned int pixelFormatInt = 0; 396 | for (std::size_t i = 0; i < pixelFormatList.size(); i++) { 397 | if 
(pixelFormatList[i].formatString == pixelFormat) { 398 | hasPixelFormat = true; 399 | pixelFormatInt = pixelFormatList[i].formatInt; 400 | break; 401 | } 402 | } 403 | if (!hasPixelFormat) { 404 | return resList; 405 | } 406 | 407 | int fd = open(path.c_str(), O_RDWR | O_NONBLOCK, 0); 408 | if (fd < 0) { 409 | printf("fail to open device. fd = %d\n", fd); 410 | return resList; 411 | } 412 | std::set resSet; 413 | struct v4l2_frmsizeenum frmsize; 414 | frmsize.pixel_format = pixelFormatInt; 415 | frmsize.index = 0; 416 | while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0){ 417 | std::string res; 418 | if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE || frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE){ 419 | res = Strings::format(16, "%d*%d", frmsize.discrete.width, frmsize.discrete.height); 420 | int index = res.find('\0'); 421 | resSet.insert(res.substr(0, index)); 422 | } 423 | frmsize.index++; 424 | } 425 | close(fd); 426 | resList = std::vector(resSet.begin(), resSet.end()); 427 | std::sort(resList.begin(), resList.end(), [](const std::string &res1, const std::string &res2){ 428 | std::vector params1 = Strings::split(res1, "*"); 429 | std::vector params2 = Strings::split(res2, "*"); 430 | return std::atoi(params1[0].c_str()) * std::atoi(params1[1].c_str()) > 431 | std::atoi(params2[0].c_str()) * std::atoi(params2[1].c_str()); 432 | }); 433 | return resList; 434 | } 435 | 436 | int Camera::Device::openPath(const std::string &path, const std::string &format, const std::string &res) 437 | { 438 | fd = openDevice(path); 439 | if (fd < 0) { 440 | return -3; 441 | } 442 | usleep(500000); 443 | /* set format */ 444 | std::vector resList = Strings::split(res, "*"); 445 | int w = std::atoi(resList[0].c_str()); 446 | int h = std::atoi(resList[1].c_str()); 447 | if (Camera::Device::setFormat(w, h, format) == false) { 448 | printf("failed to setFormat.\n"); 449 | return -4; 450 | } 451 | /* attach shared memory */ 452 | if (attachSharedMemory() == false) { 453 | printf("fail 
to attachSharedMemory\n"); 454 | return -5; 455 | } 456 | /* start sample */ 457 | if (startSample() == false) { 458 | std::cout<<"fail to sample"< devList = Camera::Device::enumerate(); 482 | if (devList.empty()) { 483 | return -1; 484 | } 485 | /* get path by vid pid */ 486 | Camera::Property dev; 487 | for (std::size_t i = 0; i < devList.size(); i++) { 488 | if (devList[i].vendorID == vid && devList[i].productID == pid) { 489 | dev = devList[i]; 490 | } 491 | } 492 | if (dev.path.empty()) { 493 | return -2; 494 | } 495 | std::cout<<"dev path:"< pixelFormatList = Camera::Device::getPixelFormatList(dev.path); 498 | if (pixelFormatList.empty()) { 499 | return -3; 500 | } 501 | /* get resolution */ 502 | std::vector resList = Camera::Device::getResolutionList(dev.path, pixelFormat); 503 | if (resList.empty()) { 504 | return -4; 505 | } 506 | if (resIndex >= resList.size()) { 507 | resIndex = 0; 508 | } 509 | resolutionMap[pixelFormat] = resList; 510 | devPath = dev.path; 511 | return openPath(dev.path, pixelFormat, resList[resIndex]); 512 | } 513 | 514 | void Camera::Device::stop() 515 | { 516 | stopSample(); 517 | /* clear */ 518 | closeDevice(); 519 | 520 | usleep(1000000); 521 | return; 522 | } 523 | 524 | void Camera::Device::clear() 525 | { 526 | formatList.clear(); 527 | resolutionMap.clear(); 528 | return; 529 | } 530 | 531 | void Camera::Device::restart(const std::string &format, const std::string &res) 532 | { 533 | stop(); 534 | start(devPath, format, res); 535 | return; 536 | } 537 | 538 | void Camera::Device::setParam(unsigned int controlID, int value) 539 | { 540 | v4l2_queryctrl queryctrl; 541 | queryctrl.id = controlID; 542 | if (ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl) == -1) { 543 | if (errno != EINVAL) { 544 | return; 545 | } else { 546 | std::cout<<"ERROR :: Unable to set property (NOT SUPPORTED)\n"; 547 | return; 548 | } 549 | } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { 550 | std::cout<<"ERROR :: Unable to set property (DISABLED).\n"; 
551 | return; 552 | } else { 553 | v4l2_control control{controlID, value}; 554 | if (ioctl(fd, VIDIOC_S_CTRL, &control) == -1) { 555 | std::cout<<"Failed to set property."; 556 | return; 557 | } 558 | control.value = 0; 559 | if (ioctl(fd, VIDIOC_G_CTRL, &control) == -1) { 560 | std::cout<<"Failed to get property."; 561 | } 562 | } 563 | return; 564 | } 565 | 566 | int Camera::Device::getParamRange(unsigned int controlID, int modeID, Param ¶m) 567 | { 568 | v4l2_queryctrl queryctrl; 569 | queryctrl.id = controlID; 570 | 571 | if (ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl) == -1) { 572 | if (errno != EINVAL) { 573 | return -1; 574 | } else { 575 | std::cout<<"ERROR :: Unable to get property (NOT SUPPORTED)\n"; 576 | return -1; 577 | } 578 | } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { 579 | std::cout<<"ERROR :: Unable to get property (DISABLED).\n"; 580 | return -2; 581 | } 582 | struct v4l2_control ctrl; 583 | ctrl.id = controlID; 584 | ctrl.value = 0; 585 | if (ioctl(fd, VIDIOC_G_CTRL, &ctrl) == -1) { 586 | std::cout<<"Failed to get value."; 587 | } 588 | struct v4l2_control ctrlMode; 589 | ctrlMode.id = modeID; 590 | ctrlMode.value = 0; 591 | if (ioctl(fd, VIDIOC_G_CTRL, &ctrlMode) == -1) { 592 | std::cout<<"Failed to get control mode."; 593 | } 594 | param.minVal = queryctrl.minimum; 595 | param.maxVal = queryctrl.maximum; 596 | param.defaultVal = queryctrl.default_value; 597 | param.step = queryctrl.step; 598 | param.value = ctrl.value; 599 | param.flag = ctrlMode.value; 600 | return 0; 601 | } 602 | 603 | int Camera::Device::getParam(unsigned int controlID) 604 | { 605 | v4l2_control ctrl{controlID, 0}; 606 | if (ioctl(fd, VIDIOC_G_CTRL, &ctrl) == -1) { 607 | return -1; 608 | } 609 | return ctrl.value; 610 | } 611 | 612 | void Camera::Device::setWhiteBalanceMode(int value) 613 | { 614 | setParam(V4L2_CID_AUTO_WHITE_BALANCE, value); 615 | } 616 | 617 | int Camera::Device::getWhiteBalanceMode() 618 | { 619 | return getParam(V4L2_CID_AUTO_WHITE_BALANCE); 
620 | } 621 | 622 | void Camera::Device::setWhiteBalanceTemperature(int value) 623 | { 624 | setParam(V4L2_CID_WHITE_BALANCE_TEMPERATURE, value); 625 | return; 626 | } 627 | 628 | int Camera::Device::getWhiteBalanceTemperature() 629 | { 630 | return getParam(V4L2_CID_WHITE_BALANCE_TEMPERATURE); 631 | } 632 | 633 | void Camera::Device::setBrightnessMode(int value) 634 | { 635 | setParam(V4L2_CID_AUTOBRIGHTNESS, value); 636 | } 637 | 638 | int Camera::Device::getBrightnessMode() 639 | { 640 | return getParam(V4L2_CID_AUTOBRIGHTNESS); 641 | } 642 | 643 | void Camera::Device::setBrightness(int value) 644 | { 645 | setParam(V4L2_CID_BRIGHTNESS, value); 646 | } 647 | 648 | int Camera::Device::getBrightness() 649 | { 650 | return getParam(V4L2_CID_BRIGHTNESS); 651 | } 652 | 653 | void Camera::Device::setContrast(int value) 654 | { 655 | setParam(V4L2_CID_CONTRAST, value); 656 | } 657 | 658 | int Camera::Device::getContrast() 659 | { 660 | return getParam(V4L2_CID_CONTRAST); 661 | } 662 | 663 | void Camera::Device::setSaturation(int value) 664 | { 665 | setParam(V4L2_CID_SATURATION, value); 666 | } 667 | 668 | int Camera::Device::getSaturation() 669 | { 670 | return getParam(V4L2_CID_SATURATION); 671 | } 672 | 673 | void Camera::Device::setHue(int value) 674 | { 675 | setParam(V4L2_CID_HUE, value); 676 | } 677 | 678 | int Camera::Device::getHue() 679 | { 680 | return getParam(V4L2_CID_HUE); 681 | } 682 | 683 | void Camera::Device::setSharpness(int value) 684 | { 685 | setParam(V4L2_CID_SHARPNESS, value); 686 | } 687 | 688 | int Camera::Device::getSharpness() 689 | { 690 | return getParam(V4L2_CID_SHARPNESS); 691 | } 692 | 693 | void Camera::Device::setBacklightCompensation(int value) 694 | { 695 | setParam(V4L2_CID_BACKLIGHT_COMPENSATION, value); 696 | } 697 | 698 | int Camera::Device::getBacklightCompensation() 699 | { 700 | return getParam(V4L2_CID_BACKLIGHT_COMPENSATION); 701 | } 702 | 703 | void Camera::Device::setGamma(int value) 704 | { 705 | setParam(V4L2_CID_GAMMA, 
value); 706 | } 707 | 708 | int Camera::Device::getGamma() 709 | { 710 | return getParam(V4L2_CID_GAMMA); 711 | } 712 | 713 | void Camera::Device::setExposureMode(int value) 714 | { 715 | setParam(V4L2_CID_EXPOSURE_AUTO, value); 716 | } 717 | 718 | int Camera::Device::getExposureMode() 719 | { 720 | return getParam(V4L2_CID_EXPOSURE_AUTO); 721 | } 722 | 723 | void Camera::Device::setExposure(int value) 724 | { 725 | setParam(V4L2_CID_EXPOSURE, value); 726 | } 727 | 728 | int Camera::Device::getExposure() 729 | { 730 | return getParam(V4L2_CID_EXPOSURE); 731 | } 732 | 733 | void Camera::Device::setExposureAbsolute(int value) 734 | { 735 | setParam(V4L2_CID_EXPOSURE_ABSOLUTE, value); 736 | } 737 | 738 | int Camera::Device::getExposureAbsolute() 739 | { 740 | return getParam(V4L2_CID_EXPOSURE_ABSOLUTE); 741 | } 742 | 743 | void Camera::Device::setAutoGain(int value) 744 | { 745 | setParam(V4L2_CID_AUTOGAIN, value); 746 | } 747 | 748 | int Camera::Device::getAutoGain() 749 | { 750 | return getParam(V4L2_CID_AUTOGAIN); 751 | } 752 | 753 | void Camera::Device::setGain(int value) 754 | { 755 | setParam(V4L2_CID_GAIN, value); 756 | } 757 | 758 | int Camera::Device::getGain() 759 | { 760 | return getParam(V4L2_CID_GAIN); 761 | } 762 | 763 | void Camera::Device::setPowerLineFrequence(int value) 764 | { 765 | setParam(V4L2_CID_POWER_LINE_FREQUENCY, value); 766 | } 767 | 768 | int Camera::Device::getFrequency() 769 | { 770 | return getParam(V4L2_CID_POWER_LINE_FREQUENCY); 771 | } 772 | 773 | void Camera::Device::setDefaultParam() 774 | { 775 | setWhiteBalanceMode(0); 776 | setWhiteBalanceTemperature(4600); 777 | setBrightnessMode(0); 778 | setBrightness(0); 779 | setContrast(32); 780 | setSaturation(64); 781 | setHue(0); 782 | setSharpness(3); 783 | setBacklightCompensation(0); 784 | setGamma(200); 785 | setExposureMode(V4L2_EXPOSURE_MANUAL); 786 | setExposureAbsolute(1500); 787 | setAutoGain(1); 788 | setGain(0); 789 | 
setPowerLineFrequence(V4L2_CID_POWER_LINE_FREQUENCY_50HZ); 790 | return; 791 | } 792 | 793 | void Camera::Device::setParam(const Camera::DeviceParam ¶m) 794 | { 795 | setWhiteBalanceMode(param.whiteBalanceMode); 796 | setWhiteBalanceTemperature(param.whiteBalanceTemperature); 797 | setBrightnessMode(param.brightnessMode); 798 | setBrightness(param.brightness); 799 | setContrast(param.contrast); 800 | setSaturation(param.saturation); 801 | setHue(param.hue); 802 | setSharpness(param.sharpness); 803 | setBacklightCompensation(param.backlightCompensation); 804 | setGamma(param.gamma); 805 | setExposureMode(param.exposureMode); 806 | setExposureAbsolute(param.exposureAbsolute); 807 | setAutoGain(param.autoGain); 808 | setGain(param.gain); 809 | setPowerLineFrequence(param.powerLineFrequence); 810 | return; 811 | } 812 | --------------------------------------------------------------------------------