├── .gitignore ├── BasicStreaming ├── Makefile └── basic_streaming.c ├── GStreamer ├── gstreamer_install.sh ├── gstreamer_live.sh ├── gstreamer_save_jpeg.sh ├── gstreamer_save_video.sh └── readme.txt ├── LICENSE ├── README.md └── Trigger ├── Makefile ├── README.md ├── alvium_trigger.h ├── hardware_trigger.c └── software_trigger.c /.gitignore: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | *.d 3 | 4 | # Compiled Object files 5 | *.slo 6 | *.lo 7 | *.o 8 | *.obj 9 | 10 | # Precompiled Headers 11 | *.gch 12 | *.pch 13 | 14 | # Compiled Dynamic libraries 15 | *.so 16 | *.dylib 17 | *.dll 18 | 19 | # Fortran module files 20 | *.mod 21 | *.smod 22 | 23 | # Compiled Static libraries 24 | *.lai 25 | *.la 26 | *.a 27 | *.lib 28 | 29 | # Executables 30 | *.exe 31 | *.out 32 | *.app 33 | -------------------------------------------------------------------------------- /BasicStreaming/Makefile: -------------------------------------------------------------------------------- 1 | CFLAGS ?= -O2 -Wall -Wextra -pedantic -std=c99 2 | EXAMPLES = basic_streaming 3 | 4 | all: $(EXAMPLES) 5 | 6 | clean: 7 | rm $(EXAMPLES) 8 | 9 | .PHONY: all clean 10 | -------------------------------------------------------------------------------- /BasicStreaming/basic_streaming.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | static void exitError(const char *s) { 13 | perror(s); 14 | exit(EXIT_FAILURE); 15 | } 16 | 17 | static bool keepStreaming = true; 18 | static void sigint(int _) { 19 | (void)_; 20 | keepStreaming = false; 21 | } 22 | 23 | int main(int const argc, char const **argv) { 24 | if(argc < 2) { 25 | fprintf(stderr, "usage: %s \n", argv[0]); 26 | exit(1); 27 | } 28 | 29 | char const *const devName = argv[1]; 30 | int const cameraFd = open( devName, O_RDWR, 0 ); 31 | if(cameraFd == -1) { 32 | exitError("opening camera"); 33 | } 34 | 35 | // Query device capabilities to distinguish single- and multiplane devices 36 | struct v4l2_capability cap; 37 | if(-1 == ioctl(cameraFd, VIDIOC_QUERYCAP, &cap)) { 38 | exitError("VIDIOC_QUERYCAP"); 39 | } 40 | 41 | enum v4l2_buf_type const bufferType = cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE 42 | ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE : V4L2_BUF_TYPE_VIDEO_CAPTURE; 43 | 44 | // Request buffers for streaming 45 | struct v4l2_requestbuffers reqBufs = { 46 | .count = 4, 47 | .type = bufferType, 48 | .memory = V4L2_MEMORY_MMAP 49 | }; 50 | 51 | if(ioctl(cameraFd, VIDIOC_REQBUFS, &reqBufs) == -1) { 52 | exitError( "VIDIOC_REQBUFS" ); 53 | } 54 | 55 | // Map buffers into process memory and queue frames 56 | struct v4l2_plane plane; 57 | char *buffers[reqBufs.count]; 58 | for(unsigned bufIdx = 0; bufIdx < reqBufs.count; ++bufIdx) { 59 | struct v4l2_buffer buf = { 60 | .type = bufferType, 61 | .index = bufIdx, 62 | .memory = V4L2_MEMORY_MMAP 63 | }; 64 | 65 | if(bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { 66 | buf.m.planes = &plane; 67 | buf.length = 1; 68 | } 69 | 70 | int res = ioctl(cameraFd, VIDIOC_QUERYBUF, &buf); 71 | if(res < 0) { 72 | printf("%d\n", res); 73 | exitError( "VIDIOC_QUERYBUF" ); 74 | } 75 | 76 | buffers[bufIdx] = mmap( NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraFd, bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ? 
buf.m.planes[0].m.mem_offset : buf.m.offset); 77 | 78 | if(buffers[bufIdx] == MAP_FAILED) { 79 | exitError("mmap"); 80 | } 81 | 82 | if(ioctl( cameraFd, VIDIOC_QBUF, &buf ) == -1) { 83 | exitError("VIDIOC_QBUF"); 84 | } 85 | } 86 | 87 | // Start stream 88 | if(ioctl(cameraFd, VIDIOC_STREAMON, &bufferType) == -1) { 89 | exitError("VIDIOC_STREAMON"); 90 | } 91 | 92 | signal(SIGINT, &sigint); 93 | printf("Streaming (Ctrl-C to quit): "); 94 | fflush(stdout); 95 | while(keepStreaming) { 96 | struct v4l2_buffer buf = { 97 | .type = bufferType, 98 | .memory = V4L2_MEMORY_MMAP 99 | }; 100 | 101 | if(bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { 102 | buf.m.planes = &plane; 103 | buf.length = 1; 104 | } 105 | 106 | // Wait for frame and dequeue associated buffer 107 | if(ioctl(cameraFd, VIDIOC_DQBUF, &buf ) == -1) { 108 | if(errno == EINTR) { 109 | break; 110 | } 111 | exitError("VIDIOC_DQBUF"); 112 | } 113 | 114 | // Frame data is available in buffers[buf.index]! 115 | 116 | printf("*"); 117 | fflush(stdout); 118 | 119 | // Re-queue frame after we're done with it 120 | if(ioctl(cameraFd, VIDIOC_QBUF, &buf) == -1) { 121 | if(errno == EINTR) { 122 | break; 123 | } 124 | exitError("VIDIOC_QBUF"); 125 | } 126 | } 127 | 128 | printf("\nDone\n"); 129 | 130 | // stop capture 131 | if(ioctl(cameraFd, VIDIOC_STREAMOFF, &bufferType) == -1) { 132 | exitError("VIDIOC_STREAMOFF"); 133 | } 134 | 135 | // For simplicity of the example, we let the system implicitly munmap 136 | // buffers and close files at exit 137 | 138 | return 0; 139 | } 140 | -------------------------------------------------------------------------------- /GStreamer/gstreamer_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo apt-get update 3 | sudo apt-get install libgstreamer1.0-0 4 | sudo apt install gstreamer1.0-plugins-ugly gstreamer1.0-plugins-good gstreamer1.0-plugins-bad 5 | sudo apt-get install gstreamer1.0-tools 6 | -------------------------------------------------------------------------------- /GStreamer/gstreamer_live.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================= 4 | # Variables 5 | # ========================================================= 6 | 7 | DEVICE=0 8 | VIDEOSRC=v4l2src 9 | VIDEOSINK=ximagesink 10 | BOARD=0 11 | PIPELINE="echo -e \"Pipeline not defined.\"" 12 | 13 | NITROGEN=nitrogen 14 | NVIDIA=nvidia 15 | WANDBOARD=wandboard 16 | APALIS_IMX8=apalis_imx8 17 | # ========================================================= 18 | # Usage function 19 | # ========================================================= 20 | 21 | usage() { 22 | echo -e "Usage: ./gstreamer_live.sh -d -b \n" 23 | echo -e "Options:" 24 | echo -e "-b, --board Currently used board. e.g. -b $NVIDIA" 25 | echo -e " Options:" 26 | echo -e " $NITROGEN for Nitrogen boards" 27 | echo -e " $NVIDIA for NVIDIA boards" 28 | echo -e " $WANDBOARD for Wandboard boards" 29 | echo -e " $APALIS_IMX8 for Apalis iMX8 boards" 30 | echo -e "-d, --device Device to use. e.g. 
-d /dev/video3" 31 | echo -e "-h, --help Display help" 32 | } 33 | 34 | # ========================================================= 35 | # Parse command line parameters 36 | # ========================================================= 37 | 38 | POSITIONAL=() 39 | while [[ $# -gt 0 ]] 40 | do 41 | key="$1" 42 | 43 | case $key in 44 | -b|--board) 45 | BOARD="$2" 46 | shift 47 | shift 48 | ;; 49 | -d|--device) 50 | DEVICE="$2" 51 | shift 52 | shift 53 | ;; 54 | -h|--help) 55 | usage 56 | exit 57 | ;; 58 | *) 59 | echo "Unknown parameter: $1" 60 | usage 61 | exit 1 62 | ;; 63 | esac 64 | done 65 | 66 | set -- "${POSITIONAL[@]}" 67 | 68 | # ========================================================= 69 | # gstreamer command 70 | # ========================================================= 71 | 72 | if [ "$DEVICE" = 0 ]; then 73 | echo -e "No device specified. Exit.\n" 74 | usage 75 | exit 1 76 | fi 77 | 78 | if [ "$BOARD" = 0 ]; then 79 | echo -e "No board specified. Exit.\n" 80 | usage 81 | exit 1 82 | fi 83 | 84 | if ! [[ "$BOARD" =~ ^($NITROGEN|$NVIDIA|$WANDBOARD|$APALIS_IMX8)$ ]]; then 85 | echo -e "Unsupported board specified. Exit.\n" 86 | usage 87 | exit 1 88 | fi 89 | 90 | if [ "$BOARD" = "$APALIS_IMX8" ]; then 91 | VIDEOSINK=waylandsink 92 | fi 93 | 94 | echo "Using device" $DEVICE 95 | echo "Using board" $BOARD 96 | 97 | if [ "$BOARD" = "$NVIDIA" ]; then 98 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE ! video/x-raw, format=BGRx ! $VIDEOSINK" 99 | else 100 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE ! $VIDEOSINK" 101 | fi 102 | 103 | eval "$PIPELINE" 104 | -------------------------------------------------------------------------------- /GStreamer/gstreamer_save_jpeg.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================= 4 | # Variables 5 | # ========================================================= 6 | 7 | DEVICE=0 8 | VIDEOSRC=v4l2src 9 | OUTPUT=./out.jpeg 10 | BOARD=0 11 | PIPELINE="echo -e \"Pipeline not defined.\"" 12 | 13 | NITROGEN=nitrogen 14 | NVIDIA=nvidia 15 | WANDBOARD=wandboard 16 | APALIS_IMX8=apalis_imx8 17 | 18 | # ========================================================= 19 | # Usage function 20 | # ========================================================= 21 | 22 | usage() { 23 | echo -e "Will save a frame from v4l2 to a jpeg file" 24 | echo -e "Usage: ./gstreamer_live.sh -d -b [-o ]\n" 25 | echo -e "Options:" 26 | echo -e "-b, --board Currently used board. e.g. -b $NVIDIA" 27 | echo -e " Options:" 28 | echo -e " $NITROGEN for Nitrogen boards" 29 | echo -e " $NVIDIA for NVIDIA boards" 30 | echo -e " $WANDBOARD for Wandboard boards" 31 | echo -e " $APALIS_IMX8 for Apalis iMX8 boards" 32 | echo -e "-d, --device Device to use, e.g. -d /dev/video3" 33 | echo -e "-h Display help" 34 | echo -e "-o, --output Output filepath. e.g. 
-o out.jpeg" 35 | } 36 | 37 | if [ "$SUCCESS" = false ]; then 38 | usage 39 | exit 40 | fi 41 | 42 | # ========================================================= 43 | # Parse command line parameters 44 | # ========================================================= 45 | 46 | POSITIONAL=() 47 | while [[ $# -gt 0 ]] 48 | do 49 | key="$1" 50 | 51 | case $key in 52 | -b|--board) 53 | BOARD="$2" 54 | shift 55 | shift 56 | ;; 57 | -d|--device) 58 | DEVICE="$2" 59 | shift 60 | shift 61 | ;; 62 | -h|--help) 63 | usage 64 | exit 65 | ;; 66 | -o|--output) 67 | OUTPUT="$2" 68 | shift 69 | shift 70 | ;; 71 | *) 72 | echo "Unknown parameter: $1" 73 | usage 74 | exit 1 75 | ;; 76 | esac 77 | done 78 | 79 | set -- "${POSITIONAL[@]}" 80 | 81 | 82 | # ========================================================= 83 | # gstreamer command 84 | # ========================================================= 85 | 86 | if [ "$DEVICE" = 0 ]; then 87 | echo -e "No device specified. Exit.\n" 88 | usage 89 | exit 1 90 | fi 91 | 92 | if [ "$BOARD" = 0 ]; then 93 | echo -e "No board specified. Exit.\n" 94 | usage 95 | exit 1 96 | fi 97 | 98 | if ! [[ "$BOARD" =~ ^($NITROGEN|$NVIDIA|$APALIS_IMX8|$WANDBOARD)$ ]]; then 99 | echo -e "Unsupported board specified. Exit.\n" 100 | usage 101 | exit 1 102 | fi 103 | 104 | echo "Using device" $DEVICE 105 | echo "Using board" $BOARD 106 | echo "Output file" $OUTPUT 107 | 108 | if [ "$BOARD" = "$NVIDIA" ]; then 109 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE num-buffers=1 ! queue ! video/x-raw,format=BGRx ! jpegenc ! filesink location=$OUTPUT" 110 | else 111 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE num-buffers=1 ! queue ! video/x-raw,format=RGB ! jpegenc ! filesink location=$OUTPUT" 112 | fi 113 | 114 | if ! eval "$PIPELINE"; then 115 | echo -e "\nFailed to launch gstreamer. Is the right device specified?" 116 | fi 117 | -------------------------------------------------------------------------------- /GStreamer/gstreamer_save_video.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ========================================================= 4 | # Variables 5 | # ========================================================= 6 | 7 | DEVICE=0 8 | VIDEOSRC=v4l2src 9 | OUTPUT=capture.avi 10 | BOARD=0 11 | PIPELINE="echo -e \"Pipeline not defined.\"" 12 | 13 | NITROGEN=nitrogen 14 | NVIDIA=nvidia 15 | WANDBOARD=wandboard 16 | APALIS_IMX8=apalis_imx8 17 | 18 | # ========================================================= 19 | # Usage function 20 | # ========================================================= 21 | 22 | usage() { 23 | echo -e "Will save the videostream from v4l2 to an avi file encoded with h264." 24 | echo -e "Usage: ./gstreamer_save_video.sh -d -b [-o ]\n" 25 | echo -e "Options:" 26 | echo -e "-b, --board Currently used board. e.g. -b $NVIDIA" 27 | echo -e " Options:" 28 | echo -e " $NITROGEN for Nitrogen boards" 29 | echo -e " $NVIDIA for NVIDIA boards" 30 | echo -e " $WANDBOARD for Wandboard boards" 31 | echo -e " $APALIS_IMX8 for Wandboard boards" 32 | echo -e "-d, --device Device to use, e.g. 
-d /dev/video3" 33 | echo -e "-h, --help Display help" 34 | echo -e "-o, --output Output file" 35 | } 36 | 37 | # ========================================================= 38 | # Parse command line parameters 39 | # ========================================================= 40 | 41 | POSITIONAL=() 42 | while [[ $# -gt 0 ]] 43 | do 44 | key="$1" 45 | 46 | case $key in 47 | -b|--board) 48 | BOARD="$2" 49 | shift 50 | shift 51 | ;; 52 | -d|--device) 53 | DEVICE="$2" 54 | shift 55 | shift 56 | ;; 57 | -h|--help) 58 | usage 59 | exit 60 | ;; 61 | -o|--output) 62 | OUTPUT="$2" 63 | shift 64 | shift 65 | ;; 66 | *) 67 | echo "Unknown parameter: $1" 68 | usage 69 | exit 1 70 | ;; 71 | esac 72 | done 73 | 74 | set -- "${POSITIONAL[@]}" 75 | 76 | 77 | # ========================================================= 78 | # gstreamer command 79 | # ========================================================= 80 | 81 | if [ "$DEVICE" = 0 ]; then 82 | echo -e "No device specified. Exit.\n" 83 | usage 84 | exit 1 85 | fi 86 | 87 | if [ "$BOARD" = 0 ]; then 88 | echo -e "No board specified. Exit.\n" 89 | usage 90 | exit 1 91 | fi 92 | 93 | if ! [[ "$BOARD" =~ ^($NITROGEN|$NVIDIA|$APALIS_IMX8|$WANDBOARD)$ ]]; then 94 | echo -e "Unsupported board specified. Exit.\n" 95 | usage 96 | exit 1 97 | fi 98 | 99 | echo "Using device" $DEVICE 100 | echo "Using board" $BOARD 101 | echo "Output file" $OUTPUT 102 | 103 | echo -e "Stop capture by hitting Ctrl+C once. Please wait until gstreamer command is completed.\n" 104 | 105 | if [ "$BOARD" = "$NVIDIA" ]; then 106 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE ! video/x-raw,format=BGRx ! videoscale ! video/x-raw,width=800,height=600 ! videoconvert ! videorate ! video/x-raw,framerate=30/1 ! x264enc ! avimux ! filesink location="$OUTPUT" -e" 107 | else 108 | PIPELINE="gst-launch-1.0 $VIDEOSRC device=$DEVICE ! videoscale ! videoconvert ! video/x-raw,width=800,height=600,format=RGB ! videoconvert ! videorate ! video/x-raw,framerate=30/1 ! x264enc ! avimux ! filesink location="$OUTPUT" -e" 109 | fi 110 | 111 | eval "$PIPELINE" 112 | -------------------------------------------------------------------------------- /GStreamer/readme.txt: -------------------------------------------------------------------------------- 1 | This folder contains some scripts for basic gstreamer usage. 2 | 3 | gstreamer_install.sh: Installs gstreamer. 4 | gstreamer_live.sh: Will display a live video stream. 5 | gstreamer_save_jpeg.sh: Will save a single frame from the live stream as jpeg. 6 | gstreamer_save_video.sh: Will save the live stream a h264 encoded video. 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to 25 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Allied Vision CSI examples 2 | This repository contains code examples for Allied Vision Alvium CSI-2 cameras and V4L2. 3 | 4 | ## Beta Disclaimer 5 | 6 | Please be aware that all code revisions not explicitly listed in the GitHub Release section are 7 | considered a **Beta Version**. 8 | 9 | For Beta Versions, the following applies in addition to the GPLv2 License: 10 | 11 | THE SOFTWARE IS PRELIMINARY AND STILL IN TESTING AND VERIFICATION PHASE AND IS PROVIDED ON AN “AS 12 | IS” AND “AS AVAILABLE” BASIS AND IS BELIEVED TO CONTAIN DEFECTS. THE PRIMARY PURPOSE OF THIS EARLY 13 | ACCESS IS TO OBTAIN FEEDBACK ON PERFORMANCE AND THE IDENTIFICATION OF DEFECTS IN THE SOFTWARE, 14 | HARDWARE AND DOCUMENTATION. 15 | -------------------------------------------------------------------------------- /Trigger/Makefile: -------------------------------------------------------------------------------- 1 | CFLAGS ?= -O2 -Wall -Wextra -pedantic 2 | EXAMPLES = software_trigger hardware_trigger 3 | 4 | all: $(EXAMPLES) 5 | 6 | clean: 7 | rm $(EXAMPLES) 8 | 9 | .PHONY: all clean 10 | -------------------------------------------------------------------------------- /Trigger/README.md: -------------------------------------------------------------------------------- 1 | # Triggering examples for Allied Vision MIPI CSI-2 cameras 2 | 3 | ## Overview 4 | V4L2 doesn’t provide triggering controls like GenICam does. To enable hardware and software triggering, Allied Vision provides custom controls defined in alvium_trigger.h. As a starting point for your application, we provide examples in C for hardware and software triggering. 5 | 6 | ## Supported driver and camera firmware 7 | ### Supported driver 8 | https://github.com/alliedvision/alvium-jetson-driver-release 9 | You can use this example version with Jetson driver version r36.4.3-avt1.0 for JetPack 6.2. 10 | 11 | ### Supported camera firmware 12 | Please use firmware version 13 or higher. To obtain the latest firmware for your Allied Vision MIPI CSI-2 camera, please contact our technical support team: 13 | https://www.alliedvision.com/en/support/contact-support-and-repair.html 14 | 15 | ## Available V4L2 controls 16 | All custom V4L2 request identifiers and enums for triggering are defined in alvium_trigger.h. 17 | Supported functionalities: 18 | * Hardware trigger through GPIOs (two selectable GPIOs), edge or level activation 19 | * Software trigger through I2C 20 | * Available trigger selector: Single frame start 21 | 22 | Trigger delay is currently not supported. 23 | 24 | 25 | ## Limitations 26 | The NVIDIA Jetson boards have several limitations that impact triggering: 27 | * NVIDIA's video input (VI) unit drivers are optimized to maximize throughput. When working with single frames, this behaviour may result in higher latency.
28 | 29 | 30 | ## Hardware triggering 31 | To access the GPIOs, see the user guide for your adapter board, section I/O connections. 32 | https://www.alliedvision.com/en/support/technical-documentation/accessory-documentation.html 33 | 34 | ## Exposure Active signals 35 | The Jetson driver supports Exposure Active signals. 36 | 37 | ## Troubleshooting 38 | 39 | * For supported camera firmware and JetPack versions, see above. 40 | 41 | * Aborting VIDIOC_DQBUF during triggered acquisition: 42 | VIDIOC_DQBUF blocks uninterruptibly in kernel mode. Killing the process is not possible because the signal cannot be delivered while it is blocked in kernel mode. 43 | Workaround: 44 | Execute a trigger command to make DQBUF receive a frame and return. 45 | You can change the trigger configuration while waiting in DQBUF. 46 | When DQBUF is blocked waiting for a hardware trigger: change the trigger source to software in a second thread and generate a trigger event with V4L2_CID_TRIGGER_SOFTWARE. (A minimal sketch of this workaround is appended at the end of this document.) 47 | 48 | ## More information about triggering 49 | This readme is intended to help you use the examples. For detailed information about triggering, see the latest user guide for your Alvium CSI-2 camera: 50 | https://www.alliedvision.com/en/support/technical-documentation/alvium-csi-2-documentation.html 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /Trigger/alvium_trigger.h: -------------------------------------------------------------------------------- 1 | /*============================================================================= 2 | Copyright (C) 2022 Allied Vision Technologies. All Rights Reserved. 3 | 4 | * This program is free software; you can redistribute it and/or modify 5 | * it under the terms of the GNU General Public License as published by 6 | * the Free Software Foundation; either version 2 of the License, or 7 | * (at your option) any later version. 8 | 9 | * This program is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | * GNU General Public License for more details.
13 | 14 | ----------------------------------------------------------------------------- 15 | 16 | File: alvium_trigger.h 17 | 18 | version: 1.0.0 19 | =============================================================================*/ 20 | 21 | #ifndef ALVIUM_TRIGGER_H 22 | #define ALVIUM_TRIGGER_H 23 | 24 | #include 25 | 26 | 27 | enum v4l2_triggeractivation 28 | { 29 | V4L2_TRIGGER_ACTIVATION_RISING_EDGE = 0, 30 | V4L2_TRIGGER_ACTIVATION_FALLING_EDGE = 1, 31 | V4L2_TRIGGER_ACTIVATION_ANY_EDGE = 2, 32 | V4L2_TRIGGER_ACTIVATION_LEVEL_HIGH = 3, 33 | V4L2_TRIGGER_ACTIVATION_LEVEL_LOW = 4 34 | }; 35 | 36 | enum v4l2_triggersource 37 | { 38 | V4L2_TRIGGER_SOURCE_LINE0 = 0, 39 | V4L2_TRIGGER_SOURCE_LINE1 = 1, 40 | V4L2_TRIGGER_SOURCE_LINE2 = 2, 41 | V4L2_TRIGGER_SOURCE_LINE3 = 3, 42 | V4L2_TRIGGER_SOURCE_SOFTWARE = 4 43 | }; 44 | 45 | struct v4l2_trigger_status 46 | { 47 | __u8 trigger_source; // v4l2_triggersource enum value 48 | __u8 trigger_activation; // v4l2_triggeractivation enum value 49 | __u8 trigger_mode_enabled; // Enable (1) or disable (0) trigger mode 50 | }; 51 | 52 | struct v4l2_trigger_rate 53 | { 54 | __u64 frames_per_period; // Number of frames per period 55 | __u64 period_sec; // Period in seconds 56 | }; 57 | 58 | /* Trigger mode to ON/OFF */ 59 | #define V4L2_CID_TRIGGER_MODE (V4L2_CID_CAMERA_CLASS_BASE+47) 60 | 61 | /* trigger activation: edge_rising, edge_falling, edge_any, level_high, level_low */ 62 | #define V4L2_CID_TRIGGER_ACTIVATION (V4L2_CID_CAMERA_CLASS_BASE+48) 63 | 64 | /* trigger source: software, gpio0, gpio1 */ 65 | #define V4L2_CID_TRIGGER_SOURCE (V4L2_CID_CAMERA_CLASS_BASE+49) 66 | 67 | /* Execute a software trigger */ 68 | #define V4L2_CID_TRIGGER_SOFTWARE (V4L2_CID_CAMERA_CLASS_BASE+50) 69 | 70 | /* Camera temperature readout */ 71 | #define V4L2_CID_DEVICE_TEMPERATURE (V4L2_CID_CAMERA_CLASS_BASE+51) 72 | 73 | // NVIDIA Tegra controls 74 | 75 | #define TEGRA_CAMERA_CID_BASE (V4L2_CTRL_CLASS_CAMERA | 0x2000) 76 | 77 | #define TEGRA_CAMERA_CID_VI_CAPTURE_TIMEOUT (TEGRA_CAMERA_CID_BASE+111) 78 | 79 | #endif 80 | -------------------------------------------------------------------------------- /Trigger/hardware_trigger.c: -------------------------------------------------------------------------------- 1 | #include "alvium_trigger.h" 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | static const char *const FRAME_BIN_FILE = "frame.bin"; 13 | 14 | static void exitError( const char *s ) 15 | { 16 | perror( s ); 17 | exit( EXIT_FAILURE ); 18 | } 19 | 20 | static void usage(const char *name) 21 | { 22 | fprintf( stderr, "usage: %s [-s ] \n",name); 23 | exit( 1 ); 24 | } 25 | 26 | static __u32 get_bytesused(const struct v4l2_buffer * const buf) 27 | { 28 | if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 29 | { 30 | return buf->m.planes[0].bytesused; 31 | } 32 | 33 | return buf->bytesused; 34 | } 35 | 36 | static void set_timeout(int fd) 37 | { 38 | struct v4l2_control ctrl; 39 | ctrl.id = TEGRA_CAMERA_CID_VI_CAPTURE_TIMEOUT; 40 | ctrl.value = -1; 41 | 42 | ioctl(fd, VIDIOC_S_CTRL, &ctrl); 43 | 44 | } 45 | 46 | int main( int const argc, char const **argv ) 47 | { 48 | int opt; 49 | int subdevFd = -1; 50 | const char *subdevNode = NULL; 51 | 52 | while ((opt = getopt(argc,argv,":s:")) != -1) 53 | { 54 | switch (opt) { 55 | case 's': 56 | fprintf(stderr,"Using subdev %s\n",optarg); 57 | subdevNode = optarg; 58 | break; 59 | default: 60 | usage(argv[0]); 61 | break; 62 | } 63 | } 64 | 65 | 
if( optind + 1 != argc ) 66 | { 67 | usage(argv[0]); 68 | } 69 | 70 | char const *const devName = argv[optind]; 71 | int const cameraFd = open( devName, O_RDWR, 0 ); 72 | if( cameraFd == -1 ) 73 | { 74 | exitError( "opening camera" ); 75 | } 76 | 77 | if (subdevNode != NULL) { 78 | subdevFd = open( subdevNode, O_RDWR, 0 ); 79 | if( subdevFd == -1 ) 80 | { 81 | exitError( "opening subdev" ); 82 | } 83 | } 84 | 85 | struct v4l2_capability capability = { 0 }; 86 | 87 | if( ioctl( cameraFd, VIDIOC_QUERYCAP, &capability ) == -1 ) 88 | { 89 | exitError( "VIDIOC_REQBUFS" ); 90 | } 91 | 92 | int bufferType; 93 | 94 | if ( capability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE ) 95 | { 96 | bufferType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 97 | } 98 | else if ( capability.device_caps & V4L2_CAP_VIDEO_CAPTURE ) 99 | { 100 | bufferType = V4L2_BUF_TYPE_VIDEO_CAPTURE; 101 | } 102 | else 103 | { 104 | exitError( "Buffer type not supported" ); 105 | } 106 | 107 | set_timeout(cameraFd); 108 | 109 | // Request one buffer. Note: Some boards like the NVidia Jetson Nano return a minimum number 110 | // of frames. To work correctly, all frames need to be queued later. 111 | struct v4l2_requestbuffers reqBufs = { 112 | .count = 1, 113 | .type = bufferType, 114 | .memory = V4L2_MEMORY_MMAP 115 | }; 116 | 117 | if( ioctl( cameraFd, VIDIOC_REQBUFS, &reqBufs ) == -1 ) 118 | { 119 | exitError( "VIDIOC_REQBUFS" ); 120 | } 121 | 122 | // mmap buffers into application address space 123 | char *pBuffers[reqBufs.count]; 124 | for( unsigned bufIdx = 0; bufIdx < reqBufs.count; ++bufIdx ) 125 | { 126 | struct v4l2_buffer buf = { 127 | .type = bufferType, 128 | .memory = V4L2_MEMORY_MMAP, 129 | .index = bufIdx 130 | }; 131 | 132 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 133 | 134 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 135 | { 136 | buf.m.planes = planes; 137 | buf.length = VIDEO_MAX_PLANES; 138 | } 139 | 140 | if( ioctl( cameraFd, VIDIOC_QUERYBUF, &buf ) == -1 ) 141 | { 142 | exitError( "VIDIOC_QUERYBUF" ); 143 | } 144 | 145 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE && buf.length > 1) 146 | { 147 | exitError( "Only formats with on plane are supported" ); 148 | } 149 | 150 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 151 | pBuffers[bufIdx] = 152 | mmap( NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraFd, planes[0].m.mem_offset ); 153 | else 154 | pBuffers[bufIdx] = 155 | mmap( NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraFd, buf.m.offset ); 156 | 157 | if( pBuffers[bufIdx] == MAP_FAILED ) 158 | { 159 | exitError( "mmap" ); 160 | } 161 | } 162 | 163 | // Queue buffers. Even though we request only one, some boards cannot handle that. (See above). 164 | for( unsigned i = 0; i < reqBufs.count; ++i ) 165 | { 166 | struct v4l2_buffer buf = { 167 | .type = bufferType, 168 | .memory = V4L2_MEMORY_MMAP, 169 | .index = i 170 | }; 171 | 172 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 173 | 174 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 175 | { 176 | buf.m.planes = planes; 177 | buf.length = VIDEO_MAX_PLANES; 178 | } 179 | 180 | if( ioctl( cameraFd, VIDIOC_QBUF, &buf ) == -1 ) 181 | { 182 | exitError( "VIDIOC_QBUF" ); 183 | } 184 | } 185 | 186 | int const ctrlFd = subdevFd == -1 ? 
cameraFd : subdevFd; 187 | 188 | // Set trigger mode 189 | struct v4l2_control enable_trigger = {.id= V4L2_CID_TRIGGER_MODE, .value=1}; 190 | if( ioctl( ctrlFd, VIDIOC_S_CTRL, &enable_trigger ) == -1 ) 191 | { 192 | exitError( "enabling trigger mode" ); 193 | } 194 | 195 | // Set trigger source 196 | int const source = V4L2_TRIGGER_SOURCE_LINE0; 197 | struct v4l2_control set_trigger_source = {.id=V4L2_CID_TRIGGER_SOURCE, .value=source}; 198 | if( ioctl(ctrlFd, VIDIOC_S_CTRL, &set_trigger_source) == -1 ) 199 | { 200 | exitError( "setting trigger source" ); 201 | } 202 | 203 | // Set trigger activation 204 | int const activation = V4L2_TRIGGER_ACTIVATION_RISING_EDGE; 205 | // int const activation = V4L2_TRIGGER_ACTIVATION_FALLING_EDGE; 206 | // int const activation = V4L2_TRIGGER_ACTIVATION_ANY_EDGE; 207 | // int const activation = V4L2_TRIGGER_ACTIVATION_LEVEL_HIGH; 208 | // int const activation = V4L2_TRIGGER_ACTIVATION_LEVEL_LOW; 209 | struct v4l2_control set_trigger_activation = {.id=V4L2_CID_TRIGGER_ACTIVATION, .value=activation}; 210 | if( ioctl(ctrlFd, VIDIOC_S_CTRL, &set_trigger_activation) == -1 ) 211 | { 212 | exitError( "setting trigger activation" ); 213 | } 214 | 215 | 216 | // Start stream 217 | 218 | if( ioctl( cameraFd, VIDIOC_STREAMON, &bufferType ) == -1 ) 219 | { 220 | exitError( "VIDIOC_STREAMON" ); 221 | } 222 | 223 | // Wait for trigger and dequeue triggered buffer 224 | struct v4l2_buffer buf = { 225 | .type = bufferType, 226 | .memory = V4L2_MEMORY_MMAP 227 | }; 228 | 229 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 230 | 231 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 232 | { 233 | buf.m.planes = planes; 234 | buf.length = VIDEO_MAX_PLANES; 235 | } 236 | 237 | if( ioctl( cameraFd, VIDIOC_DQBUF, &buf ) == -1 ) 238 | { 239 | exitError( "VIDIOC_DQBUF" ); 240 | } 241 | 242 | // Save captured frame to file 243 | printf( "Frame captured, saving to file...\n" ); 244 | 245 | int const outputFd = open( FRAME_BIN_FILE, O_WRONLY | O_CREAT, 0644 ); 246 | if( outputFd == -1 ) 247 | { 248 | exitError( "open output file" ); 249 | } 250 | 251 | if( write( outputFd, pBuffers[buf.index], get_bytesused(&buf) ) == -1 ) 252 | { 253 | exitError( "writing frame to file" ); 254 | } 255 | 256 | printf( "Captured frame written to %s\n", FRAME_BIN_FILE ); 257 | 258 | // stop capture 259 | if( ioctl( cameraFd, VIDIOC_STREAMOFF, &bufferType ) == -1 ) 260 | { 261 | exitError( "VIDIOC_STREAMOFF" ); 262 | } 263 | 264 | // For simplicity of the example, we let the system implicitly munmap 265 | // buffers and close files at exit 266 | 267 | return 0; 268 | } 269 | -------------------------------------------------------------------------------- /Trigger/software_trigger.c: -------------------------------------------------------------------------------- 1 | #include "alvium_trigger.h" 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | static const char *const FRAME_BIN_FILE = "frame.bin"; 13 | 14 | static void exitError( const char *s ) 15 | { 16 | perror( s ); 17 | exit( EXIT_FAILURE ); 18 | } 19 | 20 | static void usage(const char *name) 21 | { 22 | fprintf( stderr, "usage: %s [-s ] \n",name); 23 | exit( 1 ); 24 | } 25 | 26 | static __u32 get_bytesused(const struct v4l2_buffer * const buf) 27 | { 28 | if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 29 | { 30 | return buf->m.planes[0].bytesused; 31 | } 32 | 33 | return buf->bytesused; 34 | } 35 | 36 | static void set_timeout(int fd) 37 | { 38 | struct 
v4l2_control ctrl; 39 | ctrl.id = TEGRA_CAMERA_CID_VI_CAPTURE_TIMEOUT; 40 | ctrl.value = -1; 41 | 42 | ioctl(fd, VIDIOC_S_CTRL, &ctrl); 43 | 44 | } 45 | 46 | int main( int const argc, char const **argv ) 47 | { 48 | int opt; 49 | int subdevFd = -1; 50 | const char *subdevNode = NULL; 51 | 52 | while ((opt = getopt(argc,argv,":s:")) != -1) 53 | { 54 | switch (opt) { 55 | case 's': 56 | fprintf(stderr,"Using subdev %s\n",optarg); 57 | subdevNode = optarg; 58 | break; 59 | default: 60 | usage(argv[0]); 61 | break; 62 | } 63 | } 64 | 65 | if( optind + 1 != argc ) 66 | { 67 | usage(argv[0]); 68 | } 69 | 70 | char const *const devName = argv[optind]; 71 | int const cameraFd = open( devName, O_RDWR, 0 ); 72 | if( cameraFd == -1 ) 73 | { 74 | exitError( "opening camera" ); 75 | } 76 | 77 | if (subdevNode != NULL) { 78 | subdevFd = open( subdevNode, O_RDWR, 0 ); 79 | if( subdevFd == -1 ) 80 | { 81 | exitError( "opening subdev" ); 82 | } 83 | } 84 | 85 | struct v4l2_capability capability = { 0 }; 86 | 87 | if( ioctl( cameraFd, VIDIOC_QUERYCAP, &capability ) == -1 ) 88 | { 89 | exitError( "VIDIOC_REQBUFS" ); 90 | } 91 | 92 | int bufferType; 93 | 94 | if ( capability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE ) 95 | { 96 | bufferType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 97 | } 98 | else if ( capability.device_caps & V4L2_CAP_VIDEO_CAPTURE ) 99 | { 100 | bufferType = V4L2_BUF_TYPE_VIDEO_CAPTURE; 101 | } 102 | else 103 | { 104 | exitError( "Buffer type not supported" ); 105 | } 106 | 107 | set_timeout(cameraFd); 108 | 109 | // Request one buffer. Note: Some boards like the NVidia Jetson Nano return a minimum number 110 | // of frames. To work correctly, all frames need to be queued later. 111 | struct v4l2_requestbuffers reqBufs = { 112 | .count = 1, 113 | .type = bufferType, 114 | .memory = V4L2_MEMORY_MMAP 115 | }; 116 | 117 | if( ioctl( cameraFd, VIDIOC_REQBUFS, &reqBufs ) == -1 ) 118 | { 119 | exitError( "VIDIOC_REQBUFS" ); 120 | } 121 | 122 | // mmap buffers into application address space 123 | char *pBuffers[reqBufs.count]; 124 | for( unsigned bufIdx = 0; bufIdx < reqBufs.count; ++bufIdx ) 125 | { 126 | struct v4l2_buffer buf = { 127 | .type = bufferType, 128 | .memory = V4L2_MEMORY_MMAP, 129 | .index = bufIdx 130 | }; 131 | 132 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 133 | 134 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 135 | { 136 | buf.m.planes = planes; 137 | buf.length = VIDEO_MAX_PLANES; 138 | } 139 | 140 | if( ioctl( cameraFd, VIDIOC_QUERYBUF, &buf ) == -1 ) 141 | { 142 | exitError( "VIDIOC_QUERYBUF" ); 143 | } 144 | 145 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE && buf.length > 1) 146 | { 147 | exitError( "Only formats with on plane are supported" ); 148 | } 149 | 150 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 151 | pBuffers[bufIdx] = 152 | mmap( NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraFd, planes[0].m.mem_offset ); 153 | else 154 | pBuffers[bufIdx] = 155 | mmap( NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cameraFd, buf.m.offset ); 156 | 157 | if( pBuffers[bufIdx] == MAP_FAILED ) 158 | { 159 | exitError( "mmap" ); 160 | } 161 | } 162 | 163 | // Queue buffers. Even though we request only one, some boards cannot handle that. (See above). 
164 | for( unsigned i = 0; i < reqBufs.count; ++i ) 165 | { 166 | struct v4l2_buffer buf = { 167 | .type = bufferType, 168 | .memory = V4L2_MEMORY_MMAP, 169 | .index = i 170 | }; 171 | 172 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 173 | 174 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 175 | { 176 | buf.m.planes = planes; 177 | buf.length = VIDEO_MAX_PLANES; 178 | } 179 | 180 | if( ioctl( cameraFd, VIDIOC_QBUF, &buf ) == -1 ) 181 | { 182 | exitError( "VIDIOC_QBUF" ); 183 | } 184 | } 185 | 186 | int const ctrlFd = subdevFd == -1 ? cameraFd : subdevFd; 187 | 188 | // Set trigger mode 189 | struct v4l2_control enable_trigger = {.id = V4L2_CID_TRIGGER_MODE, .value=1}; 190 | if( ioctl( ctrlFd, VIDIOC_S_CTRL, &enable_trigger ) == -1 ) 191 | { 192 | exitError( "enabling trigger mode" ); 193 | } 194 | 195 | // Set trigger source to software 196 | int const source = V4L2_TRIGGER_SOURCE_SOFTWARE; 197 | struct v4l2_control set_trigger_source = {.id=V4L2_CID_TRIGGER_SOURCE, .value=source}; 198 | if( ioctl(ctrlFd, VIDIOC_S_CTRL, &set_trigger_source ) == -1 ) 199 | { 200 | exitError( "setting trigger source to software" ); 201 | } 202 | 203 | // Start stream 204 | 205 | if( ioctl( cameraFd, VIDIOC_STREAMON, &bufferType ) == -1 ) 206 | { 207 | exitError( "VIDIOC_STREAMON" ); 208 | } 209 | 210 | // Wait for user to request a software trigger 211 | printf( "start trigger with [enter]" ); 212 | fflush( stdout ); 213 | getc( stdin ); 214 | 215 | struct v4l2_control trigger_software = {.id=V4L2_CID_TRIGGER_SOFTWARE}; 216 | if( ioctl(ctrlFd, VIDIOC_S_CTRL, &trigger_software) == -1 ) 217 | { 218 | exitError( "V4L2_CID_TRIGGER_SOFTWARE" ); 219 | } 220 | 221 | // Dequeue triggered buffer 222 | struct v4l2_buffer buf = { 223 | .type = bufferType, 224 | .memory = V4L2_MEMORY_MMAP 225 | }; 226 | 227 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; 228 | 229 | if (bufferType == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 230 | { 231 | buf.m.planes = planes; 232 | buf.length = VIDEO_MAX_PLANES; 233 | } 234 | 235 | if( ioctl( cameraFd, VIDIOC_DQBUF, &buf ) == -1 ) 236 | { 237 | exitError( "VIDIOC_DQBUF" ); 238 | } 239 | 240 | // Save captured frame to file 241 | printf( "Frame captured, saving to file...\n" ); 242 | 243 | int const outputFd = open( FRAME_BIN_FILE, O_WRONLY | O_CREAT, 0644 ); 244 | if( outputFd == -1 ) 245 | { 246 | exitError( "open output file" ); 247 | } 248 | 249 | if( write( outputFd, pBuffers[buf.index], get_bytesused(&buf) ) == -1 ) 250 | { 251 | exitError( "writing frame to file" ); 252 | } 253 | 254 | printf( "Captured frame written to %s\n", FRAME_BIN_FILE ); 255 | 256 | // stop capture 257 | if( ioctl( cameraFd, VIDIOC_STREAMOFF, &bufferType ) == -1 ) 258 | { 259 | exitError( "VIDIOC_STREAMOFF" ); 260 | } 261 | 262 | // For simplicity of the example, we let the system implicitly munmap 263 | // buffers and close files at exit 264 | 265 | return 0; 266 | } 267 | --------------------------------------------------------------------------------
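
Note (not part of the repository): the Troubleshooting section of Trigger/README.md explains that a VIDIOC_DQBUF call blocked on a hardware trigger can be released by switching the trigger source to software from a second thread and generating a software trigger event. The C sketch below illustrates that workaround under stated assumptions: the file and function names are invented for this illustration, the camera is assumed to be configured and streaming exactly as in hardware_trigger.c, and ctrlFd is assumed to be the same control file descriptor used there (camera or subdevice node).

/* unblock_dqbuf.c -- illustrative sketch only, not part of the repository.
 * Releases another thread that is blocked in VIDIOC_DQBUF waiting for a hardware
 * trigger, by re-routing the trigger source to software and firing one software
 * trigger (see Trigger/README.md, Troubleshooting). Compile with -pthread.
 */
#include "alvium_trigger.h"
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <pthread.h>
#include <stdio.h>

void *unblock_dqbuf(void *arg)
{
    int const ctrlFd = *(int const *)arg;

    // Re-route the trigger source to software while another thread waits in VIDIOC_DQBUF
    struct v4l2_control source = { .id = V4L2_CID_TRIGGER_SOURCE,
                                   .value = V4L2_TRIGGER_SOURCE_SOFTWARE };
    if (ioctl(ctrlFd, VIDIOC_S_CTRL, &source) == -1)
    {
        perror("setting trigger source to software");
        return NULL;
    }

    // Generate one software trigger event; the blocked DQBUF then receives a frame and returns
    struct v4l2_control fire = { .id = V4L2_CID_TRIGGER_SOFTWARE };
    if (ioctl(ctrlFd, VIDIOC_S_CTRL, &fire) == -1)
    {
        perror("V4L2_CID_TRIGGER_SOFTWARE");
    }

    return NULL;
}

/* Possible use, e.g. on a shutdown request while the main thread waits in VIDIOC_DQBUF:
 *
 *     pthread_t tid;
 *     pthread_create(&tid, NULL, unblock_dqbuf, (void *)&ctrlFd);
 *     // ... VIDIOC_DQBUF in the main thread returns once the frame arrives ...
 *     pthread_join(tid, NULL);
 */

Both control writes go through VIDIOC_S_CTRL on the control file descriptor, in the same order in which software_trigger.c configures and fires the software trigger.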