├── .gitattributes ├── .gitignore ├── CMakeLists.txt ├── Dockerfile_Ubuntu2004 ├── Dockerfile_Ubuntu2204 ├── LICENSE ├── README.md ├── cmake ├── AddInstallRPATHSupport.cmake ├── AddUninstallTarget.cmake └── InstallBasicPackageFiles.cmake ├── cpp_tools ├── CMakeLists.txt ├── atis3-bridge │ ├── CMakeLists.txt │ ├── README.MD │ └── atis-bridge-sdk.cpp ├── calibration │ ├── CMakeLists.txt │ ├── README.MD │ ├── calibrate.cpp │ └── stereo-calibrate.cpp ├── hexViewer │ ├── CMakeLists.txt │ ├── README.MD │ └── vHexviewer.cpp ├── log2vid │ ├── CMakeLists.txt │ ├── README.MD │ └── log2vid.cpp ├── vFramer │ ├── CMakeLists.txt │ ├── README.MD │ ├── drawers.cpp │ ├── drawers.h │ ├── vFramer.cpp │ └── vFramer.xml ├── vPreProcess │ ├── CMakeLists.txt │ ├── README.MD │ ├── audio.h │ ├── imu.h │ ├── skin.h │ ├── vPreProcess.cpp │ └── vision.h ├── zynqGrabber │ ├── CMakeLists.txt │ ├── README.MD │ ├── hpuDevice.h │ ├── hpuInterface.h │ ├── vsctrlDevice.h │ ├── vsctrlInterface.h │ ├── zynqGrabber.ini │ └── zynqModule.cpp └── zynqGrabber_deprec │ ├── CMakeLists.txt │ ├── README.MD │ ├── bias_files │ ├── ATIS_OK_HS.ini │ ├── ATIS_OK_std.ini │ ├── ATIS_chiara.ini │ ├── ATIS_chris1.ini │ ├── ATIS_chris2.ini │ ├── ATIS_icubgenova02.ini │ ├── ATIS_orig.ini │ ├── chris_WS_current.ini │ └── icra_demo.ini │ ├── include │ ├── deviceRegisters.h │ ├── hpuInterface.h │ ├── skinController.h │ ├── visCtrlATIS1.h │ ├── visCtrlATIS3.h │ ├── visionController.h │ └── zynqGrabberModule.h │ ├── src │ ├── hpuInterface.cpp │ ├── skinController.cpp │ ├── visCtrlATIS1.cpp │ ├── visCtrlATIS3.cpp │ ├── visionController.cpp │ └── zynqGrabberModule.cpp │ ├── zynqGrabber.ini │ └── zynqGrabber.xml ├── documentation ├── 1viewer.md ├── README.md ├── SpiNNakerips.md ├── application_instructions │ ├── 3a.dualcam.md │ ├── 3b.autosaccade.md │ ├── 4balldemo.md │ ├── 5corners.md │ └── 6vergence.md ├── calibration.png ├── calibration │ ├── README.md │ ├── RealSense_conf.ini │ ├── stereoCalib.ini │ ├── stereoCalib.xml │ ├── stereo_calibration.md │ ├── supervisor.md │ └── video_checkboard.mp4 ├── camera_dumping.md ├── checkingcalibration.png ├── connect_to_zcb.md ├── datasets.md ├── docker.md ├── eventcodecs.md ├── example-module-py │ └── example-module.py ├── example-module │ ├── CMakeLists.txt │ ├── README.md │ ├── app_example-module.xml │ ├── example-module.cpp │ └── example-module.ini ├── example_module.md ├── full_installation.md ├── howtosetupSD.md ├── setup_yarpserver.md └── zynqGrabber.md ├── ev2 ├── CMakeLists.txt └── event-driven │ ├── algs.h │ ├── algs │ ├── corner.cpp │ ├── corner.h │ ├── flow.cpp │ ├── flow.h │ ├── surface.cpp │ └── surface.h │ ├── core.h │ ├── core │ ├── codec.cpp │ ├── codec.h │ ├── comms.cpp │ ├── comms.h │ ├── utilities.cpp │ └── utilities.h │ ├── vis.h │ └── vis │ ├── IPT.cpp │ ├── IPT.h │ ├── draw.cpp │ ├── draw.h │ ├── filters.cpp │ ├── filters.h │ ├── vDraw.h │ ├── vDrawSkin.h │ ├── vDraw_ISO.cpp │ ├── vDraw_basic.cpp │ └── vDraw_skin.cpp ├── gpl.txt ├── lgpl.txt ├── python_tools ├── README.md ├── ev2converter.py └── plot_imu_dump.py └── yarpdataplayer.patch /.gitattributes: -------------------------------------------------------------------------------- 1 | *.txt whitespace=trailing-space,space-before-tab,tab-in-indent 2 | *.md whitespace=trailing-space,space-before-tab,tab-in-indent 3 | *.dox whitespace=trailing-space,space-before-tab,tab-in-indent 4 | *.qml whitespace=trailing-space,space-before-tab,tab-in-indent 5 | *.msg whitespace=trailing-space,space-before-tab,tab-in-indent 6 | 7 | *.c 
whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=4 8 | *.h whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=4 9 | *.cpp whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=4 10 | *.hpp whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=4 11 | 12 | CMakeLists.txt whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=2 13 | *.cmake whitespace=trailing-space,space-before-tab,tab-in-indent,tabwidth=2 14 | 15 | *.png binary 16 | *.jpg binary 17 | *.gif binary 18 | *.tiff binary 19 | *.ico binary 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *.asv 3 | CMakeLists.txt.user 4 | CMakeLists.txt.user.* 5 | *.autosave 6 | *.orig 7 | *.swp 8 | .vscode 9 | 10 | # ignore all temporary files 11 | *~ 12 | 13 | #ignore objects and .a files 14 | *.[oa] 15 | 16 | #ignore the build directory 17 | build/ 18 | build-debug/ 19 | build-x86_64/ 20 | build-ubuntu/ 21 | wsl-build/ 22 | build-pc104/ 23 | debug/ 24 | 25 | *.autosave 26 | .idea/ 27 | documentation/images/* 28 | *.pyc 29 | 30 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # Copyright: (C) 2019 EDPR - Istituto Italiano di Tecnologia 2 | # Authors: Arren Glover, Daniele Domenichelli 3 | # CopyPolicy: Released under the terms of the GNU GPL v3.0. 4 | 5 | cmake_minimum_required(VERSION 3.5) 6 | project(event-driven 7 | LANGUAGES CXX C 8 | VERSION 2.0) 9 | include(GNUInstallDirs) 10 | 11 | #options 12 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_BINDIR}") 13 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_LIBDIR}") 14 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_LIBDIR}") 15 | set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) 16 | if(MSVC) 17 | set(CMAKE_DEBUG_POSTFIX "d") 18 | endif() 19 | set(CMAKE_POSITION_INDEPENDENT_CODE ON) 20 | set(CMAKE_C_EXTENSIONS OFF) 21 | set(CMAKE_CXX_EXTENSIONS OFF) 22 | option(BUILD_SHARED_LIBS "Build libraries as shared as opposed to static" ON) 23 | set(CONTEXT_DIR event-driven) 24 | set(EVENTDRIVEN_LIBRARY event-driven) 25 | 26 | #YCM and cmake search path options 27 | message(STATUS "Install Directory: ${CMAKE_INSTALL_PREFIX}") 28 | find_package(YCM QUIET) 29 | if(YCM_FOUND) 30 | message(STATUS "Found YCM: (found version ${YCM_VERSION})") 31 | else() 32 | list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) 33 | endif() 34 | include(AddInstallRPATHSupport) 35 | add_install_rpath_support(BIN_DIRS "${CMAKE_INSTALL_FULL_BINDIR}" 36 | LIB_DIRS "${CMAKE_INSTALL_FULL_LIBDIR}" 37 | INSTALL_NAME_DIR "${CMAKE_INSTALL_FULL_LIBDIR}" 38 | USE_LINK_PATH) 39 | if(NOT CMAKE_CONFIGURATION_TYPES) 40 | if(NOT CMAKE_BUILD_TYPE) 41 | message(STATUS "Setting build type to 'Release' as none was specified.") 42 | set_property(CACHE CMAKE_BUILD_TYPE PROPERTY VALUE "Release") 43 | endif() 44 | endif() 45 | 46 | set(YARP_COMPONENTS os sig math dev) 47 | set(OpenCV_dep "") 48 | 49 | # opencv 50 | find_package(OpenCV QUIET) 51 | if(OpenCV_FOUND) 52 | message(STATUS "Found OpenCV: (found version ${OpenCV_VERSION})") 53 | set(OpenCV_dep OpenCV) 54 | list(APPEND YARP_COMPONENTS cv) 55 | endif() 56 | 57 | #YARP 58 | find_package(YARP COMPONENTS ${YARP_COMPONENTS} REQUIRED) 59 | 
yarp_configure_external_installation(event-driven) 60 | 61 | #metavision packages (prophesee is old version) 62 | find_package(MetavisionSDK COMPONENTS core driver QUIET) 63 | if(MetavisionSDK_FOUND) 64 | message(STATUS "Found MetavisionSDK: (found version ${MetavisionSDK_VERSION})") 65 | endif() 66 | find_package(prophesee_core QUIET) 67 | if(prophesee_core_FOUND) 68 | message(STATUS "Found prophesee_core: (found version ${prophesee_core_VERSION})") 69 | endif() 70 | 71 | #build the library 72 | 73 | 74 | #set options for the timing and encoding 75 | set(VLIB_ENABLE_TS OFF CACHE BOOL "events have individual timestamps") 76 | set(VLIB_CLOCK_PERIOD_NS 80 CACHE STRING "event timestamp clock period (ns)") 77 | set(VLIB_TIMER_BITS 30 CACHE STRING "event timestamp maximum = 2^TIMERBITS") 78 | string(COMPARE GREATER ${VLIB_TIMER_BITS} 31 TOOMANYBITSINCOUNTER) 79 | if(TOOMANYBITSINCOUNTER) 80 | message( FATAL_ERROR "Maximum bits useable is 31 (VLIB_TIMER_BITS)") 81 | endif() 82 | if(${VLIB_ENABLE_TS}) 83 | message(STATUS "ON: individual event timestamps") 84 | message(STATUS "EVENT CLOCK: ${VLIB_CLOCK_PERIOD_NS} ns") 85 | message(STATUS "MAX TIMESTAMP: ${VLIB_TIMER_BITS}/32 bits") 86 | else() 87 | message(STATUS "OFF: individual event timestamps") 88 | endif() 89 | 90 | add_subdirectory(ev2) 91 | 92 | #build additional tools 93 | set(VLIB_FORCESLIM OFF CACHE BOOL "force lightweight build") 94 | if(VLIB_FORCESLIM) 95 | message(STATUS "OFF: applications using opencv and drivers not compiled") 96 | set(OpenCV_FOUND OFF) 97 | set(MetavisionSDK_FOUND OFF) 98 | set(prophesee_core_FOUND OFF) 99 | endif() 100 | 101 | add_subdirectory(cpp_tools) 102 | 103 | #install the package 104 | include(InstallBasicPackageFiles) 105 | install_basic_package_files(${PROJECT_NAME} 106 | EXPORT eventdriven 107 | VERSION ${${PROJECT_NAME}_VERSION} 108 | COMPATIBILITY SameMajorVersion 109 | LOWERCASE_FILENAMES 110 | NAMESPACE ev:: 111 | DEPENDENCIES "YARP COMPONENTS os sig" ${OpenCV_dep} 112 | INCLUDE_CONTENT "yarp_configure_external_installation(event-driven NO_PATH_D)") 113 | 114 | include(AddUninstallTarget) 115 | -------------------------------------------------------------------------------- /Dockerfile_Ubuntu2004: -------------------------------------------------------------------------------- 1 | # base image Ubuntu 20.04 2 | FROM ubuntu:focal 3 | 4 | ENV DEBIAN_FRONTEND=noninteractive 5 | 6 | ARG CODE_DIR=/usr/local/src 7 | 8 | #basic environment 9 | RUN apt update && apt install -y \ 10 | ca-certificates \ 11 | build-essential \ 12 | git \ 13 | cmake \ 14 | cmake-curses-gui \ 15 | libace-dev \ 16 | libassimp-dev \ 17 | libglew-dev \ 18 | libglfw3-dev \ 19 | libglm-dev \ 20 | libeigen3-dev 21 | 22 | # Suggested dependencies for YARP 23 | RUN apt update && apt install -y \ 24 | qtbase5-dev qtdeclarative5-dev qtmultimedia5-dev \ 25 | qml-module-qtquick2 qml-module-qtquick-window2 \ 26 | qml-module-qtmultimedia qml-module-qtquick-dialogs \ 27 | qml-module-qtquick-controls qml-module-qt-labs-folderlistmodel \ 28 | qml-module-qt-labs-settings \ 29 | libqcustomplot-dev \ 30 | libgraphviz-dev \ 31 | libjpeg-dev \ 32 | libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \ 33 | gstreamer1.0-plugins-base \ 34 | gstreamer1.0-plugins-good \ 35 | gstreamer1.0-plugins-bad \ 36 | gstreamer1.0-libav 37 | 38 | # Add Metavision SDK 3.0 in sources.list 39 | RUN echo "deb [arch=amd64 trusted=yes] https://apt.prophesee.ai/dists/public/b4b3528d/ubuntu focal sdk" >> /etc/apt/sources.list &&\ 40 | apt update 41 | 42 | RUN apt install -y 
\ 43 | libcanberra-gtk-module \ 44 | mesa-utils \ 45 | ffmpeg \ 46 | libboost-program-options-dev \ 47 | libopencv-dev \ 48 | metavision-sdk 49 | 50 | # YCM 51 | ARG YCM_VERSION=v0.15.2 52 | RUN cd $CODE_DIR && \ 53 | git clone --depth 1 --branch $YCM_VERSION https://github.com/robotology/ycm.git && \ 54 | cd ycm && \ 55 | mkdir build && cd build && \ 56 | cmake .. && \ 57 | make -j `nproc` install 58 | 59 | # YARP 60 | ARG YARP_VERSION=v3.8.0 61 | RUN cd $CODE_DIR && \ 62 | git clone --depth 1 --branch $YARP_VERSION https://github.com/robotology/yarp.git &&\ 63 | cd yarp &&\ 64 | mkdir build && cd build &&\ 65 | cmake .. &&\ 66 | make -j `nproc` install 67 | 68 | EXPOSE 10000/tcp 10000/udp 69 | RUN yarp check 70 | 71 | 72 | # event-driven 73 | ARG ED_VERSION=main 74 | RUN cd $CODE_DIR &&\ 75 | git clone --depth 1 --branch $ED_VERSION https://github.com/robotology/event-driven.git &&\ 76 | cd event-driven &&\ 77 | mkdir build && cd build &&\ 78 | cmake .. &&\ 79 | make -j `nproc` install 80 | 81 | # Add User ID and Group ID 82 | ARG UNAME=event-driven 83 | ARG UID=1000 84 | ARG GID=1000 85 | RUN groupadd -g $GID -o $UNAME 86 | RUN useradd -m -u $UID -g $GID -o -s /bin/bash $UNAME 87 | 88 | # Change user and working directory 89 | USER $UNAME 90 | WORKDIR /home/${UNAME} -------------------------------------------------------------------------------- /Dockerfile_Ubuntu2204: -------------------------------------------------------------------------------- 1 | # base image Ubuntu 22.04 2 | FROM ubuntu:jammy 3 | 4 | ENV DEBIAN_FRONTEND=noninteractive 5 | 6 | ARG CODE_DIR=/usr/local/src 7 | 8 | #basic environment 9 | RUN apt update && apt install -y \ 10 | ca-certificates \ 11 | build-essential \ 12 | git \ 13 | cmake \ 14 | cmake-curses-gui \ 15 | libace-dev \ 16 | libassimp-dev \ 17 | libglew-dev \ 18 | libglfw3-dev \ 19 | libglm-dev \ 20 | libeigen3-dev 21 | 22 | # Suggested dependencies for YARP 23 | RUN apt update && apt install -y \ 24 | qtbase5-dev qtdeclarative5-dev qtmultimedia5-dev \ 25 | qml-module-qtquick2 qml-module-qtquick-window2 \ 26 | qml-module-qtmultimedia qml-module-qtquick-dialogs \ 27 | qml-module-qtquick-controls qml-module-qt-labs-folderlistmodel \ 28 | qml-module-qt-labs-settings \ 29 | libqcustomplot-dev \ 30 | libgraphviz-dev \ 31 | libjpeg-dev \ 32 | libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \ 33 | gstreamer1.0-plugins-base \ 34 | gstreamer1.0-plugins-good \ 35 | gstreamer1.0-plugins-bad \ 36 | gstreamer1.0-libav 37 | 38 | # Add Metavision SDK 4.6 in sources.list 39 | RUN echo "deb [arch=amd64 trusted=yes] https://apt.prophesee.ai/dists/public/baiTh5si/ubuntu jammy sdk" >> /etc/apt/sources.list &&\ 40 | apt update 41 | 42 | RUN apt install -y \ 43 | libcanberra-gtk-module \ 44 | mesa-utils \ 45 | ffmpeg \ 46 | libboost-program-options-dev \ 47 | libopencv-dev \ 48 | metavision-sdk 49 | 50 | # YCM 51 | ARG YCM_VERSION=v0.15.2 52 | RUN cd $CODE_DIR && \ 53 | git clone --depth 1 --branch $YCM_VERSION https://github.com/robotology/ycm.git && \ 54 | cd ycm && \ 55 | mkdir build && cd build && \ 56 | cmake .. && \ 57 | make -j `nproc` install 58 | 59 | # YARP 60 | ARG YARP_VERSION=v3.8.0 61 | RUN cd $CODE_DIR && \ 62 | git clone --depth 1 --branch $YARP_VERSION https://github.com/robotology/yarp.git &&\ 63 | cd yarp &&\ 64 | mkdir build && cd build &&\ 65 | cmake .. 
&&\ 66 | make -j `nproc` install 67 | 68 | EXPOSE 10000/tcp 10000/udp 69 | RUN yarp check 70 | 71 | 72 | # event-driven 73 | ARG ED_VERSION=main 74 | RUN cd $CODE_DIR &&\ 75 | git clone --depth 1 --branch $ED_VERSION https://github.com/robotology/event-driven.git &&\ 76 | cd event-driven &&\ 77 | mkdir build && cd build &&\ 78 | cmake .. &&\ 79 | make -j `nproc` install 80 | 81 | # Add User ID and Group ID 82 | ARG UNAME=event-driven 83 | ARG UID=1000 84 | ARG GID=1000 85 | RUN groupadd -g $GID -o $UNAME 86 | RUN useradd -m -u $UID -g $GID -o -s /bin/bash $UNAME 87 | 88 | # Change user and working directory 89 | USER $UNAME 90 | WORKDIR /home/${UNAME} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, Event Driven Perception for Robotics 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | | Read the [Documentation](http://robotology.github.io/event-driven/doxygen/doc/html/index.html) | Download the [Code](https://github.com/robotology/event-driven) | 2 | 3 | # event-driven 4 | 5 | _YARP integration for event-cameras and other neuromorphic sensors_ 6 | 7 | https://user-images.githubusercontent.com/9265237/222401464-73a9beaa-a1b6-4518-ae53-5bac5dfaeb9d.mp4 8 | 9 | Libraries that handle neuromorphic sensors, such as the dynamic vision sensor, installed on the iCub can be found here, along with algorithms to process the event-based data. Examples include, optical flow, corner detection and ball detection. Demo applications for the iCub robot, and tutorials for running them, include saccading and attention, gaze following a ball, and vergence control. 
10 | ``` 11 | @article{Glover2017b, 12 | author = {Glover, Arren and Vasco, Valentina and Iacono, Massimiliano and Bartolozzi, Chiara}, 13 | doi = {10.3389/frobt.2017.00073}, 14 | journal = {Frontiers in Robotics and AI}, 15 | pages = {73}, 16 | title = {{The event-driven Software Library for YARP — With Algorithms and iCub Applications}}, 17 | volume = {4}, 18 | year = {2018} 19 | } 20 | ``` 21 | ## Libraries 22 | 23 | Event-driven libraries provide basic functionality for handling events in a YARP environment (a minimal usage sketch is given at the end of this README). The library has definitions for: 24 | * core 25 | * codecs to encode/decode events to be compatible with address event representation (AER) formats. 26 | * sending packets of events in `ev::packet`, which is compatible with yarpdatadumper and yarpdataplayer. 27 | * asynchronous reading and writing ports that ensure data is never lost and give access to latency information. 28 | * helper functions to handle event timestamp wrapping and to convert between timestamps and seconds. 29 | * vision 30 | * filters for removing salt-and-pepper noise. 31 | * sparse event warping using camera intrinsic parameters and extrinsic parameters for a stereo pair. 32 | * methods to draw events onto the screen in a variety of styles. 33 | * algorithms 34 | * event surfaces such as the Surface of Active Events (SAE), Polarity Integrated Images (PIM), and the Exponentially Reduced Ordinal Surface (EROS) 35 | * corner detection 36 | * optical flow 37 | 38 | ## TOOLS 39 | 40 | * [**vFramer**](https://github.com/robotology/event-driven/tree/ev2-dev/cpp_tools/vFramer) - visualisation of events streamed over a YARP port. Various methods for visualisation are available. 41 | * [**calibration**](https://github.com/robotology/event-driven/tree/ev2-dev/cpp_tools/calibration) - estimating the camera intrinsic parameters 42 | * [**vPreProcess**](https://github.com/robotology/event-driven/tree/ev2-dev/cpp_tools/vPreProcess) - splitting different event-types into separate event-streams, performing filtering, and simple augmentations (flipping etc.) 43 | * [**atis-bridge**](https://github.com/robotology/event-driven/tree/ev2-dev/cpp_tools/atis3-bridge) - bridge between the Prophesee ATIS cameras and YARP 44 | * [**zynqGrabber**](https://github.com/robotology/event-driven/tree/ev2-dev/cpp_tools/zynqGrabber) - bridge between the zynq-based FPGA sensor interface and YARP 45 | 46 | ## Applications 47 | 48 | Applications that implement the algorithms available in `event-driven` can be found in our companion repository 49 | 50 | [event-driven-demos](https://github.com/event-driven-robotics/event-driven-demos) 51 | 52 | ## How to Install: 53 | 54 | [Comprehensive instructions available for installation](http://robotology.github.io/event-driven/doxygen/doc/html/pages.html). 55 | 56 | ## References 57 | 58 | Glover, A., and Bartolozzi C. (2016) *Event-driven ball detection and gaze fixation in clutter*. In IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), October 2016, Daejeon, Korea. **Finalist for RoboCup Best Paper Award** 59 | 60 | Vasco V., Glover A., and Bartolozzi C. (2016) *Fast event-based Harris corner detection exploiting the advantages of event-driven cameras*. In IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), October 2016, Daejeon, Korea. 61 | 62 | V. Vasco, A. Glover, Y. Tirupachuri, F. Solari, M. Chessa, and Bartolozzi C. (2016) *Vergence control with a neuromorphic iCub*. In IEEE-RAS International Conference on Humanoid Robots (Humanoids), November 2016, Mexico. 
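## Minimal usage sketch

The snippet below is a rough illustration of the port/packet pattern described in the Libraries section, following the writer code visible in `cpp_tools/vPreProcess` (`ev::BufferedPort`, `ev::packet`, `prepare()`, `push_back()`, `duration()`, `envelope()`, `write()`). The include path, the `ev::AE` event type and its field names, the template parameter, and the port name are assumptions for illustration only; substitute the event type and names used by your module.

```cpp
// Minimal sketch (not part of the repository): stream one packet of events over YARP.
// Assumed for illustration: the <event-driven/core.h> include path, the ev::AE type,
// its x/y/p fields, the ev::BufferedPort template parameter, and the port name.
#include <yarp/os/Network.h>
#include <yarp/os/Stamp.h>
#include <yarp/os/Time.h>
#include <event-driven/core.h>

int main()
{
    yarp::os::Network yarp;

    ev::BufferedPort<ev::AE> output;            // template parameter assumed
    if (!output.open("/example/AE:o"))          // port name is an example only
        return 1;

    ev::packet<ev::AE> &p = output.prepare();   // packet to be filled and sent
    ev::AE v;
    v.x = 10; v.y = 20; v.p = 1;                // field names assumed from the AER codec
    p.push_back(v);

    p.duration(0.001);                          // time covered by this packet (seconds)
    yarp::os::Stamp stamp(0, yarp::os::Time::now());
    p.envelope() = stamp;                       // attach the YARP envelope/timestamp
    output.write();                             // send asynchronously, no data lost

    output.close();
    return 0;
}
```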
63 | 64 | -------------------------------------------------------------------------------- /cmake/AddUninstallTarget.cmake: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2012-2021 Istituto Italiano di Tecnologia (IIT) 2 | # SPDX-FileCopyrightText: 2008-2013 Kitware Inc. 3 | # SPDX-License-Identifier: BSD-3-Clause 4 | 5 | #[=======================================================================[.rst: 6 | AddUninstallTarget 7 | ------------------ 8 | 9 | Add the "uninstall" target for your project:: 10 | 11 | include(AddUninstallTarget) 12 | 13 | 14 | will create a file ``cmake_uninstall.cmake`` in the build directory and add a 15 | custom target ``uninstall`` (or ``UNINSTALL`` on Visual Studio and Xcode) that 16 | will remove the files installed by your package (using 17 | ``install_manifest.txt``). 18 | See also 19 | https://gitlab.kitware.com/cmake/community/wikis/FAQ#can-i-do-make-uninstall-with-cmake 20 | 21 | The :module:`AddUninstallTarget` module must be included in your main 22 | ``CMakeLists.txt``. If included in a subdirectory it does nothing. 23 | This allows you to use it safely in your main ``CMakeLists.txt`` and include 24 | your project using ``add_subdirectory`` (for example when using it with 25 | :cmake:module:`FetchContent`). 26 | 27 | If the ``uninstall`` target already exists, the module does nothing. 28 | #]=======================================================================] 29 | 30 | 31 | # AddUninstallTarget works only when included in the main CMakeLists.txt 32 | if(NOT "${CMAKE_CURRENT_BINARY_DIR}" STREQUAL "${CMAKE_BINARY_DIR}") 33 | return() 34 | endif() 35 | 36 | # The name of the target is uppercase in MSVC and Xcode (for coherence with the 37 | # other standard targets) 38 | if("${CMAKE_GENERATOR}" MATCHES "^(Visual Studio|Xcode)") 39 | set(_uninstall "UNINSTALL") 40 | else() 41 | set(_uninstall "uninstall") 42 | endif() 43 | 44 | # If target is already defined don't do anything 45 | if(TARGET ${_uninstall}) 46 | return() 47 | endif() 48 | 49 | 50 | set(_filename cmake_uninstall.cmake) 51 | 52 | file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/${_filename}" 53 | "if(NOT EXISTS \"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\") 54 | message(WARNING \"Cannot find install manifest: \\\"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\\\"\") 55 | return() 56 | endif() 57 | 58 | file(READ \"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\" files) 59 | string(STRIP \"\${files}\" files) 60 | string(REGEX REPLACE \"\\n\" \";\" files \"\${files}\") 61 | list(REVERSE files) 62 | foreach(file \${files}) 63 | if(IS_SYMLINK \"\$ENV{DESTDIR}\${file}\" OR EXISTS \"\$ENV{DESTDIR}\${file}\") 64 | message(STATUS \"Uninstalling: \$ENV{DESTDIR}\${file}\") 65 | execute_process( 66 | COMMAND \${CMAKE_COMMAND} -E remove \"\$ENV{DESTDIR}\${file}\" 67 | OUTPUT_VARIABLE rm_out 68 | RESULT_VARIABLE rm_retval) 69 | if(NOT \"\${rm_retval}\" EQUAL 0) 70 | message(FATAL_ERROR \"Problem when removing \\\"\$ENV{DESTDIR}\${file}\\\"\") 71 | endif() 72 | else() 73 | message(STATUS \"Not-found: \$ENV{DESTDIR}\${file}\") 74 | endif() 75 | endforeach(file) 76 | ") 77 | 78 | set(_desc "Uninstall the project...") 79 | if(CMAKE_GENERATOR STREQUAL "Unix Makefiles") 80 | set(_comment COMMAND \$\(CMAKE_COMMAND\) -E cmake_echo_color --switch=$\(COLOR\) --cyan "${_desc}") 81 | else() 82 | set(_comment COMMENT "${_desc}") 83 | endif() 84 | add_custom_target(${_uninstall} 85 | ${_comment} 86 | COMMAND ${CMAKE_COMMAND} -P ${_filename} 87 | 
USES_TERMINAL 88 | BYPRODUCTS uninstall_byproduct 89 | WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}") 90 | set_property(SOURCE uninstall_byproduct PROPERTY SYMBOLIC 1) 91 | 92 | set_property(TARGET ${_uninstall} PROPERTY FOLDER "CMakePredefinedTargets") 93 | -------------------------------------------------------------------------------- /cpp_tools/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_subdirectory(zynqGrabber) 2 | add_subdirectory(hexViewer) 3 | 4 | #add_subdirectory(binaryDumper) 5 | #add_subdirectory(qadIMUcal) 6 | #add_subdirectory(esim-yarp) 7 | 8 | if(OpenCV_FOUND) 9 | add_subdirectory(vFramer) 10 | add_subdirectory(vPreProcess) 11 | add_subdirectory(calibration) 12 | add_subdirectory(log2vid) 13 | endif() 14 | 15 | if(prophesee_core_FOUND OR MetavisionSDK_FOUND) 16 | add_subdirectory(atis3-bridge) 17 | endif() 18 | -------------------------------------------------------------------------------- /cpp_tools/atis3-bridge/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | project(atis-bridge-sdk) 3 | 4 | add_executable(${PROJECT_NAME} atis-bridge-sdk.cpp) 5 | 6 | if(prophesee_core_FOUND) 7 | target_compile_definitions(${PROJECT_NAME} PRIVATE prophesee_core_FOUND) 8 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS 9 | YARP::YARP_init 10 | ev::event-driven 11 | prophesee_core 12 | prophesee_driver) 13 | else() 14 | target_compile_definitions(${PROJECT_NAME} PRIVATE MetavisionSDK_FOUND) 15 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS 16 | YARP::YARP_init 17 | ev::event-driven 18 | MetavisionSDK::core 19 | MetavisionSDK::driver) 20 | endif() 21 | 22 | target_compile_definitions(${PROJECT_NAME} PRIVATE $<$:ENABLE_TS=1>) 23 | 24 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 25 | -------------------------------------------------------------------------------- /cpp_tools/atis3-bridge/README.MD: -------------------------------------------------------------------------------- 1 | # atis-bridge 2 | 3 | A bridge application between Prophesee's ATIS camera SDK and YARP 4 | 5 | ### Usage 6 | 7 | `atis3-sdk-bridge` 8 | -------------------------------------------------------------------------------- /cpp_tools/calibration/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(ev-calibrate) 2 | 3 | add_executable(${PROJECT_NAME}-mono calibrate.cpp) 4 | 5 | target_include_directories(${PROJECT_NAME}-mono PRIVATE ${OpenCV_INCLUDE_DIRS}) 6 | target_link_libraries(${PROJECT_NAME}-mono PRIVATE YARP::YARP_os 7 | YARP::YARP_init 8 | ${OpenCV_LIBRARIES} 9 | ev::${EVENTDRIVEN_LIBRARY}) 10 | install(TARGETS ${PROJECT_NAME}-mono DESTINATION ${CMAKE_INSTALL_BINDIR}) 11 | 12 | add_executable(${PROJECT_NAME}-stereo stereo-calibrate.cpp) 13 | target_include_directories(${PROJECT_NAME}-stereo PRIVATE ${OpenCV_INCLUDE_DIRS}) 14 | target_link_libraries(${PROJECT_NAME}-stereo PRIVATE YARP::YARP_os 15 | YARP::YARP_init 16 | ${OpenCV_LIBRARIES} 17 | ev::${EVENTDRIVEN_LIBRARY}) 18 | install(TARGETS ${PROJECT_NAME}-stereo DESTINATION ${CMAKE_INSTALL_BINDIR}) 19 | 20 | -------------------------------------------------------------------------------- /cpp_tools/calibration/README.MD: -------------------------------------------------------------------------------- 1 | # calibration 2 | 3 | A simple opencv wrapper for calibration, combined with intellegent methods of forming images from events that 
enables calibration 4 | 5 | ### Usage 6 | `calibration` 7 | 8 | ### Guidelines 9 | 10 | There are many guidelines for performing stereo calibration on the internet. A few of them are listed here. 11 | 12 | - The specific distance of the calibration pattern from the camera shouldn't really matter. Calibration must be performed with fixed focus. 13 | - Having a checkerboard with a larger number of squares is beneficial, as there are more corner points to extract. This enables us to get more 3D-2D point correspondences (the size of the squares shouldn't make a difference). 14 | - Different points of view and angles are needed. Calibration estimates the focal length and distortion parameters by a least-squares method, for which different angles of the checkerboard are needed to obtain a better solution. 15 | - A sufficiently wide border around the calibration pattern is required. 16 | - A large pattern is better for cameras with a larger baseline, as the stereo overlap is smaller. 17 | - Some calibration toolboxes require the input checkerboard dimensions to be either odd number x even number (e.g. 9x6) or vice versa (e.g. 8x7). 18 | - Preferably the checkerboard should cover at least half of the area of the image. 19 | - The calibration pattern must be moved to cover the entire FOV of the camera to obtain a better calibration. Skew helps in focal length determination, and moving the checkerboard towards the corners helps in determining the distortion coefficients. 20 | - Good lighting conditions are of utmost importance and are often overlooked. 21 | - Some sources say circles are easier to localize than corner points, and using a circle pattern might lead to better calibration. 22 | - Humidity changes might affect normal paper, as it absorbs moisture. Thick paper must be used, the calibration pattern should be printed with a laser printer, and it should preferably be stuck on a glass backing. 23 | 24 | -------------------------------------------------------------------------------- /cpp_tools/hexViewer/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(vHexviewer) 2 | 3 | add_executable(${PROJECT_NAME} vHexviewer.cpp) 4 | 5 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS 6 | YARP::YARP_init 7 | ev::${EVENTDRIVEN_LIBRARY}) 8 | 9 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 10 | -------------------------------------------------------------------------------- /cpp_tools/hexViewer/README.MD: -------------------------------------------------------------------------------- 1 | # hex-view 2 | 3 | A terminal-based application to read and debug event-streams 4 | 5 | ### Usage 6 | 7 | `hexView` 8 | -------------------------------------------------------------------------------- /cpp_tools/log2vid/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(vLog2vid) 2 | 3 | add_executable(${PROJECT_NAME} log2vid.cpp) 4 | 5 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_OS 6 | YARP::YARP_init 7 | ${OpenCV_LIBRARIES} 8 | ev::${EVENTDRIVEN_LIBRARY}) 9 | 10 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 11 | -------------------------------------------------------------------------------- /cpp_tools/log2vid/README.MD: -------------------------------------------------------------------------------- 1 | # log2vid 2 | 3 | Turn your event log into a video!
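For example, assuming a recorded logfile (the path below is a placeholder), a video can be produced with the installed executable (named `vLog2vid` in the CMakeLists): `vLog2vid --file /path/to/data.log --out ~/Downloads/events.mp4 --fps 60`. The full set of options is listed below.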
4 | 5 | ### Usage 6 | 7 | USAGE: 8 | --file logfile path 9 | --out output video [~/Downloads/events.mp4] 10 | --fps frames per second of output video [240] 11 | --rate speed-up/slow-down factor [1.0] 12 | --height video height [720] 13 | --width video width [1280] 14 | --vis show conversion process [false] 15 | METHOD: iso [default] 16 | --window seconds of window length [0.5] 17 | METHOD: --tw 18 | --window seconds of window length [0.01] 19 | METHOD: --scarf 20 | --block_size array dimension [14] 21 | --alpha events accumulation factor [1.0] 22 | --C intensity [0.3] 23 | -------------------------------------------------------------------------------- /cpp_tools/vFramer/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(vFramer) 2 | 3 | add_executable(${PROJECT_NAME} vFramer.cpp drawers.cpp drawers.h) 4 | 5 | target_include_directories(${PROJECT_NAME} PRIVATE ${OpenCV_INCLUDE_DIRS}) 6 | 7 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_os 8 | YARP::YARP_init 9 | YARP::YARP_sig 10 | YARP::YARP_cv 11 | ${OpenCV_LIBRARIES} 12 | ev::${EVENTDRIVEN_LIBRARY}) 13 | 14 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 15 | 16 | # yarp_install(FILES ${PROJECT_NAME}.ini 17 | # DESTINATION ${EVENT-DRIVEN_CONTEXTS_INSTALL_DIR}/${CONTEXT_DIR}) 18 | 19 | -------------------------------------------------------------------------------- /cpp_tools/vFramer/README.MD: -------------------------------------------------------------------------------- 1 | # vFramer 2 | 3 | A visualisation application that reads events from a YARP port and displays them on screen 4 | 5 | ### Usage 6 | 7 | `vFramer` 8 | -------------------------------------------------------------------------------- /cpp_tools/vFramer/vFramer.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | vFramer 6 | processing 7 | Converts an event stream to an image 8 | Released under the terms of the GNU GPL v2.0 9 | 1.0 10 | 11 | 12 | Multiple "drawers" exist to visualise the event-stream in different ways and for different event types. 13 | 14 | 15 | 16 | name 17 | height 18 | width 19 | eventWindow 20 | frameRate 21 | displays 31 | flip 32 | 33 | 34 | 35 | Arren Glover 36 | 37 | 38 | 39 | 40 | vBottle 41 | /vFramer/vBottle:i 42 | 43 | Input event stream to be displayed 44 | 45 | 46 | 47 | 48 | ]]> > 49 | /]]>/vBottle:o 50 | 51 | Output image. The module creates as many ports, with the corresponding ]]>, as 52 | specified within the displays parameter. 
53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /cpp_tools/vPreProcess/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(vPreProcess) 2 | 3 | add_executable(${PROJECT_NAME} vPreProcess.cpp vision.h skin.h imu.h audio.h) 4 | 5 | target_include_directories(${PROJECT_NAME} PRIVATE ${OpenCV_INCLUDE_DIRS}) 6 | 7 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_os 8 | YARP::YARP_init 9 | ${OpenCV_LIBRARIES} 10 | ev::${EVENTDRIVEN_LIBRARY}) 11 | 12 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 13 | 14 | 15 | -------------------------------------------------------------------------------- /cpp_tools/vPreProcess/README.MD: -------------------------------------------------------------------------------- 1 | # pre-processing 2 | 3 | A pre-processing application capable of splitting event streams based on sensor id, simple filtering and simple manipulation (flipping etc.) 4 | 5 | ### Usage 6 | 7 | `vPreProcess` 8 | -------------------------------------------------------------------------------- /cpp_tools/vPreProcess/audio.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU General Public License 16 | * along with this program. If not, see . 17 | */ 18 | #pragma once 19 | #include 20 | #include 21 | 22 | class audioFunctions 23 | { 24 | private: 25 | ev::BufferedPort output; 26 | ev::packet* p{nullptr}; 27 | 28 | public: 29 | bool open(std::string mname) 30 | { 31 | std::string port_name = mname + "/cochlea/EAR:o"; 32 | if(!output.open(port_name)) { 33 | yError() << "Could not open" << port_name; 34 | return false; 35 | } 36 | p = &(output.prepare()); 37 | return true; 38 | } 39 | void process(ev::earEvent *datum) 40 | { 41 | if(p == nullptr) return; 42 | p->push_back(*datum); 43 | 44 | } 45 | void send(yarp::os::Stamp stamp, double duration) 46 | { 47 | if(p && p->size()) { 48 | p->duration(duration); 49 | p->envelope() = stamp; 50 | output.write(); 51 | p = &(output.prepare()); 52 | } 53 | 54 | } 55 | void close() 56 | { 57 | p = nullptr; 58 | output.unprepare(); 59 | output.close(); 60 | } 61 | 62 | }; -------------------------------------------------------------------------------- /cpp_tools/vPreProcess/imu.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 
9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU General Public License 16 | * along with this program. If not, see . 17 | */ 18 | #pragma once 19 | #include 20 | #include 21 | 22 | class imuFunctions 23 | { 24 | private: 25 | ev::BufferedPort output; 26 | ev::packet* p{nullptr}; 27 | 28 | public: 29 | bool open(std::string mname) 30 | { 31 | std::string port_name = mname + "/imu/IMUS:o"; 32 | if(!output.open(port_name)) { 33 | yError() << "Could not open" << port_name; 34 | return false; 35 | } 36 | p = &(output.prepare()); 37 | return true; 38 | } 39 | void process(ev::IMUS *datum) 40 | { 41 | if(p == nullptr) return; 42 | p->push_back(*datum); 43 | 44 | } 45 | void send(yarp::os::Stamp stamp, double duration) 46 | { 47 | if(p && p->size()) { 48 | p->duration(duration); 49 | p->envelope() = stamp; 50 | output.write(); 51 | p = &(output.prepare()); 52 | } 53 | 54 | } 55 | void close() 56 | { 57 | p = nullptr; 58 | output.unprepare(); 59 | output.close(); 60 | } 61 | 62 | }; -------------------------------------------------------------------------------- /cpp_tools/vPreProcess/skin.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU General Public License 16 | * along with this program. If not, see . 
17 | */ 18 | #pragma once 19 | #include 20 | #include 21 | 22 | 23 | class skinFunctions 24 | { 25 | private: 26 | 27 | ev::BufferedPort output_events; 28 | ev::packet* p_events{nullptr}; 29 | ev::BufferedPort output_samples; 30 | ev::packet* p_samples{nullptr}; 31 | 32 | bool open_events(std::string mname) 33 | { 34 | std::string port_name = mname + "/skin/SKE:o"; 35 | if(!output_events.open(port_name)) { 36 | yError() << "Could not open" << port_name; 37 | return false; 38 | } 39 | p_events = &(output_events.prepare()); 40 | return true; 41 | } 42 | 43 | bool open_samples(std::string mname) 44 | { 45 | std::string port_name = mname + "/skin/SKS:o"; 46 | if(!output_samples.open(port_name)) { 47 | yError() << "Could not open" << port_name; 48 | return false; 49 | } 50 | p_samples = &(output_samples.prepare()); 51 | return true; 52 | } 53 | 54 | public: 55 | 56 | bool open(std::string mname) 57 | { 58 | if(!open_events(mname)) 59 | return false; 60 | if(!open_samples(mname)) 61 | return false; 62 | return true; 63 | } 64 | 65 | void process(ev::encoded *datum) 66 | { 67 | static ev::skinSample ss; 68 | static bool expect_skin_value{false}; 69 | 70 | if(IS_SKSAMPLE(datum->data)) { 71 | if(p_samples) { 72 | if(IS_SSA(datum->data)) { //this is sent first 73 | ss.address = *(ev::skinAE *)datum; 74 | if(expect_skin_value) yError() << "mismatch skin samples"; 75 | expect_skin_value = true; 76 | } else { //IS_SSV -> this is sent second 77 | ss.value = *(ev::skinValue *)datum; 78 | if(expect_skin_value) p_samples->push_back(ss); 79 | else yError() << "mismatch skin samples"; 80 | expect_skin_value = false; 81 | } 82 | } 83 | } else { 84 | if(p_events) p_events->push_back(*(ev::skinAE *)datum); 85 | } 86 | 87 | } 88 | void send(yarp::os::Stamp stamp, double duration) 89 | { 90 | if(p_events && p_events->size()) { 91 | p_events->duration(duration); 92 | p_events->envelope() = stamp; 93 | output_events.write(); 94 | p_events = &(output_events.prepare()); 95 | } 96 | 97 | if (p_samples && p_samples->size()) { 98 | p_samples->duration(duration); 99 | p_samples->envelope() = stamp; 100 | output_samples.write(); 101 | p_samples = &(output_samples.prepare()); 102 | } 103 | } 104 | void close() 105 | { 106 | p_events = nullptr; 107 | output_events.unprepare(); 108 | output_events.close(); 109 | 110 | p_samples = nullptr; 111 | output_samples.unprepare(); 112 | output_samples.close(); 113 | } 114 | 115 | }; -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(zynqGrabber) 2 | 3 | add_executable(${PROJECT_NAME} zynqModule.cpp hpuInterface.h hpuDevice.h vsctrlInterface.h vsctrlDevice.h) 4 | 5 | target_compile_options(${PROJECT_NAME} PRIVATE -Wno-overflow) 6 | 7 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_os 8 | YARP::YARP_init 9 | ev::${EVENTDRIVEN_LIBRARY}) 10 | 11 | target_compile_definitions(${PROJECT_NAME} PRIVATE $<$:ENABLE_TS=1>) 12 | 13 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 14 | 15 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber/README.MD: -------------------------------------------------------------------------------- 1 | # zynq-grabber 2 | 3 | A bridge application between zynq-based FPGA modules and YARP 4 | 5 | ### Usage 6 | 7 | `zynqGrabber` 8 | -------------------------------------------------------------------------------- 
/cpp_tools/zynqGrabber/zynqGrabber.ini: -------------------------------------------------------------------------------- 1 | dataDevice /dev/iit-hpu0 2 | hpu_read 3 | packet_size 7500 #these are in number of events 4 | gtp 5 | split 6 | 7 | i2cVision /dev/i2c-0 8 | 9 | #visCtrlLeft /dev/i2c-0 10 | #visCtrlRight /dev/i2c-0 11 | #skinCtrl /dev/i2c-3 12 | 13 | #visLeftOn true 14 | #visRightOn false 15 | #left_off true 16 | #right_off true 17 | sensitivity 70 18 | refractory 1 19 | filter 0.0001 20 | 21 | [ATIS_ROI] 22 | x 0 23 | y 0 24 | width 640 25 | height 480 26 | 27 | 28 | [ATIS1_BIAS_LEFT] 29 | 30 | CtrlbiasLP 1800 967 620 31 | CtrlbiasLBBuff 1800 967 950 32 | CtrlbiasDelTD 1800 967 400 33 | CtrlbiasSeqDelAPS 1800 967 320 34 | CtrlbiasDelAPS 1800 967 350 35 | biasSendReqPdY 1800 967 850 36 | biasSendReqPdX 1800 967 1150 37 | CtrlbiasGB 1800 711 1150 38 | TDbiasReqPuY 1800 711 200 39 | TDbiasReqPuX 1800 711 1200 40 | APSbiasReqPuY 1800 711 1100 41 | APSbiasReqPuX 1800 711 830 42 | APSVrefL 3300 967 3000 43 | APSVrefH 3300 967 3200 44 | APSbiasOut 3300 967 660 45 | APSbiasHyst 3300 967 455 46 | APSbiasTail 3300 967 520 47 | TDbiasCas 3300 839 2000 48 | TDbiasInv 3300 967 800 49 | TDbiasDiffOff 3300 967 450 50 | TDbiasDiffOn 3300 967 625 51 | TDbiasDiff 3300 967 500 52 | TDbiasFo 3300 711 3050 53 | TDbiasRefr 3300 711 2850 54 | TDbiasPR 3300 711 2800 55 | TDbiasBulk 3300 711 2680 56 | biasBuf 3300 967 0 57 | biasAPSreset 3300 711 1000 58 | 59 | [ATIS1_BIAS_RIGHT] 60 | 61 | CtrlbiasLP 1800 967 620 62 | CtrlbiasLBBuff 1800 967 950 63 | CtrlbiasDelTD 1800 967 400 64 | CtrlbiasSeqDelAPS 1800 967 320 65 | CtrlbiasDelAPS 1800 967 350 66 | biasSendReqPdY 1800 967 850 67 | biasSendReqPdX 1800 967 1150 68 | CtrlbiasGB 1800 711 1150 69 | TDbiasReqPuY 1800 711 200 70 | TDbiasReqPuX 1800 711 1200 71 | APSbiasReqPuY 1800 711 1100 72 | APSbiasReqPuX 1800 711 830 73 | APSVrefL 3300 967 3000 74 | APSVrefH 3300 967 3200 75 | APSbiasOut 3300 967 660 76 | APSbiasHyst 3300 967 455 77 | APSbiasTail 3300 967 520 78 | TDbiasCas 3300 839 2000 79 | TDbiasInv 3300 967 800 80 | TDbiasDiffOff 3300 967 450 81 | TDbiasDiffOn 3300 967 625 82 | TDbiasDiff 3300 967 500 83 | TDbiasFo 3300 711 3050 84 | TDbiasRefr 3300 711 2850 85 | TDbiasPR 3300 711 2800 86 | TDbiasBulk 3300 711 2680 87 | biasBuf 3300 967 0 88 | biasAPSreset 3300 711 1000 89 | 90 | [DVS_BIAS_LEFT] 91 | 92 | cas 52458 93 | injg 101508 94 | reqPd 16777215 95 | pux 8053457 96 | diffoff 133 97 | req 160712 98 | refr 944 99 | puy 16777215 100 | diffon 639172 101 | diff 30108 102 | foll 20 103 | pr 5 104 | 105 | [DVS_BIAS_RIGHT] 106 | 107 | cas 52458 108 | injg 101508 109 | reqPd 16777215 110 | pux 8053457 111 | diffoff 133 112 | req 160712 113 | refr 944 114 | puy 16777215 115 | diffon 639172 116 | diff 30108 117 | foll 20 118 | pr 5 119 | 120 | [SKIN_CNFG] 121 | samplesTxEn true 122 | eventsTxEn true 123 | 124 | asrFilterType false 125 | asrFilterEn false 126 | egNthrEn true 127 | preprocSamples true 128 | preprocEg true 129 | driftCompEn false 130 | samplesTxMode true 131 | # enable 16 bits coding (true) or 8 bits coding (false) 132 | samplesRshift 0 133 | # if samples TxMode = false needs to set the shift value 134 | samplesSel false 135 | #Samples source (0: pre-proc, 1: post-preproc); 136 | resamplingTimeout 50 137 | #timebase 50ns 138 | 139 | evGenSel 1 140 | 141 | G1upthresh 0.1 142 | G1downthresh 0.1 143 | G1upnoise 12.0 144 | G1downnoise 12.0 145 | 146 | G2upthresh 30 147 | G2downthresh 30 148 | G2upnoise 50 149 | G2downnoise 50 150 | 151 | 
#evNeuralUseSA1 152 | SA1inhibit 524288 153 | SA1adapt 328 154 | SA1decay -328 155 | SA1rest 2621 156 | 157 | #evNeuralUseRA1 158 | RA1inhibit 327680 159 | RA1adapt 3 160 | RA1decay -6552 161 | RA1rest 65536 162 | 163 | evNeuralUseRA2 164 | RA2inhibit 327680 165 | RA2adapt 3 166 | RA2decay -3276 167 | RA2rest 2621 -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(zynqGrabber) 2 | 3 | file(GLOB source src/*.cpp) 4 | file(GLOB header include/*.h) 5 | file(GLOB biasfiles bias_files/*.ini) 6 | 7 | add_executable(${PROJECT_NAME} ${source} ${header}) 8 | 9 | message(STATUS "${PROJECT_SOURCE_DIR}") 10 | target_include_directories(${PROJECT_NAME} PRIVATE ${PROJECT_SOURCE_DIR}/include) 11 | 12 | target_compile_options(${PROJECT_NAME} PRIVATE -Wno-overflow) 13 | 14 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_os 15 | YARP::YARP_init 16 | ev::${EVENTDRIVEN_LIBRARY}) 17 | 18 | target_compile_definitions(${PROJECT_NAME} PRIVATE $<$:ENABLE_TS=1>) 19 | 20 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 21 | 22 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/README.MD: -------------------------------------------------------------------------------- 1 | # zynq-grabber 2 | 3 | A bridge application between zynq-based FPGA modules and YARP 4 | 5 | ### Usage 6 | 7 | `zynqGrabber` 8 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_OK_HS.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 350 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 | CtrlbiasGB 1800 711 1100 29 | TDbiasReqPuY 1800 711 830 30 | TDbiasReqPuX 1800 711 1070 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 930 33 | APSVrefL 3300 967 3050 34 | APSVrefH 3300 967 3150 35 | APSbiasOut 3300 967 750 36 | APSbiasHyst 3300 967 620 37 | APSbiasTail 3300 967 700 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 410 41 | TDbiasDiffOn 3300 967 790 42 | TDbiasDiff 3300 967 600 43 | TDbiasFo 3300 711 2950 44 | TDbiasRefr 3300 711 2900 45 | TDbiasPR 3300 711 2950 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 111 48 | biasAPSreset 3300 711 222 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 320 56 | CtrlbiasDelAPS 1800 967 350 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 1100 60 | TDbiasReqPuY 1800 711 830 61 | TDbiasReqPuX 1800 711 1070 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 930 64 | APSVrefL 3300 967 3050 65 | APSVrefH 3300 967 3150 66 | APSbiasOut 3300 967 750 67 | APSbiasHyst 3300 967 620 68 | APSbiasTail 3300 967 700 69 | TDbiasCas 3300 
839 2000 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 410 72 | TDbiasDiffOn 3300 967 790 73 | TDbiasDiff 3300 967 600 74 | TDbiasFo 3300 711 2950 75 | TDbiasRefr 3300 711 2900 76 | TDbiasPR 3300 711 2950 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 111 79 | biasAPSreset 3300 711 222 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_OK_std.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 350 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 | CtrlbiasGB 1800 711 970 29 | TDbiasReqPuY 1800 711 810 30 | TDbiasReqPuX 1800 711 1240 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 820 33 | APSVrefL 3300 967 3050 34 | APSVrefH 3300 967 3150 35 | APSbiasOut 3300 967 750 36 | APSbiasHyst 3300 967 620 37 | APSbiasTail 3300 967 700 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 520 41 | TDbiasDiffOn 3300 967 680 42 | TDbiasDiff 3300 967 550 43 | TDbiasFo 3300 711 3100 44 | TDbiasRefr 3300 711 2800 45 | TDbiasPR 3300 711 3150 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 950 48 | biasAPSreset 3300 711 620 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 320 56 | CtrlbiasDelAPS 1800 967 350 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 970 60 | TDbiasReqPuY 1800 711 810 61 | TDbiasReqPuX 1800 711 1240 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 820 64 | APSVrefL 3300 967 3050 65 | APSVrefH 3300 967 3150 66 | APSbiasOut 3300 967 750 67 | APSbiasHyst 3300 967 620 68 | APSbiasTail 3300 967 700 69 | TDbiasCas 3300 839 2000 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 520 72 | TDbiasDiffOn 3300 967 680 73 | TDbiasDiff 3300 967 550 74 | TDbiasFo 3300 711 3100 75 | TDbiasRefr 3300 711 2800 76 | TDbiasPR 3300 711 3150 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 950 79 | biasAPSreset 3300 711 620 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | 
-------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_chiara.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 350 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 | CtrlbiasGB 1800 711 1150 29 | TDbiasReqPuY 1800 711 200 30 | TDbiasReqPuX 1800 711 1200 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 830 33 | APSVrefL 3300 967 3000 34 | APSVrefH 3300 967 3200 35 | APSbiasOut 3300 967 660 36 | APSbiasHyst 3300 967 455 37 | APSbiasTail 3300 967 520 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 460 41 | TDbiasDiffOn 3300 967 620 42 | TDbiasDiff 3300 967 500 43 | TDbiasFo 3300 711 3050 44 | TDbiasRefr 3300 711 2850 45 | TDbiasPR 3300 711 3010 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 0 48 | biasAPSreset 3300 711 1000 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 320 56 | CtrlbiasDelAPS 1800 967 350 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 1150 60 | TDbiasReqPuY 1800 711 200 61 | TDbiasReqPuX 1800 711 1200 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 830 64 | APSVrefL 3300 967 3000 65 | APSVrefH 3300 967 3200 66 | APSbiasOut 3300 967 660 67 | APSbiasHyst 3300 967 455 68 | APSbiasTail 3300 967 520 69 | TDbiasCas 3300 839 2000 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 460 72 | TDbiasDiffOn 3300 967 620 73 | TDbiasDiff 3300 967 500 74 | TDbiasFo 3300 711 3050 75 | TDbiasRefr 3300 711 2850 76 | TDbiasPR 3300 711 3010 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 0 79 | biasAPSreset 3300 711 1000 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_chris1.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 350 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 
| CtrlbiasGB 1800 711 1150 29 | TDbiasReqPuY 1800 711 200 30 | TDbiasReqPuX 1800 711 1200 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 830 33 | APSVrefL 3300 967 3000 34 | APSVrefH 3300 967 3200 35 | APSbiasOut 3300 967 660 36 | APSbiasHyst 3300 967 455 37 | APSbiasTail 3300 967 520 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 450 41 | TDbiasDiffOn 3300 967 625 42 | TDbiasDiff 3300 967 500 43 | TDbiasFo 3300 711 3050 44 | TDbiasRefr 3300 711 2850 45 | TDbiasPR 3300 711 2800 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 0 48 | biasAPSreset 3300 711 1000 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 320 56 | CtrlbiasDelAPS 1800 967 350 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 1150 60 | TDbiasReqPuY 1800 711 200 61 | TDbiasReqPuX 1800 711 1200 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 830 64 | APSVrefL 3300 967 3000 65 | APSVrefH 3300 967 3200 66 | APSbiasOut 3300 967 660 67 | APSbiasHyst 3300 967 455 68 | APSbiasTail 3300 967 520 69 | TDbiasCas 3300 839 2000 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 450 72 | TDbiasDiffOn 3300 967 625 73 | TDbiasDiff 3300 967 500 74 | TDbiasFo 3300 711 3050 75 | TDbiasRefr 3300 711 2850 76 | TDbiasPR 3300 711 2800 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 0 79 | biasAPSreset 3300 711 1000 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_chris2.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 300 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 | CtrlbiasGB 1800 711 1150 29 | TDbiasReqPuY 1800 711 850 30 | TDbiasReqPuX 1800 711 1072 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 900 33 | APSVrefL 3300 967 3000 34 | APSVrefH 3300 967 3200 35 | APSbiasOut 3300 967 660 36 | APSbiasHyst 3300 967 455 37 | APSbiasTail 3300 967 520 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 450 41 | TDbiasDiffOn 3300 967 625 42 | TDbiasDiff 3300 967 500 43 | TDbiasFo 3300 711 3050 44 | TDbiasRefr 3300 711 2850 45 | TDbiasPR 3300 711 2800 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 0 48 | biasAPSreset 3300 711 0 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 320 56 | 
CtrlbiasDelAPS 1800 967 300 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 1150 60 | TDbiasReqPuY 1800 711 850 61 | TDbiasReqPuX 1800 711 1072 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 900 64 | APSVrefL 3300 967 3000 65 | APSVrefH 3300 967 3200 66 | APSbiasOut 3300 967 660 67 | APSbiasHyst 3300 967 455 68 | APSbiasTail 3300 967 520 69 | TDbiasCas 3300 839 2000 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 450 72 | TDbiasDiffOn 3300 967 625 73 | TDbiasDiff 3300 967 500 74 | TDbiasFo 3300 711 3050 75 | TDbiasRefr 3300 711 2850 76 | TDbiasPR 3300 711 2800 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 0 79 | biasAPSreset 3300 711 0 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_icubgenova02.ini: -------------------------------------------------------------------------------- 1 | \name /zynqGrabber 2 | verbose true 3 | aps false 4 | 5 | #these are in number of events 6 | dataDevice /dev/iit-hpu0 7 | hpu_read 8 | packet_size 7500 9 | 10 | visCtrlLeft /dev/i2c-2 11 | visCtrlRight /dev/i2c-2 12 | #skinCtrl /dev/i2c-3 13 | 14 | visLeftOn true 15 | visRightOn true 16 | 17 | [ATIS_BIAS_LEFT] 18 | 19 | CtrlbiasLP 1800 967 620 20 | CtrlbiasLBBuff 1800 967 950 21 | CtrlbiasDelTD 1800 967 400 22 | CtrlbiasSeqDelAPS 1800 967 320 23 | CtrlbiasDelAPS 1800 967 350 24 | biasSendReqPdY 1800 967 850 25 | biasSendReqPdX 1800 967 1150 26 | CtrlbiasGB 1800 711 1150 27 | TDbiasReqPuY 1800 711 200 28 | TDbiasReqPuX 1800 711 1200 29 | APSbiasReqPuY 1800 711 1100 30 | APSbiasReqPuX 1800 711 830 31 | APSVrefL 3300 967 3000 32 | APSVrefH 3300 967 3200 33 | APSbiasOut 3300 967 660 34 | APSbiasHyst 3300 967 455 35 | APSbiasTail 3300 967 520 36 | TDbiasCas 3300 839 2000 37 | TDbiasInv 3300 967 800 38 | TDbiasDiffOff 3300 967 420 39 | TDbiasDiffOn 3300 967 650 40 | TDbiasDiff 3300 967 500 41 | TDbiasFo 3300 711 3050 42 | TDbiasRefr 3300 711 2850 43 | TDbiasPR 3300 711 2800 44 | TDbiasBulk 3300 711 2680 45 | biasBuf 3300 967 0 46 | biasAPSreset 3300 711 1000 47 | 48 | [ATIS_BIAS_RIGHT] 49 | 50 | CtrlbiasLP 1800 967 620 51 | CtrlbiasLBBuff 1800 967 950 52 | CtrlbiasDelTD 1800 967 400 53 | CtrlbiasSeqDelAPS 1800 967 320 54 | CtrlbiasDelAPS 1800 967 350 55 | biasSendReqPdY 1800 967 850 56 | biasSendReqPdX 1800 967 1150 57 | CtrlbiasGB 1800 711 1150 58 | TDbiasReqPuY 1800 711 200 59 | TDbiasReqPuX 1800 711 1200 60 | APSbiasReqPuY 1800 711 1100 61 | APSbiasReqPuX 1800 711 830 62 | APSVrefL 3300 967 3000 63 | APSVrefH 3300 967 3200 64 | APSbiasOut 3300 967 660 65 | APSbiasHyst 3300 967 455 66 | APSbiasTail 3300 967 520 67 | TDbiasCas 3300 839 2000 68 | TDbiasInv 3300 967 800 69 | TDbiasDiffOff 3300 967 420 70 | TDbiasDiffOn 3300 967 650 71 | TDbiasDiff 3300 967 500 72 | TDbiasFo 3300 711 3050 73 | TDbiasRefr 3300 711 2850 74 | TDbiasPR 3300 711 2800 75 | TDbiasBulk 3300 711 2680 76 | biasBuf 3300 967 0 77 | biasAPSreset 3300 711 1000 78 | 79 | [SKIN_CNFG] 80 | samplesTxEn true 81 | eventsTxEn true 82 | 83 | evGenSel 1 
84 | 85 | G1upthresh 0.1 86 | G1downthresh 0.1 87 | G1upnoise 12.0 88 | G1downnoise 12.0 89 | 90 | G2upthresh 300 91 | G2downthresh 300 92 | G2upnoise 70 93 | G2downnoise 70 94 | 95 | #evNeuralUseSA1 96 | SA1inhibit 524288 97 | SA1adapt 328 98 | SA1decay -328 99 | SA1rest 2621 100 | 101 | #evNeuralUseRA1 102 | RA1inhibit 327680 103 | RA1adapt 3 104 | RA1decay -6552 105 | RA1rest 65536 106 | 107 | #evNeuralUseRA2 108 | RA2inhibit 327680 109 | RA2adapt 3 110 | RA2decay -3276 111 | RA2rest 2621 112 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/ATIS_orig.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 967 620 22 | CtrlbiasLBBuff 1800 967 950 23 | CtrlbiasDelTD 1800 967 400 24 | CtrlbiasSeqDelAPS 1800 967 320 25 | CtrlbiasDelAPS 1800 967 350 26 | biasSendReqPdY 1800 967 850 27 | biasSendReqPdX 1800 967 1150 28 | CtrlbiasGB 1800 711 1150 29 | TDbiasReqPuY 1800 711 200 30 | TDbiasReqPuX 1800 711 1200 31 | APSbiasReqPuY 1800 711 1100 32 | APSbiasReqPuX 1800 711 830 33 | APSVrefL 3300 967 3000 34 | APSVrefH 3300 967 3200 35 | APSbiasOut 3300 967 660 36 | APSbiasHyst 3300 967 455 37 | APSbiasTail 3300 967 520 38 | TDbiasCas 3300 839 2000 39 | TDbiasInv 3300 967 800 40 | TDbiasDiffOff 3300 967 450 41 | TDbiasDiffOn 3300 967 625 42 | TDbiasDiff 3300 967 500 43 | TDbiasFo 3300 711 3050 44 | TDbiasRefr 3300 711 2850 45 | TDbiasPR 3300 711 2800 46 | TDbiasBulk 3300 711 2680 47 | biasBuf 3300 967 0 48 | biasAPSreset 3300 711 1000 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 967 620 53 | CtrlbiasLBBuff 1800 967 950 54 | CtrlbiasDelTD 1800 967 400 55 | CtrlbiasSeqDelAPS 1800 967 400 56 | CtrlbiasDelAPS 1800 967 350 57 | biasSendReqPdY 1800 967 850 58 | biasSendReqPdX 1800 967 1150 59 | CtrlbiasGB 1800 711 970 60 | TDbiasReqPuY 1800 711 200 61 | TDbiasReqPuX 1800 711 1200 62 | APSbiasReqPuY 1800 711 1100 63 | APSbiasReqPuX 1800 711 830 64 | APSVrefL 3300 967 3050 65 | APSVrefH 3300 967 3150 66 | APSbiasOut 3300 967 750 67 | APSbiasHyst 3300 967 620 68 | APSbiasTail 3300 967 700 69 | TDbiasCas 3300 839 1200 70 | TDbiasInv 3300 967 800 71 | TDbiasDiffOff 3300 967 520 72 | TDbiasDiffOn 3300 967 670 73 | TDbiasDiff 3300 967 550 74 | TDbiasFo 3300 711 3100 75 | TDbiasRefr 3300 711 2800 76 | TDbiasPR 3300 711 3150 77 | TDbiasBulk 3300 711 2680 78 | biasBuf 3300 967 1111 79 | biasAPSreset 3300 711 2222 80 | 81 | [DVS_BIAS_LEFT] 82 | 83 | cas 52458 84 | injg 101508 85 | reqPd 16777215 86 | pux 8053457 87 | diffoff 133 88 | req 160712 89 | refr 944 90 | puy 16777215 91 | diffon 639172 92 | diff 30108 93 | foll 20 94 | pr 5 95 | 96 | [DVS_BIAS_RIGHT] 97 | 98 | cas 52458 99 | injg 101508 100 | reqPd 16777215 101 | pux 8053457 102 | diffoff 133 103 | req 160712 104 | refr 944 105 | puy 16777215 106 | diffon 639172 107 | diff 30108 108 | foll 20 109 | pr 5 110 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/chris_WS_current.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | strict true 3 | verbose false 4 | 5 | 
dataDevice /dev/iit-hpu0 6 | readPacketSize 512 7 | maxBottleSize 5000000 8 | errorcheck false 9 | 10 | controllerDevice /dev/i2c-2 11 | applyFilter false 12 | 13 | [FILTER_PARAMS] 14 | width 304 15 | height 240 16 | tsize 100000 17 | ssize 1 18 | 19 | [ATIS_BIAS_LEFT] 20 | 21 | CtrlbiasLP 1800 775 236808 22 | CtrlbiasLBBuff 1800 519 21789 23 | CtrlbiasDelTD 1800 775 13413 24 | CtrlbiasSeqDelAPS 1800 903 526834 25 | CtrlbiasDelAPS 1800 903 962013 26 | biasSendReqPdY 1800 519 65637 27 | biasSendReqPdX 1800 519 1871 28 | CtrlbiasGB 1800 519 1871 29 | TDbiasReqPuY 1800 903 21551 30 | TDbiasReqPuX 1800 647 1176808 31 | APSbiasReqPuY 1800 519 3533 32 | APSbiasReqPuX 1800 519 80579 33 | APSVrefL 3300 647 4 34 | APSVrefH 3300 647 1 35 | APSbiasOut 3300 903 1361445 36 | APSbiasHyst 3300 903 16529 37 | APSbiasTail 3300 903 85767 38 | TDbiasCas 3300 519 2197 39 | TDbiasInv 3300 775 921 40 | TDbiasDiffOff 3300 903 14496 41 | TDbiasDiffOn 3300 903 782046 42 | TDbiasDiff 3300 903 52428 43 | TDbiasFo 3300 647 1 44 | TDbiasRefr 3300 647 228 45 | TDbiasPR 3300 647 813 46 | TDbiasBulk 3300 647 15048 47 | biasBuf 3300 903 1 48 | biasAPSreset 3300 775 10074 49 | 50 | [ATIS_BIAS_RIGHT] 51 | 52 | CtrlbiasLP 1800 775 236808 53 | CtrlbiasLBBuff 1800 519 21789 54 | CtrlbiasDelTD 1800 775 13413 55 | CtrlbiasSeqDelAPS 1800 903 526834 56 | CtrlbiasDelAPS 1800 903 962013 57 | biasSendReqPdY 1800 519 65637 58 | biasSendReqPdX 1800 519 1871 59 | CtrlbiasGB 1800 519 1871 60 | TDbiasReqPuY 1800 903 21551 61 | TDbiasReqPuX 1800 647 1176808 62 | APSbiasReqPuY 1800 519 3533 63 | APSbiasReqPuX 1800 519 80579 64 | APSVrefL 3300 647 4 65 | APSVrefH 3300 647 1 66 | APSbiasOut 3300 903 1361445 67 | APSbiasHyst 3300 903 16529 68 | APSbiasTail 3300 903 85767 69 | TDbiasCas 3300 519 2197 70 | TDbiasInv 3300 775 921 71 | TDbiasDiffOff 3300 903 14496 72 | TDbiasDiffOn 3300 903 782046 73 | TDbiasDiff 3300 903 52428 74 | TDbiasFo 3300 647 1 75 | TDbiasRefr 3300 647 228 76 | TDbiasPR 3300 647 813 77 | TDbiasBulk 3300 647 15048 78 | biasBuf 3300 903 1 79 | biasAPSreset 3300 775 10074 80 | 81 | 82 | [DVS_BIAS_LEFT] 83 | 84 | cas 52458 85 | injg 101508 86 | reqPd 16777215 87 | pux 8053457 88 | diffoff 133 89 | req 160712 90 | refr 944 91 | puy 16777215 92 | diffon 639172 93 | diff 30108 94 | foll 20 95 | pr 5 96 | 97 | [DVS_BIAS_RIGHT] 98 | 99 | cas 52458 100 | injg 101508 101 | reqPd 16777215 102 | pux 8053457 103 | diffoff 133 104 | req 160712 105 | refr 944 106 | puy 16777215 107 | diffon 639172 108 | diff 30108 109 | foll 20 110 | pr 5 111 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/bias_files/icra_demo.ini: -------------------------------------------------------------------------------- 1 | name /zynqGrabber 2 | verbose false 3 | 4 | #these are in number of events 5 | dataDevice /dev/iit-hpu0 6 | packet_size 2560 7 | buffer_size 5120000 8 | hpu_read 9 | direct_read 10 | 11 | #controllerDevice /dev/i2c-2 12 | visCtrlLeft /dev/i2c-2 13 | 14 | [ATIS_BIAS_LEFT] 15 | 16 | CtrlbiasLP 1800 967 620 17 | CtrlbiasLBBuff 1800 967 950 18 | CtrlbiasDelTD 1800 967 400 19 | CtrlbiasSeqDelAPS 1800 967 320 20 | CtrlbiasDelAPS 1800 967 350 21 | biasSendReqPdY 1800 967 850 22 | biasSendReqPdX 1800 967 1150 23 | CtrlbiasGB 1800 711 1150 24 | TDbiasReqPuY 1800 711 200 25 | TDbiasReqPuX 1800 711 1200 26 | APSbiasReqPuY 1800 711 1100 27 | APSbiasReqPuX 1800 711 830 28 | APSVrefL 3300 967 3000 29 | APSVrefH 3300 967 3200 30 | APSbiasOut 3300 967 660 31 | APSbiasHyst 3300 967 455 32 | 
APSbiasTail 3300 967 520 33 | TDbiasCas 3300 839 2000 34 | TDbiasInv 3300 967 800 35 | TDbiasDiffOff 3300 967 400 36 | TDbiasDiffOn 3300 967 700 37 | TDbiasDiff 3300 967 500 38 | TDbiasFo 3300 711 3050 39 | TDbiasRefr 3300 711 3000 40 | TDbiasPR 3300 711 2800 41 | TDbiasBulk 3300 711 2680 42 | biasBuf 3300 967 0 43 | biasAPSreset 3300 711 1000 44 | 45 | [ATIS_BIAS_RIGHT] 46 | 47 | CtrlbiasLP 1800 967 620 48 | CtrlbiasLBBuff 1800 967 950 49 | CtrlbiasDelTD 1800 967 400 50 | CtrlbiasSeqDelAPS 1800 967 320 51 | CtrlbiasDelAPS 1800 967 350 52 | biasSendReqPdY 1800 967 850 53 | biasSendReqPdX 1800 967 1150 54 | CtrlbiasGB 1800 711 1150 55 | TDbiasReqPuY 1800 711 200 56 | TDbiasReqPuX 1800 711 1200 57 | APSbiasReqPuY 1800 711 1100 58 | APSbiasReqPuX 1800 711 830 59 | APSVrefL 3300 967 3000 60 | APSVrefH 3300 967 3200 61 | APSbiasOut 3300 967 660 62 | APSbiasHyst 3300 967 455 63 | APSbiasTail 3300 967 520 64 | TDbiasCas 3300 839 2000 65 | TDbiasInv 3300 967 800 66 | TDbiasDiffOff 3300 967 400 67 | TDbiasDiffOn 3300 967 700 68 | TDbiasDiff 3300 967 500 69 | TDbiasFo 3300 711 3050 70 | TDbiasRefr 3300 711 3000 71 | TDbiasPR 3300 711 2800 72 | TDbiasBulk 3300 711 2680 73 | biasBuf 3300 967 0 74 | biasAPSreset 3300 711 1000 75 | 76 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/hpuInterface.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * chiara.bartolozzi@iit.it 5 | * 6 | * This program is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU General Public License 17 | * along with this program. If not, see . 
18 | */ 19 | 20 | #ifndef __EVENTDRIVENYARPINTERFACE__ 21 | #define __EVENTDRIVENYARPINTERFACE__ 22 | 23 | #include 24 | #include 25 | #include 26 | #include 27 | 28 | /******************************************************************************/ 29 | //device2yarp 30 | /******************************************************************************/ 31 | class device2yarp : public yarp::os::Thread { 32 | 33 | private: 34 | 35 | //data buffer thread 36 | int fd; 37 | ev::BufferedPort output_port; 38 | yarp::os::Stamp yarp_stamp; 39 | 40 | //parameters 41 | unsigned int max_dma_pool_size; 42 | unsigned int max_packet_size; 43 | std::string port_name; 44 | double min_packet_duration{0.0}; 45 | 46 | public: 47 | 48 | device2yarp(); 49 | void configure(std::string module_name, int fd, unsigned int pool_size, 50 | unsigned int packet_size, bool record_mode); 51 | void yarpOpen(); 52 | void run(); 53 | void onStop(); 54 | 55 | }; 56 | 57 | /******************************************************************************/ 58 | //yarp2device 59 | /******************************************************************************/ 60 | class yarp2device : public yarp::os::Thread 61 | { 62 | protected: 63 | 64 | int fd; 65 | ev::BufferedPort input_port; 66 | std::string port_name; 67 | 68 | public: 69 | 70 | yarp2device(); 71 | void configure(std::string module_name, int fd); 72 | void yarpOpen(); 73 | void run(); 74 | void onStop(); 75 | 76 | 77 | }; 78 | 79 | /******************************************************************************/ 80 | //hpuInterface 81 | /******************************************************************************/ 82 | class hpuInterface { 83 | 84 | private: 85 | 86 | int fd; 87 | device2yarp D2Y; 88 | yarp2device Y2D; 89 | 90 | int pool_size; 91 | bool read_thread_open; 92 | bool write_thread_open; 93 | 94 | public: 95 | 96 | hpuInterface(); 97 | 98 | bool configureDevice(std::string device_name, bool spinnaker = false, 99 | bool loopback = false, bool gtp = true); 100 | bool openReadPort(std::string module_name, unsigned int packet_size, bool record_mode = false); 101 | bool openWritePort(std::string module_name); 102 | void tryconnectToYARP(); 103 | void start(); 104 | void stop(); 105 | 106 | }; 107 | 108 | #endif 109 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/skinController.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * chiara.bartolozzi@iit.it 5 | * 6 | * This program is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU General Public License 17 | * along with this program. If not, see . 
18 | */ 19 | 20 | #ifndef __VSKINCTRL__ 21 | #define __VSKINCTRL__ 22 | 23 | #include 24 | #include 25 | 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | 32 | class vSkinCtrl 33 | { 34 | private: 35 | 36 | //PARAMETERS 37 | std::string deviceName; 38 | unsigned char I2CAddress; 39 | 40 | //INTERNAL VARIABLES 41 | int fd; 42 | 43 | //INTERNAL FUNCTIONS 44 | int i2cRead(unsigned char reg, unsigned char *data, unsigned int size); 45 | int i2cWrite(unsigned char reg, unsigned char *data, unsigned int size); 46 | int i2cWrite(unsigned char reg, unsigned int data); 47 | 48 | //WRAPPERS? 49 | bool setDefaultRegisterValues(); //new initDevice 50 | bool select_generator(int type, int neural_mask = 0); 51 | bool config_generator(int type, uint32_t p1, uint32_t p2, uint32_t p3, uint32_t p4); 52 | 53 | 54 | public: 55 | 56 | //REQUIRE: devicefilename, chiptype (eg DVS/ATIS), chipFPGAaddress (eg LEFT or RIGHT) 57 | vSkinCtrl(std::string deviceName = "", unsigned char i2cAddress = 0); 58 | 59 | //CONNECTION 60 | bool connect(void); 61 | bool configure(); 62 | bool calibrate(); 63 | //void disconnect(bool andturnoff = true); 64 | void disconnect(); 65 | 66 | //DEBUG OUTPUTS 67 | void printConfiguration(void); // bias file, void dumpRegisterValues(); 68 | int printFpgaStatus(); 69 | bool configureRegisters(yarp::os::Bottle cnfgReg); 70 | bool setRegister(int byte, unsigned int mask, unsigned char regAddr, bool regVal); 71 | bool setRegister(unsigned char regAddr, double regVal); 72 | 73 | 74 | }; 75 | 76 | #endif 77 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/visCtrlATIS1.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "visionController.h" 3 | #include "yarp/os/all.h" 4 | 5 | class visCtrlATIS1 : public visCtrlInterface 6 | { 7 | private: 8 | //const static int VSCTRL_BG_CNFG_ADDR = 0x20; 9 | bool setLatchAtEnd(bool enable); 10 | bool setShiftCount(uint8_t shiftCount); 11 | bool setDefaultRegisterValues(); 12 | bool updateBiases(yarp::os::Bottle &bias_list, bool voltage_biases = true); 13 | bool activateAPSshutter(); 14 | 15 | 16 | public: 17 | visCtrlATIS1(int fd, channel_name channel) : visCtrlInterface(fd, channel) {}; 18 | bool activate(bool activate = true) override; 19 | bool configure(yarp::os::ResourceFinder rf) override; 20 | 21 | }; -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/visCtrlATIS3.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "visionController.h" 3 | 4 | class visCtrlATIS3 : public visCtrlInterface 5 | { 6 | private: 7 | int readSisleyRegister(uint32_t sisley_reg_address, uint32_t *sisley_data); 8 | int writeSisleyRegister(uint32_t sisley_reg_address, uint32_t sisley_data); 9 | bool enableHSSAER(); 10 | bool setROI(int x, int y, int width, int height); 11 | int setROIAXIS(int start, int size, xory_t coord, tdorem_t type); 12 | bool updateBiases(yarp::os::Bottle &bias); 13 | void printSensitivyBiases(); 14 | void setSensitivityBiases(int sensitivity); 15 | void printRefractoryBias(); 16 | void setRefractoryBias(int period); 17 | 18 | public: 19 | visCtrlATIS3(int fd, channel_name channel) : visCtrlInterface(fd, channel) {}; 20 | bool activate(bool activate = true) override; 21 | bool configure(yarp::os::ResourceFinder rf) override; 22 | 23 | }; 
-------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/visionController.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * chiara.bartolozzi@iit.it 5 | * 6 | * This program is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU General Public License 17 | * along with this program. If not, see . 18 | */ 19 | 20 | #pragma once 21 | 22 | #include 23 | #include 24 | #include 25 | 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | 32 | typedef struct fpgaStatus { 33 | bool crcErr; 34 | bool biasDone; 35 | bool i2cTimeout; 36 | bool apsFifoFull; 37 | bool tdFifoFull; 38 | } fpgaStatus_t; 39 | 40 | 41 | class visCtrlInterface 42 | { 43 | public: 44 | enum channel_name {LEFT = 0, RIGHT = 1}; 45 | enum cam_type {DVS = 1, ATIS1 = 2, ATIS3 = 3}; //correspond to FPGA values 46 | 47 | protected: 48 | 49 | static const int AUTO_INCREMENT = 0x80; 50 | static const int I2C_LEFT = 0x10; 51 | static const int I2C_RIGHT = 0x11; 52 | static const int VCTRL_INFO = 0x00; 53 | 54 | int fd{-1}; 55 | channel_name channel{LEFT}; 56 | 57 | static int extractCamType(int reg_value); 58 | static int channelSelect(int fd, channel_name name); 59 | static int i2cRead(int fd, unsigned char reg, unsigned char *data, 60 | unsigned int size); 61 | static int i2cWrite(int fd, unsigned char reg, unsigned char *data, 62 | unsigned int size); 63 | static void printConfiguration(int fd, channel_name name); 64 | 65 | static bool checkBiasDone(int fd); 66 | static bool checkFifoFull(int fd); 67 | static bool checkAPSFifoFull(int fd); 68 | static bool checki2cTimeout(int fd); 69 | static bool checkCRCError(int fd); 70 | static bool clearStatusReg(int fd); 71 | 72 | public: 73 | 74 | visCtrlInterface(int fd, channel_name channel); 75 | static int openI2Cdevice(std::string path); 76 | static void closeI2Cdevice(int fd); 77 | static int getChannelI2CAddress(int fd, channel_name channel); 78 | static int readCameraType(int fd, channel_name name); 79 | 80 | virtual bool activate(bool activate = true); 81 | virtual bool configure(yarp::os::ResourceFinder rf) = 0; 82 | virtual void printConfiguration(); 83 | }; 84 | 85 | class autoVisionController 86 | { 87 | private: 88 | int fd; 89 | visCtrlInterface * controls[2]; 90 | visCtrlInterface *createController(int fd, visCtrlInterface::channel_name channel); 91 | 92 | public: 93 | 94 | autoVisionController(); 95 | ~autoVisionController(); 96 | void connect(std::string i2c_device); 97 | void configureAndActivate(yarp::os::ResourceFinder rf); 98 | void disconnect() 99 | { 100 | // if(controls[0]) controls[0]->activate(false); 101 | // if(controls[1]) controls[1]->activate(false); 102 | } 103 | }; 104 | 105 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/include/zynqGrabberModule.h: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * chiara.bartolozzi@iit.it 5 | * 6 | * This program is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU General Public License 17 | * along with this program. If not, see . 18 | */ 19 | 20 | // \defgroup HardwareIO HardwareIO 21 | // \defgroup zynqGrabber zynqGrabber 22 | // \ingroup HardwareIO 23 | // \brief flexibile YARP-hardware interface for multiple sensor types 24 | 25 | #ifndef _ZYNQ_GRABBER_MODULE_H_ 26 | #define _ZYNQ_GRABBER_MODULE_H_ 27 | 28 | #define COMMAND_VOCAB_HELP createVocab('h','e','l','p') 29 | #define COMMAND_VOCAB_NAME createVocab('n','a','m','e') 30 | #define COMMAND_VOCAB_SUSPEND createVocab('s','u','s') 31 | #define COMMAND_VOCAB_RESUME createVocab('r','e','s') 32 | #define COMMAND_VOCAB_FAILED createVocab('f','a','i','l') 33 | #define COMMAND_VOCAB_OK createVocab('o','k') 34 | #define COMMAND_VOCAB_GETBIAS createVocab('g','e','t') // get biasName 35 | #define COMMAND_VOCAB_SETBIAS createVocab('s','e','t') // set biasName biasValue 36 | #define COMMAND_VOCAB_PROG createVocab('p','r','o','g') 37 | #define COMMAND_VOCAB_PWROFF createVocab('o','f','f') 38 | #define COMMAND_VOCAB_PWRON createVocab('o','n') 39 | #define COMMAND_VOCAB_RST createVocab('r','s','t') 40 | #define COMMAND_VOCAB_SETSKIN createVocab('s','s','e','t') // set regName regValue 41 | 42 | 43 | 44 | #include 45 | #include 46 | 47 | #include 48 | #include 49 | #include 50 | #include 51 | #include 52 | 53 | //within project includes 54 | #include "hpuInterface.h" 55 | #include "visionController.h" 56 | #include "skinController.h" 57 | 58 | class zynqGrabberModule : public yarp::os::RFModule { 59 | 60 | yarp::os::Port handlerPort; // a port to handle messages 61 | 62 | //HANDLES DEVICE CONFIGURATION 63 | //vVisionCtrl vsctrlMngLeft; 64 | //vVisionCtrl vsctrlMngRight; 65 | autoVisionController visCtrlManager; 66 | vSkinCtrl skctrlMng; 67 | 68 | hpuInterface hpu; 69 | 70 | public: 71 | 72 | bool configure(yarp::os::ResourceFinder &rf); // configure all the module parameters and return true if successful 73 | bool interruptModule(); // interrupt, e.g., the ports 74 | bool close(); // close and shut down the module 75 | bool respond(const yarp::os::Bottle& command, yarp::os::Bottle& reply); 76 | double getPeriod(); 77 | bool updateModule(); 78 | 79 | }; 80 | 81 | #endif // __ZYNQ_GRABBER_MODULE_H__ 82 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/zynqGrabber.ini: -------------------------------------------------------------------------------- 1 | dataDevice /dev/iit-hpu0 2 | hpu_read 3 | #hpu_write 4 | packet_size 7500 5 | 6 | i2cVision /dev/i2c-0 7 | skinCtrl /dev/i2c-3 8 | 9 | visLeftOn false 10 | visRightOn true 11 | sensitivity 50 12 | refractory 5 13 | 14 | [ATIS_BIAS_LEFT] 15 | 16 | CtrlbiasLP 1800 967 620 17 | CtrlbiasLBBuff 1800 967 950 18 | 
CtrlbiasDelTD 1800 967 400 19 | CtrlbiasSeqDelAPS 1800 967 320 20 | CtrlbiasDelAPS 1800 967 350 21 | biasSendReqPdY 1800 967 850 22 | biasSendReqPdX 1800 967 1150 23 | CtrlbiasGB 1800 711 1150 24 | TDbiasReqPuY 1800 711 200 25 | TDbiasReqPuX 1800 711 1200 26 | APSbiasReqPuY 1800 711 1100 27 | APSbiasReqPuX 1800 711 830 28 | APSVrefL 3300 967 3000 29 | APSVrefH 3300 967 3200 30 | APSbiasOut 3300 967 660 31 | APSbiasHyst 3300 967 455 32 | APSbiasTail 3300 967 520 33 | TDbiasCas 3300 839 2000 34 | TDbiasInv 3300 967 800 35 | TDbiasDiffOff 3300 967 450 36 | TDbiasDiffOn 3300 967 625 37 | TDbiasDiff 3300 967 500 38 | TDbiasFo 3300 711 3050 39 | TDbiasRefr 3300 711 2850 40 | TDbiasPR 3300 711 2800 41 | TDbiasBulk 3300 711 2680 42 | biasBuf 3300 967 0 43 | biasAPSreset 3300 711 1000 44 | 45 | [ATIS_BIAS_RIGHT] 46 | 47 | CtrlbiasLP 1800 967 620 48 | CtrlbiasLBBuff 1800 967 950 49 | CtrlbiasDelTD 1800 967 400 50 | CtrlbiasSeqDelAPS 1800 967 320 51 | CtrlbiasDelAPS 1800 967 350 52 | biasSendReqPdY 1800 967 850 53 | biasSendReqPdX 1800 967 1150 54 | CtrlbiasGB 1800 711 1150 55 | TDbiasReqPuY 1800 711 200 56 | TDbiasReqPuX 1800 711 1200 57 | APSbiasReqPuY 1800 711 1100 58 | APSbiasReqPuX 1800 711 830 59 | APSVrefL 3300 967 3000 60 | APSVrefH 3300 967 3200 61 | APSbiasOut 3300 967 660 62 | APSbiasHyst 3300 967 455 63 | APSbiasTail 3300 967 520 64 | TDbiasCas 3300 839 2000 65 | TDbiasInv 3300 967 800 66 | TDbiasDiffOff 3300 967 450 67 | TDbiasDiffOn 3300 967 625 68 | TDbiasDiff 3300 967 500 69 | TDbiasFo 3300 711 3050 70 | TDbiasRefr 3300 711 2850 71 | TDbiasPR 3300 711 2800 72 | TDbiasBulk 3300 711 2680 73 | biasBuf 3300 967 0 74 | biasAPSreset 3300 711 1000 75 | 76 | [DVS_BIAS_LEFT] 77 | 78 | cas 52458 79 | injg 101508 80 | reqPd 16777215 81 | pux 8053457 82 | diffoff 133 83 | req 160712 84 | refr 944 85 | puy 16777215 86 | diffon 639172 87 | diff 30108 88 | foll 20 89 | pr 5 90 | 91 | [DVS_BIAS_RIGHT] 92 | 93 | cas 52458 94 | injg 101508 95 | reqPd 16777215 96 | pux 8053457 97 | diffoff 133 98 | req 160712 99 | refr 944 100 | puy 16777215 101 | diffon 639172 102 | diff 30108 103 | foll 20 104 | pr 5 105 | 106 | [SKIN_CNFG] 107 | samplesTxEn true 108 | eventsTxEn true 109 | 110 | asrFilterType false 111 | asrFilterEn false 112 | egNthrEn true 113 | preprocSamples true 114 | preprocEg true 115 | driftCompEn false 116 | samplesTxMode true 117 | # enable 16 bits coding (true) or 8 bits coding (false) 118 | samplesRshift 0 119 | # if samples TxMode = false needs to set the shift value 120 | samplesSel false 121 | #Samples source (0: pre-proc, 1: post-preproc); 122 | resamplingTimeout 50 123 | #timebase 50ns 124 | 125 | evGenSel 1 126 | 127 | G1upthresh 0.1 128 | G1downthresh 0.1 129 | G1upnoise 12.0 130 | G1downnoise 12.0 131 | 132 | G2upthresh 30 133 | G2downthresh 30 134 | G2upnoise 50 135 | G2downnoise 50 136 | 137 | #evNeuralUseSA1 138 | SA1inhibit 524288 139 | SA1adapt 328 140 | SA1decay -328 141 | SA1rest 2621 142 | 143 | #evNeuralUseRA1 144 | RA1inhibit 327680 145 | RA1adapt 3 146 | RA1decay -6552 147 | RA1rest 65536 148 | 149 | evNeuralUseRA2 150 | RA2inhibit 327680 151 | RA2adapt 3 152 | RA2decay -3276 153 | RA2rest 2621 154 | -------------------------------------------------------------------------------- /cpp_tools/zynqGrabber_deprec/zynqGrabber.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | zynqGrabber 6 | hardwareIO 7 | Interfaces to DVS or ATIS sensors connected with zynq-based boards 8 | Released under the terms of the GNU GPL v2.0 
9 | 1.0 10 | 11 | 12 | Reads/writes events as well as programs biases for event-based chips connected using a zynq-board interface. 13 | 14 | 15 | 16 | name 17 | errorcheck 18 | verbose 19 | biaswrite 20 | jumpcheck 21 | iBias 22 | collerDevice 23 | ATIS_BIAS_LEFT 24 | ATIS_BIAS_RIGHT 25 | dataDevice 26 | readPacketSize 27 | bufferSize 28 | maxBottleSize 29 | 30 | 31 | 32 | Chiara Bartolozzi 33 | Arren Glover 34 | 35 | 36 | 37 | 38 | 39 | vBottle 40 | /zynqGrabber/vBottle:i 41 | 42 | Accepts events to be sent to the hardware device (i.e. SpiNNaker) 43 | 44 | 45 | 46 | 47 | vBottle 48 | /zynqGrabber/vBottle:o 49 | 50 | The events from the camera (or other device) are published on this port 51 | 52 | 53 | 54 | 55 | yarp::os::Bottle 56 | /zynqGrabber/eventCount:o 57 | 58 | Outputs the number of events read by the zynqGrabber 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | /zynqGrabber/rpc 67 | 68 | Start stop the device 69 | Set the bias values for a camera 70 | 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /documentation/1viewer.md: -------------------------------------------------------------------------------- 1 | # Testing your install with the viewer 2 | 3 | Visualise a stream of events either from real hardware/camera or from a pre-recorded sequence. 4 | 5 | ## Description 6 | 7 | This application demonstrates how to visualise a stream of address events either from the cameras or from a pre-recorded sequence. 8 | The event stream is transmitted from the cameras (`/zynqGrabber/AE:o`) to the vPreProcess (`/vPreProcess/AE:i`), that removes salt-and-pepper noise from the event stream. The filtered stream (`/vPreProcess/left:o` and `/vPreProcess/right:o`) is sent to vFramer (`/vFramer/left/AE:i`), that converts it to a yarpview-able image. The "images" from left (`/vFramer/left/image:o`) and right camera (`/vFramer/right/image:o`) are then sent to the yarp viewers (`/viewCh0` and `/viewCh1`). 9 | 10 | Here is a visualisation of the instantiated modules and connections. 11 | 12 | 13 | 14 | ## How to run the application 15 | 16 | These are basic instructions for first time YARP users, assuming the [comprehensive instructions](full_installation.md) have been followed. 17 | 18 | ### Using actual hardware/camera? Skip [ahead](#using-real-hardwarecamera). Using a dataset? Follow these instructions: 19 | 20 | * Download the sample dataset from [here](https://doi.org/10.5281/zenodo.2556755) and unpack to a location of your choosing. 21 | * Set the yarp namespace: 22 | ```bash 23 | yarp namespace / 24 | ``` 25 | * Run a yarpserver: 26 | ```bash 27 | yarpserver --write 28 | ``` 29 | * In a separate terminal, run a yarpdataplayer: 30 | ```bash 31 | yarpdataplayer 32 | ``` 33 | * In the yarpdataplayer gui, open the downloaded datsets (File->open) by pressing "Choose" on the upper-level folder (e.g. folder name: fasthandtrim). 34 | * The dataset should be open in the yarpdataplayer window with a "Port Name" of `/zynqGrabber/AE:o`. 35 | 36 | ### Using real hardware/camera? 37 | 38 | * You should have followed the instructions [to run install hardware and run zynqGrabber](README.md). 39 | * If everything went smoothly, your laptop should be connected to the `yarpserver` running on the ZCB, your zynqGrabber should be running and a port `/zynqGrabber/AE:o` should be already open (check with `yarp name list`). 
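If you are using real hardware, you can also quickly confirm that events are actually arriving before launching the viewer. This is a minimal sketch using standard YARP command-line tools and the port name given above:

```bash
yarp name list                   # /zynqGrabber/AE:o should appear in the list of registered ports
yarp read ... /zynqGrabber/AE:o  # prints raw event bottles to the terminal; stop with ctrl+c
```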
40 | 41 | ### Okay - yarpdataplayer users, and hardware users back together here: 42 | * In a separate terminal, run a yarpmanager: 43 | ```bash 44 | yarpmanager 45 | ``` 46 | * The yarpmanager gui should be open. 47 | * On the entities tab (left) open the "Applications" folder - the vView application should be visible. Double click the `event-viewer-example` application to load it. 48 | * If you do not have the `event-viewer-example` application, make sure you followed the installation steps correctly, and that your `YARP_DATA_DIRS` environment variable correctly points to the share folders in your install folder (`echo $YARP_DATA_DIRS`). 49 | * Run all the modules in the `event-viewer-example` app by choosing "Run All" in the left-most vertical toolbar. All applications should become green. 50 | * If not all applications are green, it means the executable files could not be found on the `PATH`. Verify your installation and your `PATH` environment variable (`echo $PATH`). 51 | * Connect all yarp ports by choosing "Connect All" in the same toolbar. All connections should turn green. 52 | * Press "Play" on the yarpdataplayer. The dataset should be visible in the yarpview windows (split into left and right cameras). 53 | * To close all applications, first "Disconnect All" and then "Close All" on the left-hand toolbar. GUIs are closed like normal windows. The yarpserver can be closed using `ctrl+c` in the appropriate terminal. 54 | -------------------------------------------------------------------------------- /documentation/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started with `event-driven` 2 | 3 | You may have some hardware in front of you that you are unsure how to use; if not, and you just need help with software, skip [ahead](#software). If you have any questions about these instructions, please open a GitHub issue on the `event-driven` repository page. 4 | 5 | ## Hardware 6 | 7 | * Hardware could include sensors (_e.g._ ATIS, event-driven skin, cochlea, or IMU), as well as acquisition boards based on zynq chips (_e.g._ ZCB or z-turn). We'll assume the hardware is correctly connected and powered. :warning: However, if you are truly starting from scratch, you may need to [set-up an sd-card for a ZCB or z-turn](howtosetupSD.md). If you just need to connect to your hardware, skip this step. 8 | 9 | * If the system is already ready to go, and you just need the board to start acquisition, you can follow these instructions to [connect to the ZCB or z-turn](connect_to_zcb.md). 10 | 11 | * Once you have a connection to the board, you need to [set-up the yarpserver](setup_yarpserver.md) to correctly communicate between your laptop and the board. 12 | 13 | * Finally you will need to [run `zynqGrabber`](zynqGrabber.md). 14 | 15 | * At this point, if you have checked your hardware is streaming events, you can start to read and process them, once your software is correctly installed on the laptop as well. 16 | 17 | ## Software 18 | 19 | * The first step is to install the dependencies - typically just [YARP](https://github.com/robotology/yarp) is required, but we recommend [YCM](https://github.com/robotology/ycm), as well as [OpenCV](https://opencv.org/) for visualisation - and the `event-driven` library itself. The [instructions for installation](full_installation.md) should be all you need to have a working environment; a minimal build sketch is shown below.
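  A minimal out-of-source build might look like the following. This is only a sketch - it assumes YARP is already installed and discoverable by CMake; the [instructions for installation](full_installation.md) remain the authoritative reference:

  ```bash
  git clone https://github.com/robotology/event-driven.git
  cd event-driven
  mkdir build && cd build
  cmake ..            # optionally add -DCMAKE_INSTALL_PREFIX=<your install folder>
  make -j4
  sudo make install   # not needed if you install to a user-writable prefix
  ```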
20 | * We suggest following the [getting started with visualising events](1viewer.md) tutorial to check everything is working, and to learn about the `yarpdataplayer`. If you can't get the tutorial working, double check everything is installed correctly, and if you can't find the problem, please open an issue on the `event-driven` repository page. 21 | 22 | * Finally, once you are ready, you can [learn to write a basic processing module](example_module.md) which covers reading and writing events, and understanding event timestamps. Your module probably needs a nice way of: 23 | * [TODO: visualising your output](), and 24 | * possibly needs to [TODO: correct for lens distortion or other external calibration]() 25 | given you have [calibrated your camera or stereo pair](calibration/README.md) or [calibrated the iCub robot with the calibration-supervisor](calibration/README.md) 26 | 27 | * You can also learn to [TODO: save and playback]() data for offline processing and experiment re-creation. 28 | -------------------------------------------------------------------------------- /documentation/SpiNNakerips.md: -------------------------------------------------------------------------------- 1 | # Change SpiNNaker IP address 2 | 3 | The SpiNNaker-5 board has two ethernet ports, one for management (located more centrally) and one for loading a model onto the cores (located more externally). The management port can be used to change the IP address of both ports. 4 | 5 | * Establish a connection to the management port. Either connect to the same local network, or make an ad-hoc connection with a direct cable and set your own laptop IP to the same subnet (changing only the final value of the IP address). Make sure there is a connection using the ``ping`` command. The IP address of the management port could be: 6 | * 192.168.240.0 (default) 7 | * 10.0.0.70 (EDPR board) 8 | * The spinnaker_tools package provides an application ``bmpc`` to communicate with the management port: 9 | 10 | > bmpc 10.0.0.70 11 | 12 | * A connection should be made, and you should be greeted by the bmpc command prompt. Type ``help`` for information. Visualise the IP addresses of the board with: 13 | 14 | > spin_ip 15 | 16 | > bmp_ip 17 | 18 | * Change the IP (and more) with: 19 | 20 | > spin_ip 21 | 22 | for example 23 | 24 | > spin_ip c059 00:00:a4:f0:00:01 10.0.0.7 10.0.0.1 255.255.0.0 17893 25 | 26 | and 27 | 28 | > bmp_ip c000 00:00:a4:f0:00:00 10.0.0.6 10.0.0.1 255.255.0.0 17893 29 | 30 | * Restart the board for the new IP addresses to take effect. 31 | 32 | -------------------------------------------------------------------------------- /documentation/application_instructions/3a.dualcam.md: -------------------------------------------------------------------------------- 1 | # Visual Transform for Dual Frame/Event camera lens 2 | 3 | ## Introduction 4 | 5 | The vMapping app is used to calibrate and visualise the overlay of events onto the frame-based image. 6 | This module is needed within the iCub setup which combines both traditional and event cameras 7 | to map the events to the corresponding location in the image provided by the normal cameras. 8 | The calibration is performed using the [asymmetric circle grid board](https://nerian.com/support/resources/patterns/) 9 | similarly to the classical calibration procedure. Both events and images need to be undistorted since the mapping is planar. 10 | If no calibration is required the module will simply output the image overlapped with the events.
11 | 12 | ## Dependencies 13 | 14 | This app does not need any further module to be running. The intrinsic parameters of both the traditional and event cameras 15 | have to be computed beforehand and are loaded from specific files within the *cameraCalibration* context by default. 16 | 17 | Here is a visualisation of the instantiated modules and connections. 18 | 19 | ![DualCamTransform visualization](http://robotology.github.io/event-driven/doxygen/images/DualCamTransform_builder.png) 20 | 21 | If calibration is required, a [vFramer](http://robotology.github.io/event-driven/doxygen/doc/html/group__vFramer.html) is also needed to find the corresponding points between events and frames. Below is a visualisation of the instantiated modules and connections for the calibration setup. 22 | 23 | ![DualCamTransform calib_visualization](http://robotology.github.io/event-driven/doxygen/images/DualCamTransform_calib_builder.png) 24 | 25 | ## How to run the application 26 | 27 | * The application assumes you are connected to a *yarpserver* - see http://www.yarp.it/ for basic instructions for using yarp. 28 | 29 | * Inside the Application folder in the yarpmanager gui, you should see an entry called *mapping*. Double click and 30 | open it. 31 | 32 | * Make sure the cameras are connected properly. 33 | 34 | * Now you are ready to run the application! Hit the run button and then connect on the yarpmanager gui. You can avoid launching the vFramer if no calibration is necessary. 35 | 36 | * If you want to perform the calibration, then after the application has started and the ports are connected you should see two windows showing the events (in the form of blobs - see the vFramer documentation) and the camera image. Move the calibration board in front of the cameras until enough images have been collected (the number of images required is set with the maxIter parameter of the vMapping module). When calibration is over the results are saved in the vMapping.ini file within the context folder (*cameraCalibration* by default). 37 | 38 | * Once the calibration is done (or in case of no calibration) the yarpview windows will show the image overlapped with the events. 39 | -------------------------------------------------------------------------------- /documentation/application_instructions/3b.autosaccade.md: -------------------------------------------------------------------------------- 1 | # iCub Micro-saccade and Attention Demo 2 | 3 | Introduction 4 | ------------ 5 | This module is used to generate events in static scenes. When the event rate drops below a certain threshold, a 6 | circular motion of the eye is triggered. The resulting event stream gives an idea about the shapes of the objects in 7 | the scene. Instead, if there are enough events, then the robot gazes at their center of mass. The app launches the 8 | zynqGrabber which sends the events both to the vFramer for visualization and to the autosaccade module which decides 9 | how to move the robot eyes. 10 | 11 | The following image gives an overview of the opened ports and how they are connected. 12 | 13 | ![autosaccade visualization](http://robotology.github.io/event-driven/doxygen/images/autoSaccadeApp_builder.png) 14 | 15 | Dependencies 16 | ------------ 17 | This app needs the robot (or the [simulator](http://wiki.icub.org/brain/group__icub__Simulation.html)) to be up and 18 | running together with the [yarprobotinterface](http://www.yarp.it/yarprobotinterface.html) and the [iKinGazeCtrl](http://wiki.icub.org/brain/group__iKinGazeCtrl.html).
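For reference, the dependencies can be brought up from a terminal as in the sketch below. The exact commands are an assumption based on the standard iCub tools (simulator executable `iCub_SIM`, gaze controller `iKinGazeCtrl`); adapt them to your own robot setup:

```bash
# each command in its own terminal (or launched through yarprun)
iCub_SIM                      # start the simulator, or bring up the real robot instead
yarprobotinterface            # robot interface (runs on the robot PC for a real iCub)
iKinGazeCtrl --robot icubSim  # gaze controller; use --robot icub (the default) on the real robot
```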
19 | 20 | How to run the application 21 | -------------------------- 22 | 23 | On a console, run yarpserver (if not already running). 24 | 25 | You can now run yarpmanager. 26 | 27 | Inside the Application folder in the yarpmanager gui, you should see an entry called vAutosaccadeDemo. Double click and 28 | open it. 29 | 30 | Run the robot (or the iCubSim), the yarprobotinterface and the iKinGazeCtrl. Make sure to specify the correct robot 31 | name passing the proper parameter to the autosaccade module. 32 | 33 | Now you are ready to run the application! Hit the run button and then connect on the yarpmanager gui. 34 | 35 | You will now see the robot (or the simulator) gazing to the center of mass of the events or, if the event rate is not 36 | high enough, performing a circular eyes motion to generate events. 37 | -------------------------------------------------------------------------------- /documentation/application_instructions/4balldemo.md: -------------------------------------------------------------------------------- 1 | # Ball Tracking with Head and Arm following 2 | 3 | This tutorial explains how to perform ball tracking with the event-driven cameras. The Robot can be commanded to look at the position of the ball, and also move its arm to the position of the ball. 4 | 5 | ![icubandball](http://robotology.github.io/event-driven/doxygen/images/icubandball.png) 6 | 7 | ## How it works 8 | 9 | Ball tracking is performed using a particle filter. There are limits on the maximum and minimum size of the ball, and thresholds for minimum "roundness" to be successfully detected. The output of the particle filter is a ev::GaussianAE event which describes the position and size of the ball and whether the ball is over the "true detection threshold". The visualisation will display the ball position as blue if the detection score is above the threshold, and red otherwise. 10 | 11 | The application is run with ball tracking performed on both the left and right event-streams. The detected position in the left and right cameras are sent to the vGazeDemo module, which checks the consistency between left and right streams (are they tracking the same ball?) and if both are above the "true detection threshold". If all checks pass, the vGazeDemo module uses the iCub Gaze Controller to look at the position of the ball, using the 3D position calculated from stereo triangulation. The left arm can also be commanded to move to the same position by setting the appropriate flag on start-up. 12 | 13 | The ball should then be moved in front of the robot to have the gaze of the robot follow its position. If the position of the ball is lost for some seconds, the particles are resampled to the centre of the image, and the ball will have to be moved to the centre of the image to regain tracking. 14 | 15 | ## Dependencies 16 | 17 | To move the robot a iKinGazeCtrl module needs to be running and connected to the yarprobotinterface (a robot or simulator is required). The iKinGazeCtrl also needs a correctly calibrated camera parameter file. 18 | 19 | ## How to run the application 20 | 21 | The application assumes you are connected to a *yarpserver* - see http://www.yarp.it/ for basic instructions for using yarp. 22 | 23 | 1. run a yarpmanager 24 | 2. in the yarpmanager, find and open the vGazeDemo application. If the applciation is not available the event-driven library has not been correctly installed, or the path to the icub-contrib-common install folder is not correctly set. 25 | 3. 
the zynqGrabber in the modules panel can only be used if you have the robot environment (with yarprun) correctly installed and set-up. If not, you can manually run your event grabbing module on the computer connected to the camera, or use a yarpdataplayer to play a dataset instead. 26 | 4. run all modules (ignoring zynqGrabber if you have a custom grabber, and ignoring vGazeDemo if no robot is present). 27 | 5. the visualisation will show the current estimate of the ball position on the left and the right cameras. 28 | 29 | ![connections](http://robotology.github.io/event-driven/doxygen/images/vGazeDemoConnections.png) 30 | 31 | note: shmem connections can only be used if the modules are running on the same physical machine. 32 | 33 | ## Known issues 34 | 35 | * Flashing lights or reflections off the surface of the ball can cause tracking loss, as the conditions for a ball to be "hollow" are no longer met. 36 | * There is an upper limit on the speed at which the ball can be moved. This is due to computational limits rather than sensor limits. In post-processing (off-line) the ball can be tracked at any speed. 37 | -------------------------------------------------------------------------------- /documentation/application_instructions/5corners.md: -------------------------------------------------------------------------------- 1 | # How to Detect and Visualise a Stream of Corner Events 2 | 3 | Detect and visualise a stream of corner events from the cameras or from a pre-recorded sequence. 4 | 5 | ## Description 6 | 7 | This application demonstrates how to detect and visualise a stream of corner events either from the cameras or from a pre-recorded sequence. 8 | The event stream is transmitted from the cameras (/zynqGrabber/vBottle:o) to the vPreProcess (/vPreProcess/vBottle:i), that removes salt-and-pepper noise from the event stream. The filtered stream (/vPreProcess/vBottle:o) is sent to vCorner (/vCorner/vBottle:i) that detects corners whose Harris score is higher than a threshold. Both address events and corner events are sent to the vFramer, that has specific ports for different kinds of events (/vFramer/AE:i for address events, /vFramer/LAE:i for corner events). Address events and corresponding corner events are visualised on the yarp viewers (/viewLeft and /viewRight). 9 | 10 | Here is a visualisation of the instantiated modules and connections. 11 | 12 | ![vCorner visualization](http://robotology.github.io/event-driven/doxygen/images/vCorner.png) 13 | 14 | For more details on the algorithm, please refer to: Vasco V., Glover A., and Bartolozzi C. (2016) Fast event-based Harris corner detection exploiting the advantages of event-driven cameras. In IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), October 2016, Daejeon, Korea. 15 | 16 | ## Dependencies 17 | 18 | No special dependencies are required; all the required modules will be executed by the application. 19 | 20 | ## How to run the application 21 | 22 | The application assumes you are connected to a *yarpserver* - see http://www.yarp.it/ for basic instructions for using yarp. 23 | 24 | Inside the *Application* folder in the yarpmanager gui, you should see an entry called *vCorner*. Double click and open it. 25 | 26 | Now you are ready to run the application. Hit the *run* button and then *connect* on the yarpmanager gui. 27 | 28 | You will now see the yarpview windows displaying the address events and the corner events (in red).
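If you prefer not to use yarpmanager, the same connections can be made by hand with `yarp connect`. The sketch below uses the port names listed above; the vCorner output port name is an assumption based on the same naming convention:

```bash
yarp connect /zynqGrabber/vBottle:o /vPreProcess/vBottle:i
yarp connect /vPreProcess/vBottle:o /vCorner/vBottle:i
yarp connect /vPreProcess/vBottle:o /vFramer/AE:i
yarp connect /vCorner/vBottle:o     /vFramer/LAE:i   # assumed output port name
```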
29 | 30 | To visualise events from a pre-recorded dataset, you can run *yarpdataplayer*. 31 | 32 | Since *yarpdataplayer* opens the port with the same name as the real robot, make sure the same port is not running (or that you start an instance of the nameserver with your own namespace). 33 | 34 | ## Known issues 35 | 36 | * There is a maximum event-rate at which corners can be computed, depending on computational power. If this limit is reached, surplus events will not be checked for corners. The result is that corner trajectories can have missing segments if the camera is moved at very high speeds (over 1 million events per second). 37 | 38 | -------------------------------------------------------------------------------- /documentation/application_instructions/6vergence.md: -------------------------------------------------------------------------------- 1 | # iCub Vergence Demo 2 | 3 | Control the iCub to verge on a stimulus placed in the center of the field of view. 4 | 5 | ## Description 6 | 7 | This application demonstrates how the iCub verges on a stimulus placed in the center of the field of view, based on the responses of a set of binocular Gabor filters. 8 | The event stream is transmitted from the cameras (/zynqGrabber/vBottle:o) to vPreProcess (/vPreProcess/vBottle:i), that removes salt-and-pepper noise. The filtered stream (/vPreProcess/vBottle:o) is sent to vVergence (/vVergence/vBottle:i), that updates the responses of the filter bank and sends a command to the robot encoders. Events from left and right cameras are superimposed (/vVergence/debug:o) and sent to the yarp viewer (/viewDebug). 9 | 10 | The *depthgt* module is useful for evaluating the performance of the algorithm, but not necessary for the demonstration. It automatically connects to the device (/OpenNI2/depthFrame:o) that exposes the depth image from a Kinect sensor (/depthgt/depthim:i) and produces a depth value that can be used as ground truth (/depthgt/gt:o). The depth image (/depthgt/depthim:o) is then sent for visualisation to the yarp viewer (/viewGT). 11 | 12 | Here is a visualisation of the instantiated modules and connections. 13 | 14 | ![vVergence visualization](http://robotology.github.io/event-driven/doxygen/images/vVergence.png) 15 | 16 | If you're going to use this controller for your work, please cite it in any resulting publication: V. Vasco, A. Glover, Y. Tirupachuri, F. Solari, M. Chessa, and C. Bartolozzi. Vergence control with a neuromorphic iCub. In IEEE-RAS International Conference on Humanoid Robots (Humanoids), November 2016, Mexico. 17 | 18 | ## Dependencies 19 | 20 | This application assumes that [yarprobotinterface](http://www.yarp.it/yarprobotinterface.html) is running. 21 | This application requires [OpenNI2](http://wiki.icub.org/wiki/OpenNI2) to be installed to obtain the ground truth depth image, but it is not necessary for the demonstration. 22 | 23 | ## How to run the application 24 | 25 | The application assumes you are connected to a *yarpserver* - see http://www.yarp.it/ for basic instructions for using yarp. 26 | 27 | Inside the *Application* folder in the yarpmanager gui, you should see an entry called *vVergence*. Double click and open it. 28 | 29 | Hit the *run* button and then *connect* on the yarpmanager gui. 30 | You will now see the yarpview windows, displaying events from both cameras and the ground truth depth image (if you are using a depth sensor). 31 | An additional yarpscope window will open to visualise the responses of the filter bank.
This loads the xml file *scope_filtersConf.xml*, in the *vergenceController*, 32 | that you can modify according to your needs. 33 | 34 | The iCub is not controlled yet. To start the control, open a terminal and type 35 | 36 | yarp rpc /vVergence/rpctrigger:i 37 | 38 | You can send commands to the *vergenceController*. 39 | Type `start` in the command prompt. You should get the following output: 40 | 41 | >>start 42 | Response: "Starting Verging..." 43 | 44 | Now the vergence is controlled and the iCub will start verging on the object. 45 | Move the object in depth to see the iCub vergence following the taget. 46 | You should always see events from left and right cameras superimposed in the yarpview window *viewDebug*. 47 | 48 | When you are done, type `reset` in the command prompt. This will stop the *vergenceController*. 49 | You should get the following output: 50 | 51 | >>reset 52 | Response: "Resetting..." 53 | -------------------------------------------------------------------------------- /documentation/calibration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/robotology/event-driven/7dbd368ab2028fe645b0ed7ccdede10685852b56/documentation/calibration.png -------------------------------------------------------------------------------- /documentation/calibration/README.md: -------------------------------------------------------------------------------- 1 | # Event-driven camera calibration 2 | 3 | Here we have instructions to calibration your event-driven camera: 4 | 5 | ### [Calibrate a mono, stereo-pair or mixed RGB stereo-event pair](stereo_calibration.md) 6 | 7 | ### [Calibrate the iCub stereo pair using calibration-supervisor](supervisor.md) 8 | 9 | 10 | -------------------------------------------------------------------------------- /documentation/calibration/RealSense_conf.ini: -------------------------------------------------------------------------------- 1 | device RGBDSensorWrapper 2 | subdevice realsense2 3 | name /depthCamera 4 | 5 | [SETTINGS] 6 | depthResolution (480 270) #Other possible values (424 240) or (640 480) 7 | rgbResolution (424 240) #Other possible values (424 240) or (640 480) 8 | framerate 30 9 | enableEmitter true 10 | needAlignment true 11 | alignmentFrame RGB 12 | 13 | [HW_DESCRIPTION] 14 | clipPlanes (0.2 10.0) 15 | 16 | -------------------------------------------------------------------------------- /documentation/calibration/stereoCalib.ini: -------------------------------------------------------------------------------- 1 | standalone 2 | 3 | [STEREO_CALIBRATION_CONFIGURATION] 4 | 5 | MonoCalib 0 6 | boardWidth 8 7 | boardHeight 6 8 | boardSize 0.008 9 | numberOfPairs 20 10 | #boardType ASYMMETRIC_CIRCLES_GRID 11 | 12 | -------------------------------------------------------------------------------- /documentation/calibration/stereoCalib.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | stereoCalib 4 | Description 5 | 1.0 6 | 7 | 8 | stereoCalib 9 | --from ~/.local/share/yarp/contexts/cameraCalibration/stereoCalib.ini 10 | localhost 11 | 12 | 13 | 14 | yarpdev 15 | --from /usr/local/src/robot/librealsense/RealSense_conf.ini 16 | localhost 17 | 18 | 19 | 20 | vPreProcess 21 | --filter_temporal false --filter_spatial false --split_stereo 22 | localhost 23 | 24 | 25 | 26 | vFramerLite 27 | --displays "(/right (BLACK))" --frameRate 30 --eventWindow 0.034 28 | localhost 29 | 30 | 31 | 32 | yarpview 33 | --name /view/leftCalib:i --x 567 
--y 490 --w 500 --h 400 --synch 34 | localhost 35 | 36 | 37 | 38 | yarpview 39 | --name /view/rightCalib:i --x 567 --y 1060 --w 500 --h 400 --synch 40 | localhost 41 | 42 | 43 | 44 | /depthCamera/rgbImage:o 45 | /stereoCalib/cam/left:i 46 | fast_tcp 47 | 48 | 49 | 50 | /stereoCalib/cam/left:o 51 | /view/leftCalib:i 52 | fast_tcp 53 | 54 | 55 | 56 | /zynqGrabber/AE:o 57 | /vPreProcess/AE:i 58 | fast_tcp 59 | 60 | 61 | 62 | /vPreProcess/right:o 63 | /vFramer/right/AE:i 64 | fast_tcp 65 | 66 | 67 | 68 | /vFramer/right/image:o 69 | /stereoCalib/cam/right:i 70 | fast_tcp 71 | 72 | 73 | 74 | /stereoCalib/cam/right:o 75 | /view/rightCalib:i 76 | fast_tcp 77 | 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /documentation/calibration/stereo_calibration.md: -------------------------------------------------------------------------------- 1 | # Event-driven camera calibration 2 | 3 | The following procedure is used to calibrate the event-camera intrinsic parameters, as well as the stereo extrinsic parameters to a second camera, being either event-based or traditional RGB. The most complex prodecure or stereo calibration to RGB camera is described first, and the simplification to achieve mono, or event-driven stereo pair are described afterwards. The calibration follows the classic procedure detecting a predefined checkerboard pattern placed in front of the cameras. 4 | 5 | ## Requirements 6 | 7 | * YARP (sending receiving events and images) 8 | * icub-main (the main calibration software which wraps opencv) 9 | * event-driven (event reading and forming images) 10 | * RGB and/or event-based camera able to connect with Yarp 11 | * tablet (prefered) or laptop/portable screen 12 | 13 | ## Method of calibration 14 | 15 | ### The checkerboard pattern 16 | 17 | The calibration method described uses the stardard checkerboard pattern seen from multiple different viewpoints in front of the camera. However, a fiducial that can be simultaneously detected as a checkerboard from both the event- and RGB-camera is required to enable the stereo calibration. To do so we have provided a flashing video file, which flashes fast enough to be ignored by an RGB camera, and is tuned to the event visualisation method to produce also a reliable checkerboard image also from the event-camera. The [checkerboard video](video_checkboard.mp4) should be played back on a portable screen, we suggest a tablet/pad or possibly a lightweight laptop/monitor. Depending on the screen type the video can be used in two ways: 18 | 19 | * Use the screen refresh rate: set the screen refresh to 60 Hz and play and pause the video. The screen refresh rate itself may be detectable by the event-camera and RGB-camera without anything further required. 20 | * Use the video flash: some screens may not refresh in a compatible way, in which case the video must be played on repeat/loop to achieve the desired effect. 21 | 22 | :warning: in either of these cases, it may be necessary to play with the screen brightness to balance event-producation with edge bleeding. 23 | 24 | ### Creating the event image 25 | 26 | The events from the event-camera must first be sent to the visualisation method to produce an image from the asynchronous pixel firing pattern. 
The `vFramerLite` application is used to do so using the `BLACK` draw type, and with an event window of 34 ms to capture both the on and off frames of the flashing video: 27 | 28 | `--displays "(/right (BLACK))" --frameRate 30 --eventWindow 0.034` 29 | 30 | :bulb: don't worry, a YARP application is provided that sets all parameters correctly. 31 | 32 | ### Running the calibration procedure 33 | 34 | The calibration is performed by sending the images from the RGB camera and the output of the `vFramerLite` to the stereo-calibration module found in `icub-main`, in this case using the `YARP` framework. A `yarpmanager` application is available to automatically set up the required applications and connections: 35 | 36 | * see [documentation](../README.md) to set up the software and run a `yarpserver`. 37 | * open a `yarpmanager` (e.g. from the command line) 38 | * in the GUI click open and browse to the location where you have saved the [application file](stereoCalib.xml) 39 | * In the entities find `stereoCalib`, double-click, then click `run all` and `connect all`. 40 | * Check the `--from` arguments on the relevant modules and point them to the files for the [calibration parameters](stereoCalib.ini) and (if using) the [realsense parameters](RealSense_conf.ini) 41 | * Open a terminal and run a remote procedure call: `yarp rpc /stereoCalib/cmd` 42 | * Send the command: `>>start` 43 | * Move the flashing checkerboard in front of both cameras; it should be apparent when a checkerboard is found, and the output of both cameras is visualised so the images can be debugged. 44 | 45 | :bulb: The calibration between a frame-based camera and an event-driven camera also works when the two cameras have different resolutions. 46 | 47 | ### Setting calibration parameters 48 | 49 | The `stereoCalib` module uses the information in the `stereoCalib.ini` file. 50 | Please check the parameters carefully. 51 | 52 | * standalone: required to run without the iCub kinematic chain 53 | * boardWidth: number of corners to detect horizontally 54 | * boardHeight: number of corners to detect vertically 55 | * boardSize: the length in metres of a checkerboard square. :warning: as the physical size will change depending on the screen/tablet resolution and size, this will need to be measured on the device used. 56 | * numberOfPairs: how many image pairs to collect 57 | 58 | ### Running a mono calibration 59 | 60 | Follow the above procedure with the following adjustments: 61 | 62 | * do not run the RGB camera, only the event-camera. Send only the `vFramerLite` output to the camera calibration software 63 | * in the `stereoCalib.ini` file set the parameter `MonoCalib 1` (i.e. perform only a mono camera calibration) 64 | 65 | ### Running a stereo event-camera calibration 66 | 67 | To run a stereo event-camera calibration, both inputs to the `stereoCalib` module need to come from the `vFramerLite`: 68 | 69 | * set the `vFramerLite` parameters to: `--displays "(/right (BLACK) /left (BLACK))" --frameRate 30 --eventWindow 0.034` 70 | * modify the port connection settings to send events to the input of `vFramerLite` appropriately and send the image outputs to the `stereoCalib` module (a sketch of the resulting connections is given below).
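For the event-stereo case the change amounts to duplicating the right-camera chain for the left camera. The sketch below mirrors the right-camera connections already listed in the provided [application file](stereoCalib.xml); the left-hand port names are assumptions and depend on how `vPreProcess` and `vFramerLite` are configured on your system.

```bash
# right channel (as already connected by the provided application)
yarp connect /vPreProcess/right:o /vFramer/right/AE:i fast_tcp
yarp connect /vFramer/right/image:o /stereoCalib/cam/right:i fast_tcp

# left channel (assumed port names - check your vPreProcess/vFramerLite set-up)
yarp connect /vPreProcess/left:o /vFramer/left/AE:i fast_tcp
yarp connect /vFramer/left/image:o /stereoCalib/cam/left:i fast_tcp
```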
71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /documentation/calibration/supervisor.md: -------------------------------------------------------------------------------- 1 | # camera-calibration-supervisor 2 | 3 | This README is a guide to use the [camera-calibration-supervisor 4 | ](https://github.com/robotology/camera-calibration-supervisor) for the ATIS cameras mounted on iCub. If you need more information just have a look to the original repo! 5 | 6 | 7 | The stereo calibration or the mono calibration allows obtaining intrinsic and extrinsic parameters removing distortions from the lenses. 8 | The camera-calibration-supervisor is a user-friendly framework that identifies the best chessboard poses guiding you into the next chessboard position. This set of positions are the best set to obtain a good calibration. 9 | 10 |

11 | 12 |

13 | 14 | 15 | ## How to set up your calibration on the robot 🤖 16 | 17 | ### Requirements 18 | * Laptop with the installed camera-calibration-supervisor app: icub23 19 | * Checkerboard-fast: SAMSUNG tablet 20 | * iCub's eyes vergence ~ 5 21 | 22 | Let's start with the calibration on the robot! 23 | 24 | * First of all you need to turn on the iCub's motors and CPU by clicking the buttons on the side of the iCub's backpack. 25 | Then, run `yarpmanager` and run `icub-head`,`icub-zynq` & `icub-23/24`from the Cluster. 26 | 27 | * From Entities search for`iCubStartup-eventdriven` application and run: 28 | 29 | * `yarplogger` to check any error occurring using the robot. 30 | * `yarprobotinterface` to run the robot. 31 | * Do not forget to shift iCub's eyes vergence to ~ 5 (`yarpmotorgui` from the terminal, /head, JOINT5) 32 | 33 | * From Entities search for `vViewCamCalibSupervisor` application, run and connect everything. 34 | 35 | 36 | * From Entities search for `Event_Cam_Calib_Supervisor_App` application and run all the `yarpviews`. 37 | * Then, from a terminal run `stereoCalib --from icubEyes_ATIS-board-gazebo.ini`. 38 | * Finally, run `calibSupervisor` from the `Event_Cam_Calib_Supervisor_App` application and connect everything. 39 | 40 | Please, make sure to check the .ini file. It should look like the one below: 41 | 42 | ```ruby 43 | [CAMERA_CALIBRATION_RIGHT] 44 | 45 | projection pinhole 46 | drawCenterCross 0 47 | 48 | w 304 49 | h 240 50 | fx 195.625 51 | fy 195.719 52 | cx 141.748 53 | cy 131.03 54 | k1 -0.341102 55 | k2 0.0972876 56 | p1 -0.00761546 57 | p2 0.00759488 58 | 59 | [CAMERA_CALIBRATION_LEFT] 60 | 61 | projection pinhole 62 | drawCenterCross 0 63 | 64 | w 304 65 | h 240 66 | fx 179.823 67 | fy 178.975 68 | cx 157.431 69 | cy 114.611 70 | k1 -0.304171 71 | k2 0.193928 72 | p1 -0.000112923 73 | p2 -0.0111818 74 | 75 | [STEREO_DISPARITY] 76 | HN (0.999425 0.00522887 -0.0335127 0.096062 -0.00676219 0.998928 -0.0458046 -0.0046386 0.0332373 0.0460048 0.998388 0.0465875 0 0 0 1) 77 | QL (-0.000767 -0.000192 -0.026078 0.003068 -0.003068 -0.001534 -0.000479 0.043191) 78 | QR (-0.000767 -0.000192 -0.026078 0.003068 -0.003068 -0.001534 -0.000479 -0.043191) 79 | 80 | [STEREO_CALIBRATION_CONFIGURATION] 81 | boardWidth 8 82 | boardHeight 6 83 | boardSize 0.021675 84 | numberOfPairs 30 85 | ``` 86 | 87 | * Open a yarp rpc port: `yarp rpc /stereoCalib/cmd` and type `start`. You should get something like this: 88 | ``` 89 | >>start 90 | Response: "Starting Calibration..." 91 | ``` 92 | This step should be handled by `stereoCalib`, but if you want to monitor the ongoing process I advise you to run `stereoCalib` from a terminal looking at what the application returns. 93 | * Now, your calibration is ready to start. Therefore, take your chessboard and put the `yarpviews` (/display, /viewleft and /viewright) visible whilst you are moving your board. The calibration process needs to collect several good positions for the board that is equal to the `numberOfPairs` of the .ini file. Please, be sure `stereoCalib` is collecting the images writing: `[INFO] Saving images number #` 94 | 95 | * Once you have collected all the positions is the time to obtain the intrinsic and extrinsic parameters running the script: 96 | ```ruby 97 | modify-params.sh icubEyes_ATIS-board-gazebo.ini outputCalib.ini cameraCalibration 98 | ``` 99 | This script will overwrite the results on the initial .ini file. 
You should obtain something like this: 100 | ```ruby 101 | Using file /home/icub/.local/share/yarp/robots/iCubGenova02/icubEyes_ATIS-board-gazebo.ini 102 | stereoCalib writes the following file: /home/icub/.local/share/yarp/contexts/cameraCalibration/outputCalib.ini 103 | 104 | Running script...with params /home/icub/.local/share/yarp/robots/iCubGenova02/icubEyes_ATIS-board-gazebo.ini /home/icub/.local/share/yarp/contexts/cameraCalibration/outputCalib.ini cameraCalibration 105 | 106 | 107 | Script completed successfully... 108 | ``` 109 | 110 | * Let's now check our calibration! Run `Event_Calib_Cameras` and connect everything. You should now be able to see the chessboard with no distortion! 111 | 112 |

113 | 114 |

115 | 116 | 117 | 118 | -------------------------------------------------------------------------------- /documentation/calibration/video_checkboard.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/robotology/event-driven/7dbd368ab2028fe645b0ed7ccdede10685852b56/documentation/calibration/video_checkboard.mp4 -------------------------------------------------------------------------------- /documentation/camera_dumping.md: -------------------------------------------------------------------------------- 1 | ### Dockerfiles for easy install 2 | 3 | ### Instructions for saving data 4 | Look at your namespace to not cause conflicts with others: 5 | `yarp namespace` 6 | 7 | And set your personal one: 8 | `yarp namespace /` 9 | 10 | Where the conf file is: 11 | `yarp conf` 12 | 13 | Look at the IP currently stored in the conf file: 14 | `cat $(yarp conf)` 15 | 16 | Look at the IP of your machine and note the local ethernet (try not using wifi when possible): 17 | `ip -c -h a` 18 | 19 | Set the yarp IP yarpserver will connect to: 20 | `yarp conf 10000` 21 | 22 | Type: 23 | `yarpserver` 24 | 25 | Open a new terminal and run the camera: 26 | `atis-bridge-sdk --help` 27 | 28 | If you are using gen3, use ubuntu 20, so check your version: 29 | `lsb-release -a` 30 | 31 | Check the camera port: 32 | `yarp name list` 33 | 34 | Visualise the data: 35 | `vFramer --src /atis3/AE:o --iso --width 640 --height 480` 36 | 37 | You can visualise different representations, so type the following for the available options: 38 | `vFramer --help` 39 | 40 | To start recording data, open: 41 | `yarpdatadumper --help` 42 | 43 | An example is, not creating latencies: 44 | `yarpdatadumper --txTime --name /left --dir /dump/experiment/ATIS-left` 45 | 46 | Twice if you want stereo: 47 | `yarpdatadumper --txTime --name /right --dir /dump/experiment/ATIS-right` 48 | 49 | Connect to the port to stream events in the recording ports: 50 | `yarp connect /atis3/AE:o /left fast_tcp && yarp connect /atis3/AE:o /right fast_tcp` 51 | 52 | Disconnect or CTRL-C in the dumper: 53 | `yarp disconnect /atis3/AE:o /left && yarp disconnect /atis3/AE:o /right` 54 | 55 | Playback data having checked yarp was installed with Qt dapendency: 56 | `yarpdataplayer` 57 | 58 | Open the folder at the level it contains ATIS and ATIS2, not inside each folder. Once loaded, press play. 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /documentation/checkingcalibration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/robotology/event-driven/7dbd368ab2028fe645b0ed7ccdede10685852b56/documentation/checkingcalibration.png -------------------------------------------------------------------------------- /documentation/connect_to_zcb.md: -------------------------------------------------------------------------------- 1 | # Connect to a ZCB or z-turn 2 | 3 | You have a ZCB that is powered on and sitting in front of you and you want to read events from the camera/skin/cochlea that is attached to it. We want to make an `ssh` connection to it, connect to the `yarpserver`, and run `zynqGrabber`. Here are the steps: 4 | 5 | First you are going to make a serial connection so you can configure the network connection. You need the network for `YARP` to function. 
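As an overview, once the network is configured (the sections below explain how), a typical session looks roughly like the sketch below. The address is a placeholder, and the individual commands are explained in [setup_yarpserver.md](setup_yarpserver.md) and [zynqGrabber.md](zynqGrabber.md).

```bash
# from your laptop, once the board has an IP address (placeholder shown)
ssh icub@<board-ip>

# on the board, inside the ssh session
yarp conf <board-ip> 10000   # point yarp at the nameserver address
yarpserver &                 # run the nameserver in the background
zynqGrabber                  # start streaming events onto the network
```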
6 | 7 | ## Serial connection over (ubuntu instructions) 8 | * Plug in a micro-usb cable from your laptop to the ZCB. Wait! There are two usb ports on the ZCB. Use the one that says uart next to it - it should be on the opposite side to the sd-card port. 9 | ```bash 10 | sudo apt install screen 11 | sudo screen /dev/ttyUSB0 115200 12 | ``` 13 | The login and password should be given to you by EDPR-IIT. 14 | 15 | Okay now you have a terminal *inside the ZCB!* You need to decide how you are going to connect the ZCB to the network: 16 | 17 | To exit `screen` do `ctrl+a` to open the list of commands,, then press `k`, then `y` to close the session. 18 | 19 | ### Option 1: External network 20 | 21 | Connect both your own laptop and the ZCB to the same local network using an ethernet cable. The router should allocate the IP addresses and, if the network has internet connection, the ZCB should have connection too. 22 | 23 | :warning: The IP address might change from time-to-time. 24 | 25 | Set-up the ZCB with a DHCP connection. 26 | 27 | ### Option 2: External network with Static IP 28 | 29 | Connect both your own laptop and the ZCB to the same local network using an ethernet cable. Ask your network administrator for an available address to assign to the ZCB. It will have internet connectivety and the IP won't change. 30 | 31 | Set-up the ZCB with a static ip. 32 | 33 | ### Option 3: Ad-hoc with Static IP 34 | 35 | Connect your laptop ethernet directly to the ZCB ethernet port. The ZCB won't have internet connection, so if you need to configure the install/update the software, this option isn't valid - but it could be the easiest for a demo once you know the ZCB is already working. 36 | 37 | You'll need to set your own laptop as well as the ZCB to have a static IP. 38 | 39 | ### Option 4: Ad-hoc with Internet Connection Sharing 40 | 41 | Connect your laptop ethernet directly to the ZCB ethernet port. With Internet connection sharing you should be able to share your laptops wifi connection to the ZCB over the ethernet. Your laptop assigns the IP so it can change every time you connect. To do so use run `nm-connection-editor` from terminal, press the `+` button to add a new connection, configure it to be ethernet and under `IPv4 Settings` use the "Shared to other computers" method. Name your connection something informative e.g. "ZCB connection". 42 | 43 | Set the ZCB to a dynamic IP address 44 | 45 | ## Setting the IP address of the ZCB 46 | 47 | On the `screen` connection to the ZCB do the following: 48 | ```bash 49 | nano /etc/network/interfaces 50 | ``` 51 | Add the following lines depending on the IP type: 52 | - **Dynamic IP** 53 | ```bash 54 | auto eth0 55 | iface eth0 inet dhcp 56 | hwaddress ether 00:0a:35:00:01:X 57 | ``` 58 | - **Static IP** 59 | ```bash 60 | auto eth0 61 | iface eth0 inet static 62 | hwaddress ether 00:0a:35:00:01:X 63 | address 64 | netmask 255.255.255.0 65 | ``` 66 | 67 | Note: X here needs to be set such that each ZCB/z-turn you have on the same network has a different hardware address. It should be 2 letters in hex (i.e. 00 to FF) 68 | 69 | Save and exit `nano`. 70 | 71 | ```bash 72 | sudo ifdown eth0 73 | sudo ifup eth0 74 | ifconfig 75 | ``` 76 | 77 | The `` of the board should be reported. Take note so you can connect to the board over SSH. 78 | 79 | ## SSH connection 80 | 81 | On your laptop 82 | ```bash 83 | ping 84 | ``` 85 | if a connection is found 86 | ``` 87 | ssh icub@ 88 | ``` 89 | The password should be given to you by EDPR-IIT. 
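For reference, here is a filled-in version of the static configuration from the section above, using purely hypothetical values (choose an address agreed with your network administrator, and a unique final hex byte for the hardware address):

```bash
# hypothetical example only - substitute your own values
auto eth0
iface eth0 inet static
hwaddress ether 00:0a:35:00:01:2a
address 192.168.1.150
netmask 255.255.255.0
```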
90 | -------------------------------------------------------------------------------- /documentation/datasets.md: -------------------------------------------------------------------------------- 1 | # Datasets 2 | 3 | Event-driven datasets contain the stream of events captured from a single or stereo event cameras. Each dataset can be played using the _yarpdataplayer_. Once the dataset is loaded in the _yarpdataplayer_, the specified output port (default=`/zynqGrabber/AE:o`) is opened and can be connected to any event-driven processing module, the event-driven pre-processing module (_vPreProcess_), or directly to the event-driven visualizer (_vFramerLite_). 4 | 5 | Each dataset may also contain: 6 | 7 | * other sensors on the iCub (also re-playable with the _yarpdataplayer_), 8 | * ground truth measurements, given the task (_e.g._ ball locations, corner locations), and 9 | * Matlab scripts to generate the result figures as in the relevant papers. 10 | 11 | Datasets recorded with the DVS and ATIS have the following properties which must be set correctly in the module parameters (_i.e._ width and height) and the CMake options (_i.e._ `VLIB_CODEC_TYPE`, `VLIB_TIMER_BITS` and `VLIB_CLOCK_PERIOD_NS`), to interpret the data: 12 | 13 | | Sensor Type | width | height | `VLIB_CODEC_TYPE` | `VLIB_TIMER_BITS` | `VLIB_CLOCK_PERIOD_NS` | 14 | | ----------- | ----- | ------ | ----------------- | ----------------- | ---------------------- | 15 | | DVS | 128 | 128 | CODEC_128x128 | 24 | 128 | 16 | 17 | | Sensor Type | width | height | `VLIB_CODEC_TYPE` | `VLIB_TIMER_BITS` | `VLIB_CLOCK_PERIOD_NS` | 18 | | ----------- | ----- | ------ | ----------------- | ----------------- | ---------------------- | 19 | | ATIS_20 | 304 | 240 | CODEC_304x240_20 | 24 | 80 | 20 | 21 | | Sensor Type | width | height | `VLIB_CODEC_TYPE` | `VLIB_TIMER_BITS` | `VLIB_CLOCK_PERIOD_NS` | 22 | | ----------- | ----- | ------ | ----------------- | ----------------- | ---------------------- | 23 | | ATIS_24 | 304 | 240 | CODEC_304x240_24 | 30 | 80 | 24 | 25 | ## Available Datasets 26 | 27 | ### Sample Dataset 28 | * simple dataset to visualise 29 | * ATIS_24 30 | 31 | [download](https://doi.org/10.5281/zenodo.2556755) 32 | 33 | ### Ball Detection and Tracking 34 | * 2 Datasets = hand-move, eye-move 35 | * DVS 36 | * ground truth supplied 37 | 38 | [download](https://figshare.com/s/0abd8f18312bec15b121) 39 | 40 | ### Corner Detection 41 | * 2 Datasets = checkboard (multiple different speeds and angles), naturalscene 42 | * DVS 43 | * ground truth supplied for naturalscene 44 | 45 | [download](https://figshare.com/s/0abd8f18312bec15b121) 46 | 47 | ### VVV18-EVENTDRIVEN-DATASET 48 | 49 | * 3 Datasets = 1, 2, 3 with different motions of the object and the robot 50 | * robot encoder positions for head and torso supplied, stereo ATIS output, stereo RGB camera supplied 51 | * scripts to recreate the movement of the robot supplied 52 | * ATIS_20 53 | 54 | [download](https://figshare.com/s/0abd8f18312bec15b121) 55 | 56 | ### Parallel Visual Tracking 57 | 58 | * 10 datasets with moving circular target 59 | * contains ground truth and outputs of tracking algorithm run on the CPU and on the SpiNNAker 60 | * ATIS_24 61 | 62 | [download](https://doi.org/10.5281/zenodo.2556755) 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /documentation/docker.md: -------------------------------------------------------------------------------- 1 | ## Setup docker environment 2 | ### Check udev rules in your 
host environment 3 | ``` 4 | ls /etc/udev/rules.d -alh 5 | 6 | # If you don't have `88-cyusb.rule` and `99-evkv2.rules` in this folder, 7 | # Add them with following commands 8 | # Otherwise, you can skip these commands 9 | sudo wget -P /etc/udev/rules.d https://raw.githubusercontent.com/prophesee-ai/openeb/main/hal_psee_plugins/resources/rules/88-cyusb.rules 10 | sudo wget -P /etc/udev/rules.d https://raw.githubusercontent.com/prophesee-ai/openeb/main/hal_psee_plugins/resources/rules/99-evkv2.rules 11 | 12 | # Reload udev rules in your host environment 13 | sudo udevadm control --reload-rules 14 | sudo udevadm trigger 15 | ``` 16 | 17 | ### Check hardware configuration 18 | Firmware 19 | - To use later version of Metavision SDK than V3.1.2, the Firmware of EVK3 and EVK4 is required to be at least in version 3.9 20 | - Check this [Release Notes](https://docs.prophesee.ai/stable/release_notes.html#v3-1-2-change-logs-16-12-2022) 21 | 22 | Ubuntu version 23 | - To use Ubuntu 22.04, Metavision SDK should be at least in version 4.0 24 | - This means firmware should be at least in version 3.9 25 | - The earlier version of Metavision SDK than 4.0 does not support Ubuntu 22.04. 26 | - Should use Ubuntu 20.04 27 | 28 | ### Build docker image 29 | Environment: Ubuntu 20.04 + Metavision SDK 3.0 + EVK firmware (< 3.9) 30 | ``` 31 | docker build --build-arg UID=$(id -u) --build-arg GID=$(id -g) -f Dockerfile_Ubuntu2004 -t event-driven:ubuntu20.04 . 32 | ``` 33 | Environment: Ubuntu 22.04 + Metavision SDK 4.6 + EVK firmware (>= 3.9) 34 | ``` 35 | docker build --build-arg UID=$(id -u) --build-arg GID=$(id -g) -f Dockerfile_Ubuntu2204 -t event-driven:ubuntu22.04 . 36 | ``` 37 | - Input current User ID and Group ID into Docker environment 38 | 39 | ### Run and enter docker container 40 | ``` 41 | docker run -it --privileged --network host -v /tmp/.X11-unix/:/tmp/.X11-unix -v /dev/bus/usb:/dev/bus/usb -e DISPLAY=unix$DISPLAY --name event-driven event-driven:ubuntu20.04 42 | ``` 43 | - To use Ubuntu 22.04, change the last part of command to `event-driven:ubuntu22.04`. 44 | 45 | ### Open X Server for docker environment 46 | ``` 47 | xhost local:docker 48 | ``` 49 | -------------------------------------------------------------------------------- /documentation/eventcodecs.md: -------------------------------------------------------------------------------- 1 | # Event Coding 2 | 3 | Events are serialised and coded in a standardised format for sending and receiving between modules. In addition, a packet containing multiple different types of events is segmented by event-type such that a search can quickly retrieve events of only a specific type. The packet is formed as such: 4 | ``` 5 | EVENTTYPE-1-TAG ( serialised and concatenated events of type 1) EVENTTYPE-2-TAG ( serialised and concatenated events of type 2) ... 6 | ``` 7 | Each event class defines the TAG used to identify itself and also the method with which the event data is serialised. Managing the serialisation and de-serialisation of the event data is then simply a case of using the event class to write/read its TAG and then call its encode/decode functions on the serialised data. The `eventdriven::vBottle` class handles the coding of packets in the event-driven project. 8 | 9 | Events are defined in a class hierarchy, with each child class calling its parent encode/decode function before its own. 
Adding a new event therefore only requires defining the serialisation method for any new data that the event-class contains (_e.g._ the **Flow** event only defines how the velocities are encoded and calls its parent class, the **AdressEvent**, to encode other information, such as position and timestamp). 10 | 11 | ![The Event-Type Class Hierarchy](@ref classev_1_1vEvent.png) 12 | 13 | # Event Coding Definitions 14 | 15 | The **vEvent** uses 4 bytes to encode a timestamp (_T_) 16 | ``` 17 | [10000000 TTTTTTT TTTTTTTT TTTTTTTT] 18 | ``` 19 | An **AddressEvent** uses 4 bytes to encode position (_X_, _Y_), polarity (_P_) and channel (_C_). Importantly, as AddressEvent is of type `vEvent` the timestamp information of this event is always encoded as well. 20 | ``` 21 | [00000000 00000000 CYYYYYYY XXXXXXXP] 22 | ``` 23 | or, if the **VLIB_10BITCODEC** CMake flag is set **ON** (used for the ATIS camera) the AddressEvent is encoded as: 24 | ``` 25 | [00000000 000C00YY YYYYYYXX XXXXXXXP] 26 | ``` 27 | 28 | A **FlowEvent** uses 8 bytes to encode velocity (ẋ, ẏ), each 4 bytes represent a _float_. Similarly as FlowEvent is of time AddressEvent the FlowEvent also encodes all the position and timestamp information above. 29 | ``` 30 | [ẋẋẋẋẋẋẋẋ ẋẋẋẋẋẋẋẋ ẏẏẏẏẏẏẏẏ ẏẏẏẏẏẏẏẏ] 31 | ``` 32 | A **LabelledAE** is labelled as belonging to a group ID (_I_) using a 4 byte _int_. 33 | ``` 34 | [IIIIIIIII IIIIIIIII IIIIIIIII IIIIIIIII] 35 | ``` 36 | A **GaussianAE** extends a cluster event with a 2 dimensional Gaussian distribution parameterised by (_sx_, _sy_, _sxy_) using a total of 12 bytes. 37 | ``` 38 | [sxsxsxsxsxsxsxsx sxsxsxsxsxsxsxsx sxsxsxsxsxsxsxsx sxsxsxsxsxsxsxsx sysysysysysysysy sysysysysysysysy sysysysysysysysy sysysysysysysysy sxysxysxysxysxysxysxysxy sxysxysxysxysxysxysxysxy sxysxysxysxysxysxysxysxy sxysxysxysxysxysxysxysxy] 39 | ``` 40 | A **CochleaEvent** uses 4 bytes to encode frequency channel (_F_), polarity (_P_), neuron id of the Jeffress model (_N_), the olive model (_O_), the auditory model (_M_), and channel (_C_). Importantly, as CochleaEvent is of type `vEvent` the timestamp information of this event is always encoded as well. 41 | ``` 42 | [00000100 0C100NNN NNNNRRMO FFFFFFFP] 43 | ``` 44 | 45 | # Coding in YARP 46 | 47 | The `eventdriven::vBottle` class wraps the encoding and decoding operations into a `yarp::os::Bottle` such that an example `vBottle` will appear as: 48 | ``` 49 | AE (-2140812352 15133 -2140811609 13118) FLOW (-2140812301 13865 -1056003417 -1055801578) 50 | ``` 51 | **NOTE:** The actual data sent by YARP for a bottle includes signifiers for data type and data length, adding extra data to the bottle as above. 
52 | 53 | ``` 54 | 256 4 4 2 'A' 'E' 257 4 -2140812352 15133 -2140811609 13118 4 4 'F' 'L' 'O' 'W' 257 4 -2140812301 13865 -1056003417 -1055801578 55 | ``` -------------------------------------------------------------------------------- /documentation/example-module-py/example-module.py: -------------------------------------------------------------------------------- 1 | import yarp 2 | import sys 3 | import numpy as np 4 | import threading 5 | import cv2 6 | 7 | 8 | class ExampleModule(yarp.RFModule): 9 | 10 | def __init__(self): 11 | yarp.RFModule.__init__(self) 12 | self.image = np.zeros((240, 304)) 13 | self.image_buf = np.zeros((240, 304)) 14 | self.input_port = yarp.BufferedPortBottle() 15 | self.rpc_port = yarp.RpcServer() 16 | cv2.namedWindow("events", cv2.WINDOW_NORMAL) 17 | self.mutex = threading.Lock() 18 | 19 | def configure(self, rf): 20 | # set the module name used to name ports 21 | self.setName((rf.check("name", yarp.Value("/exampleModule")).asString())) 22 | 23 | # open io ports 24 | if not self.input_port.open(self.getName() + "/AE:i"): 25 | print("Could not open input port") 26 | return False 27 | self.input_port.setStrict() 28 | 29 | if not self.rpc_port.open(self.getName() + "/rpc"): 30 | print("Could not open rpc port") 31 | return False 32 | self.attach_rpc_server(self.rpc_port) # rpc port receives command in the respond method 33 | 34 | # read flags and parameters 35 | example_flag = rf.check("example_flag") and rf.check("example_flag", yarp.Value(True)).asBool() 36 | default_value = 0.1 37 | example_parameter = rf.check("example_parameter", yarp.Value(default_value)).asFloat64() 38 | 39 | # do any other set-up required here 40 | # start the asynchronous and synchronous threads 41 | threading.Thread(target=self.run).start() 42 | 43 | return True 44 | 45 | def respond(self, command, reply): 46 | # Add any command you want to receive from rpc here 47 | print(command.toString()) 48 | reply.addString('ok') 49 | return True 50 | 51 | def getPeriod(self): 52 | return 0.03 # period of synchronous thread, return 0 update module called as fast as it can 53 | 54 | def interruptModule(self): 55 | # interrupting all the ports 56 | self.input_port.interrupt() 57 | self.rpc_port.interrupt() 58 | return True 59 | 60 | def close(self): 61 | # closing ports 62 | self.input_port.close() 63 | self.rpc_port.close() 64 | cv2.destroyAllWindows() 65 | return True 66 | 67 | def updateModule(self): 68 | # synchronous update called every get period seconds. 69 | 70 | # Put visualization, debug prints, etc... here 71 | cv2.imshow("events", self.image) 72 | cv2.waitKey(10) 73 | return True 74 | 75 | def run(self): 76 | # asynchronous thread runs as fast as it can 77 | while not self.isStopping(): 78 | reads = max(1, self.input_port.getPendingReads()) # Make sure that we try to read at least one packet 79 | 80 | if reads > 10: 81 | print("WARNING! The size of input port buffer is ", reads, " elements long. This may cause lags.") 82 | 83 | self.image_buf.fill(0.5) 84 | 85 | for i in range(reads): 86 | bot = self.input_port.read() 87 | # Data in the bottle is organized as ( .... 
) 88 | vType = bot.get(0).asString() 89 | if vType != "AE": 90 | continue 91 | event_bottle = np.array(bot.get(1).toString().split(' ')).astype(int).reshape(-1, 2) 92 | timestamps = event_bottle[:, 0] 93 | events = event_bottle[:, 1] 94 | x = events >> 12 & 0xFF 95 | y = events >> 1 & 0x1FF 96 | pol = events & 0x01 97 | self.image_buf[x, y] = pol 98 | 99 | self.mutex.acquire() 100 | self.image = self.image_buf.copy() # self.image is a shared resource between threads 101 | self.mutex.release() 102 | 103 | 104 | if __name__ == '__main__': 105 | # Initialise YARP 106 | yarp.Network.init() 107 | if not yarp.Network.checkNetwork(2): 108 | print("Could not find network! Run yarpserver and try again.") 109 | exit(-1) 110 | 111 | # prepare and configure the resource finder 112 | rf = yarp.ResourceFinder() 113 | rf.setVerbose(False) 114 | rf.setDefaultContext("eventdriven") 115 | rf.setDefaultConfigFile("exampleModule.ini") 116 | rf.configure(sys.argv) 117 | 118 | # create the module 119 | module = ExampleModule() 120 | module.runModule(rf) 121 | -------------------------------------------------------------------------------- /documentation/example-module/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # requires minimum cmake version 2 | cmake_minimum_required(VERSION 3.5) 3 | 4 | # produce the cmake var PROJECT_NAME 5 | project(example-module) 6 | 7 | include(GNUInstallDirs) 8 | if(NOT CMAKE_CONFIGURATION_TYPES) 9 | if(NOT CMAKE_BUILD_TYPE) 10 | message(STATUS "Setting build type to 'Release' as none was specified.") 11 | set_property(CACHE CMAKE_BUILD_TYPE PROPERTY VALUE "Release") 12 | endif() 13 | endif() 14 | 15 | find_package(YCM REQUIRED) 16 | include(AddInstallRPATHSupport) 17 | add_install_rpath_support(BIN_DIRS "${CMAKE_INSTALL_FULL_BINDIR}" 18 | LIB_DIRS "${CMAKE_INSTALL_FULL_LIBDIR}" 19 | INSTALL_NAME_DIR "${CMAKE_INSTALL_FULL_LIBDIR}" 20 | USE_LINK_PATH) 21 | 22 | 23 | 24 | find_package(YARP COMPONENTS os sig math dev REQUIRED) 25 | find_package(event-driven REQUIRED) 26 | 27 | #default the install location to that of event-driven 28 | 29 | add_executable(${PROJECT_NAME} ${PROJECT_NAME}.cpp) 30 | 31 | target_link_libraries(${PROJECT_NAME} PRIVATE YARP::YARP_os 32 | YARP::YARP_init 33 | ev::event-driven) 34 | 35 | install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_BINDIR}) 36 | 37 | #install .ini files and .xml.template files to share event-driven 38 | yarp_install(FILES ${PROJECT_NAME}.ini 39 | DESTINATION ${EVENT-DRIVEN_CONTEXTS_INSTALL_DIR}/${PROJECT_NAME}) 40 | yarp_install(FILES app_${PROJECT_NAME}.xml 41 | DESTINATION ${EVENT-DRIVEN_APPLICATIONS_INSTALL_DIR}) 42 | 43 | option(ADD_DOCS_TO_IDE "Add apps/documentation to IDE" OFF) 44 | if(ADD_DOCS_TO_IDE) 45 | add_custom_target(${PROJECT_NAME}_docs SOURCES app_${PROJECT_NAME}.xml ${PROJECT_NAME}.ini README.md) 46 | endif() 47 | 48 | #uninstall target? 49 | -------------------------------------------------------------------------------- /documentation/example-module/README.md: -------------------------------------------------------------------------------- 1 | # example-module 2 | 3 | Add information about how your module works! 
4 | -------------------------------------------------------------------------------- /documentation/example-module/app_example-module.xml: -------------------------------------------------------------------------------- 1 | 2 | example-module 3 | 4 | 5 | 6 | 7 | 8 | vPreProcess 9 | --flipx --flipy --split_stereo 10 | localhost 11 | 12 | 13 | 14 | example-module 15 | --name /example-module --example_flag true --example_parameter 0.01 16 | localhost 17 | 18 | 19 | 20 | vFramerLite 21 | --displays "(/viewer (AE ISO))" 22 | localhost 23 | 24 | 25 | 26 | yarpview 27 | --name /view_out --synch 28 | localhost 29 | 30 | 31 | 32 | 33 | /zynqGrabber/AE:o 34 | /vPreProcess/AE:i 35 | fast_tcp 36 | 37 | 38 | 39 | /vPreProcess/left:o 40 | /example-module/AE:i 41 | fast_tcp 42 | 43 | 44 | 45 | /example-module/AE:o 46 | /vFramer/viewer/AE:i 47 | fast_tcp 48 | 49 | 50 | 51 | /vFramer/viewer/image:o 52 | /view_out 53 | fast_tcp 54 | 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /documentation/example-module/example-module.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2022 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 
17 | */ 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | using namespace ev; 26 | using namespace yarp::os; 27 | 28 | class exampleModule : public RFModule { 29 | 30 | private: 31 | 32 | //yarp ports 33 | ev::window input_port; 34 | ev::BufferedPort output_port; 35 | 36 | //threads 37 | std::mutex m; 38 | std::condition_variable signal; 39 | std::thread event_thread; 40 | std::thread proc_thread; 41 | double current_ts{0.0}, previous_ts{0.0}; 42 | 43 | //internal data 44 | cv::Mat my_eros; 45 | bool example_flag; 46 | double example_parameter; 47 | 48 | public: 49 | 50 | exampleModule() {} 51 | 52 | virtual bool configure(yarp::os::ResourceFinder& rf) 53 | { 54 | if(rf.check("h") || rf.check("help")) 55 | { 56 | yInfo() << "help message"; 57 | yInfo() << "variable 1 : description"; 58 | } 59 | 60 | /* initialize yarp network */ 61 | if (!yarp::os::Network::checkNetwork(2.0)) { 62 | std::cout << "Could not connect to YARP" << std::endl; 63 | return false; 64 | } 65 | 66 | //set the module name used to name ports 67 | setName((rf.check("name", Value("/example-module")).asString()).c_str()); 68 | 69 | //open io ports 70 | if(!input_port.open(getName() + "/AE:i")) { 71 | yError() << "Could not open input port"; 72 | return false; 73 | } 74 | output_port.setWriteType(AE::tag); 75 | if(!output_port.open(getName() + "/AE:o")) { 76 | yError() << "Could not open input port"; 77 | return false; 78 | } 79 | 80 | //read flags and parameters 81 | example_flag = rf.check("example_flag") && 82 | rf.check("example_flag", Value(true)).asBool(); 83 | double default_value = 0.1; 84 | example_parameter = rf.check("example_parameter", 85 | Value(default_value)).asFloat64(); 86 | 87 | //do any other set-up required here 88 | cv::namedWindow(getName(), cv::WINDOW_NORMAL); 89 | cv::resizeWindow(getName(), cv::Size(640, 480)); 90 | event_thread = std::thread([this]event_process); 91 | proc_thread = std::thread([this]secondary_process); 92 | 93 | //start the asynchronous and synchronous threads 94 | return Thread::start(); 95 | } 96 | 97 | virtual double getPeriod() 98 | { 99 | return 1.0; //period of synchrnous thread 100 | } 101 | 102 | bool interruptModule() 103 | { 104 | //if the module is asked to stop ask the asynchrnous thread to stop 105 | input_port.close(); 106 | event_thread.join(); 107 | proc_thread.join(); 108 | return Thread::stop(); 109 | } 110 | 111 | //synchronous thread (threaded) critical function 112 | virtual bool updateModule() 113 | { 114 | cv::imshow(getName(), my_eros); 115 | cv::waitKey(1); 116 | 117 | //add any synchronous operations here, visualisation, debug out prints 118 | return Thread::isRunning(); 119 | } 120 | 121 | //critical (threaded) function 122 | void event_process() 123 | { 124 | while(!isStopping()) 125 | { 126 | ev::info stats = input_port.readAll(true); 127 | 128 | std::unique_lock lk(m); 129 | for(auto &v : input_port) 130 | ev::eros.update(); 131 | current_ts = stats.timestamp; 132 | lk.unlock(); 133 | signal.notify_one(); 134 | } 135 | } 136 | 137 | //critical (threaded) function 138 | void secondary_process() 139 | { 140 | while(!isStopping()) 141 | { 142 | //get the data structure 143 | std::unique_lock lk(m); 144 | signal.wait(lk, [this](return current_ts > previous_ts || isStopping());); 145 | if(isStopping()) break; 146 | previous_ts = current_ts; 147 | ev::eros.getSurface().copyTo(my_eros); 148 | lk.unlock(); 149 | 150 | //further processing 151 | 152 | //event-driven output if necessary 153 | ev::packet &output_packet = 
output_port.prepare(); 154 | output_packet.clear(); 155 | AE ae; 156 | output_packet.duration(); 157 | output_packet.timestamp(); 158 | output_packet.push_back(ae); 159 | output_port.write(); 160 | } 161 | } 162 | }; 163 | 164 | int main(int argc, char * argv[]) 165 | { 166 | /* prepare and configure the resource finder */ 167 | yarp::os::ResourceFinder rf; 168 | rf.configure( argc, argv ); 169 | 170 | /* create the module */ 171 | exampleModule instance; 172 | return instance.runModule(rf); 173 | } 174 | -------------------------------------------------------------------------------- /documentation/example-module/example-module.ini: -------------------------------------------------------------------------------- 1 | name /example-module 2 | example_flag false 3 | example_parameter 0.01 4 | -------------------------------------------------------------------------------- /documentation/example_module.md: -------------------------------------------------------------------------------- 1 | # Writing an example module for `event-driven` 2 | 3 | You are ready to write your own code that uses the tools in the `event-driven` library to process events. An example module is provided in [example_module](example_module/README.md) that you can use as the basis for writing a module that can be integrated into the `YARP` framework. 4 | 5 | The module has the following functionality: 6 | 7 | * Reading a `*.ini` file to load configuration options, and setting options via command line arguments. 8 | * Clean exit by capturing `ctrl+c` commands and allowing functions to close ports and clean memory. 9 | * Example `event-driven` ports for reading and writing events. 10 | * A synchronous thread, typically used to show a visualisation or status/debug messages at a readable rate. 11 | * An asynchronous thread, used to read events at sub-millisecond rates and perform processing as required. 12 | * An example CMake file for installing the generated binaries in the install location of `event-driven`, as well as installation of the configuration and application files in locations required by `YARP`. 13 | 14 | ## How to use the example-module 15 | 16 | First, copy the example files to the new location of your project, _e.g._ the same folder where you have cloned `event-driven`. Assuming a`` directory: 17 | ```bash 18 | cp -r /event-driven/documentation/example-module 19 | ``` 20 | 21 | The project can be compiled with modifications: 22 | ```bash 23 | cd /example-module 24 | mkdir build && cd build 25 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR 26 | make install -j4 27 | ``` 28 | 29 | However, some modifications should be made to personalise the project. The first step would be to change the folder, file and project name: 30 | ```bash 31 | cd 32 | mv example-module 33 | mv /example-module.cpp /.cpp 34 | mv example-module.ini .ini 35 | mv app_example-module.xml app_.xml 36 | nano CMakeLists.txt 37 | ``` 38 | On [line 5](https://github.com/robotology/event-driven/blob/99a1f941141b33266900e034d3e7789d55fd0d99/documentation/example-module/CMakeLists.txt#L5) change the project name to ``, then save (`ctrl+o`) and exit (`ctrl+x`). 
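If you prefer a non-interactive rename to editing with `nano`, a `sed` one-liner (analogous to the one shown below for the application xml) can rewrite the project name in one step; `<module-name>` and `<new-location>` are placeholders for your own names:

```bash
# hypothetical example - replace the placeholders with your module name and path
sed -i 's/example-module/<module-name>/g' <new-location>/<module-name>/CMakeLists.txt
```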
39 | ```bash 40 | nano /.cpp 41 | ``` 42 | On line [7](https://github.com/robotology/event-driven/blob/99a1f941141b33266900e034d3e7789d55fd0d99/documentation/example-module/example-module.cpp#L7), [19](https://github.com/robotology/event-driven/blob/99a1f941141b33266900e034d3e7789d55fd0d99/documentation/example-module/example-module.cpp#L19),[24](https://github.com/robotology/event-driven/blob/99a1f941141b33266900e034d3e7789d55fd0d99/documentation/example-module/example-module.cpp#L24), and [124](https://github.com/robotology/event-driven/blob/99a1f941141b33266900e034d3e7789d55fd0d99/documentation/example-module/example-module.cpp#L124) change the class, constructor and declaration to ``, then save (`ctrl+o`) and exit (`ctrl+x`). 43 | 44 | Change the first line of the default configuration file and the lines that refer 45 | to `example-module` in the example `yarpmanager` application. For example: 46 | 47 | ```bash 48 | nano /.ini 49 | nano /app_.xml 50 | ``` 51 | 52 | or if you like: 53 | 54 | ```bash 55 | sed -i 's/example-module//g' .xml 56 | ``` 57 | 58 | The module should now be personalised to your processing task. 59 | 60 | If you like, you can import the project into your favourite IDE. To do so, _e.g._ for [QtCreator](https://www.qt.io/product) *open a new project* by selecting `//CmakeLists.txt`. Select the kits `release` and `debug` modifying the build directory to `//build` and `//build-debug` respectively. You should now be able to edit, compile, run and debug the module from within QtCreator. 61 | -------------------------------------------------------------------------------- /documentation/full_installation.md: -------------------------------------------------------------------------------- 1 | # Comprehensive Installation 2 | 3 | We'll go through the recommended set-up of `event-driven` for a first time user of the `YARP` environment. 4 | The first step is to create a directory in which to install `YARP`, `event-driven` and the eventual modules you will write. For example: 5 | ```bash 6 | mkdir ~/yarp-install 7 | ``` 8 | Secondly, we want to set up some environment variables that will make the install go smoother. Use your favourite text editor to open `~/.bashrc` and add the following lines: 9 | ```bash 10 | export INSTALL_DIR=~/yarp-install 11 | export CMAKE_PREFIX_PATH=$CMAKE_PREFIX_PATH:$INSTALL_DIR 12 | export YARP_DATA_DIRS=$INSTALL_DIR/share/yarp:$INSTALL_DIR/share/event-driven 13 | export PATH=$PATH:$INSTALL_DIR/bin 14 | ``` 15 | and don't forget to `source ~/.bashrc` to apply the changes! 16 | 17 | Next we want to get the required repositories. Change directory into one in which you want these projects, for example: 18 | ```bash 19 | mkdir ~/projects && cd ~/projects 20 | ``` 21 | then, 22 | ```bash 23 | git clone https://github.com/robotology/YCM.git 24 | git clone https://github.com/robotology/yarp.git 25 | git clone https://github.com/robotology/event-driven.git 26 | ``` 27 | :warning: We are going to build the repositories in the above order too! However, first you might need some extra dependencies for `YARP`. One option is to go [here](http://wiki.icub.org/wiki/Linux:Installation_from_sources) and follow the _Getting all dependencies_ instructions (either installing dependencies yourself or adding to the `apt` path). 28 | 29 | :warning: the versions of each software that you checkout are current as of the writing of this tutorial. If a newer *stable* version of YCM or YARP is available it is recommended to upgrade to that version. 
Use `git ls-remote --tags origin` to see all versions available. 30 | 31 | Now you have dependencies, let's install `YCM`: 32 | ```bash 33 | cd ~/projects/YCM 34 | git checkout v0.10.4 35 | mkdir build && cd build 36 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR 37 | make install -j$(nproc) 38 | ``` 39 | 40 | Then let's install `YARP`: 41 | ```bash 42 | cd ~/projects/yarp 43 | git checkout v3.2.1 44 | mkdir build && cd build 45 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR 46 | make install -j$(nproc) 47 | ``` 48 | Finally, let's install `event-driven`: 49 | ```bash 50 | cd ~/projects/event-driven 51 | mkdir build && cd build 52 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR 53 | make install -j$(nproc) 54 | ``` 55 | 56 | [Continue with the tutorials](README.md) to test your installation, or: 57 | 58 | ### Install icub-main (optional) 59 | 60 | `icub-main` is used if you are using the iCub robot and want to enable some of the `event-driven` modules that control the robot. To install `icub-main` do: 61 | ```bash 62 | cd ~/projects 63 | git clone https://github.com/robotology/icub-main.git 64 | cd icub-main 65 | mkdir build && cd build 66 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR 67 | make install -j$(nproc) 68 | ``` 69 | -------------------------------------------------------------------------------- /documentation/howtosetupSD.md: -------------------------------------------------------------------------------- 1 | 2 | :warning: These instructions are the standard way to set-up the YARP/event-driven environment on an sd-card meant for the zcb or z-turn. If you already have a working sd-card you can simply copy the sd-card. You can find instructions for copying an sd-card at the [bottom](#how-copy-an-entire-sd-card-for-a-new-board). 3 | 4 | # How to set-up an SD for a zynq board 5 | 6 | :warning: These instructions are only needed if your SD card has a fresh OS install and the `event-driven` library is not yet compiled. These instructions shuold be performed on the ZCB/z-turn and not your own laptop. 7 | 8 | ## Set-up icub user 9 | 10 | As **root**: 11 | ```bash 12 | adduser icub 13 | groups icub 14 | visudo 15 | ``` 16 | Add line: `icub ALL=(ALL:ALL) ALL` 17 | 18 | ## Set up repositories 19 | 20 | As **icub**: 21 | - Follow the [installation instructions](full_installation.md) putting projects into `~/projects` and installing into `~/install`. You will need to make these folders in the home directory of icub. 22 | 23 | ### Note 1: 24 | As only basic `YARP` support is needed, not all dependencies are required to be installed, instead install only: 25 | ```bash 26 | sudo apt install [TODO] 27 | ``` 28 | ### Note 2: 29 | The newest YARP requires CMake>3.5, which is not installable via `apt` on the Debian 8.10 (jessie) distribution we have installed on the zynq. To upgrade CMake you need to install it via backports (reference: https://backports.debian.org/Instructions). 30 | 31 | :warning: these instructions are out of date (see this [solution](https://unix.stackexchange.com/questions/508724/failed-to-fetch-jessie-backports-repository)) 32 | 33 | To do so: 34 | - add to `/etc/apt/sources.list` the line below: 35 | ```bash 36 | deb http://ftp.debian.org/debian jessie-backports main 37 | ``` 38 | Then: 39 | ```bash 40 | sudo apt update 41 | sudo apt -t jessie-backports install cmake 42 | ``` 43 | At this point you should be able to recompile YARP 3.0 and `event-driven` **master** branch. 
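As a rough sketch of that recompile (assuming the repositories were cloned into `~/projects` and `$INSTALL_DIR` is set as in the [installation instructions](full_installation.md)):

```bash
cd ~/projects/yarp/build
cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR
make install -j$(nproc)

cd ~/projects/event-driven/build
cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR   # plus the zynqGrabber options given in Note 3 below
make install -j$(nproc)
```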
44 | 45 | **We can consider updating the Debian distribution of the zynq boards since the Debian 8.10 is no longer supported by YARP** 46 | 47 | ### Note 3: 48 | 49 | When installing `event-driven` use the following options for cmake: 50 | ```bash 51 | cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR -DBUILD_HARDWAREIO=ON -DENABLE_zynqgrabber=ON 52 | ``` 53 | ## Set up device drivers 54 | 55 | As **icub**: 56 | ```bash 57 | sudo usermod -a -G i2c icub 58 | sudo vim /lib/udev/rules.d/77-iit-hpu.rules 59 | ``` 60 | Add lines: 61 | ```bash 62 | SUBSYSTEM=="iit-hpu-class", GROUP="i2c" 63 | ``` 64 | Then 65 | ```bash 66 | sudo vim /etc/rc.local 67 | ``` 68 | Add lines: 69 | ```bash 70 | insmod /home/icub/iit-hpucore-dma.ko rx_pn=1024 rx_ps=8192 rx_to=5000 71 | ``` 72 | ## Misc 73 | 74 | Check the device driver meta data: 75 | ```bash 76 | udevadm info -q all -a /dev/iit-hpu0 77 | ``` 78 | Check the device driver parameters: 79 | ```bash 80 | cat /sys/module/iit_hpucore_dma/parameters/ps 81 | ``` 82 | 83 | # How copy an entire sd-card for a new board 84 | 85 | ## PARTITION THE NEW SD 86 | 87 | * Insert the new SD 88 | * `sudo gparted` (`sudo apt-get install gparted` if needed) 89 | * `gparted` GUI should detect the SD 90 | * Unmount the SD in gparted GUI (you cannot partition a mounted drive) 91 | * Create new partitions: 1. FAT32 name:BOOT 50MiB 2. EXT4 name:rootfs (max-250) 3. linux-swap name:swap 200MiB 92 | * Edit -> apply all operations 93 | 94 | ## COPY THE FILES 95 | 96 | * Insert old SD (mount the boot and filesystem partitions) 97 | * Copy BOOT (old) -> BOOT (new) (use `/tmp` as a temporary location to store files if you cannot mount both SD cards simultaneously) 98 | * `sudo tar zcvf filesystem.tgz /media/$username/rootfs` (from the old SD - again do this in `/tmp`) 99 | * `sudo sync` (ensure files are copied by flushing file writing queue) 100 | * `cd /media/$username/rootfs` (on the new SD) 101 | * `sudo tar zxvf /tmp/filesystem.tgz --strip-components=3` 102 | * `sudo sync` 103 | -------------------------------------------------------------------------------- /documentation/setup_yarpserver.md: -------------------------------------------------------------------------------- 1 | # Setup the a `yarpserver` 2 | 3 | The most important thing is to make sure all computers that are using `YARP` together are on the same subnet, and that the `yarpserver` is running on the same sub-net. Most problems with connection are because one of these is not true. Here's the standard way to do if for `event-driven`. 4 | 5 | #### Run the `yarpserver` on the ZCB 6 | 7 | On your SSH connection to the ZCB: 8 | ```bash 9 | yarp namespace / 10 | yarp conf 10000 11 | yarpserver 12 | ``` 13 | where`` is the IP address assigned to the [ZCB eth0 connection](connect_to_zcb.md). 14 | If you want to run the `yarpserver` and the `zynqGrabber` on the same SSH connection, you can run the `yarpserver` in the background: 15 | ```bash 16 | yarpserver & 17 | ``` 18 | you can use 19 | ```bash 20 | fg 21 | ``` 22 | to bring `yarpserver` back to the foreground. 23 | 24 | #### Connect your laptop to the `yarpserver` 25 | 26 | On a terminal on your own laptop: 27 | ```bash 28 | yarp conf 10000 29 | ``` 30 | **Note:** this is the ip address of the ZCB! not your laptop! :warning: 31 | ```bash 32 | yarp detect 33 | ``` 34 | should find the `yarpserver` you have running on the zcb. 
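To double-check that your laptop is really talking to the nameserver on the board, two standard `yarp` utilities are useful:

```bash
yarp where       # prints the address of the nameserver currently in use
yarp name list   # lists the ports registered there (e.g. /zynqGrabber/AE:o once zynqGrabber is running)
```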
-------------------------------------------------------------------------------- /documentation/zynqGrabber.md: -------------------------------------------------------------------------------- 1 | # How to configure and run `zynqGrabber` 2 | 3 | `zynqGrabber` is an application that runs on the ZCB or z-turn and forms a bridge between the FPGA (reading events from the sensors) and the `YARP` network, so that the events can be processed on one or more CPUs or on neuromorphic hardware. To run the `zynqGrabber`: 4 | - the ZCB needs to be running, or connected to, a [`yarpserver`](setup_yarpserver.md), 5 | - `event-driven` needs to be installed correctly with [`zynqGrabber ENABLED`](howtosetupSD.md), and 6 | - the configuration file needs to be set up for the sensors connected to the hardware. 7 | 8 | ## `zynqGrabber` configuration file 9 | 10 | When `event-driven` is installed (`make install`), it also installs the configuration files for the `zynqGrabber`. However, the default values in the file need to be modified depending on the hardware that you want to use. 11 | 12 | :warning: We need to make a local copy of the file so that re-installing `event-driven` won't overwrite your local changes with the default ones! 13 | 14 | First of all, make an [SSH connection](connect_to_zcb.md) to the ZCB. You can modify the local configuration file using: 15 | ```bash 16 | nano ~/.local/share/yarp/contexts/event-driven/zynqGrabber.ini 17 | ``` 18 | If the file or folder does not exist, you must first import a local copy from the installed one. Run the following command to import the configuration file automatically, then re-try modifying it: 19 | ```bash 20 | yarp-config context --import event-driven zynqGrabber.ini 21 | ``` 22 | The `zynqGrabber` has the following options: 23 | * *name* : renames the ports opened; needed if more than one zynqGrabber is running on the same network 24 | * *verbose* : print out a little more information on start-up 25 | * *aps* : turn on the ATIS aps events 26 | * *dataDevice* : the full path to the device that will be opened to read events 27 | * *hpu_read* : open a thread to read data from the HPU and publish it to `YARP` 28 | * *hpu_write* : open a thread to write data to the HPU device 29 | * *packet_size* : the maximum number of events to send in a single packet (too small can lead to latency, too large can lead to packet-loss in UDP connections) 30 | * *visCtrlLeft* : connect to a device to configure the left camera 31 | * *visCtrlRight* : connect to a device to configure the right camera 32 | * *skinCtrl* : connect to a device to configure the skin 33 | * *use_spinnaker* : configure the HPU to write to a SpiNNaker device 34 | * *[ATIS_BIAS_LEFT]* : bias values for the left camera 35 | * *[ATIS_BIAS_RIGHT]* : bias values for the right camera 36 | * *[SKIN_CNFG]* : configuration parameters for the skin 37 | 38 | Most default values are fine for typical use of `event-driven` sensors. However, the file still needs to be adapted to the hardware you actually have: 39 | 40 | * Comment out any devices you don't have connected. 41 | * Set *use_spinnaker* and the HPU device to *hpu_read* and *hpu_write* if connected to the SpiNNaker.
42 | * Change the name if this is not the only `zynqGrabber` on the network 43 | 44 | ## Run the `zynqGrabber` 45 | 46 | Once the configuration has been correctly performed, you can run the `zynqGrabber` from the terminal: 47 | ```bash 48 | zynqGrabber 49 | ``` 50 | If `zynqGrabber` doesn't open it should give a warning message stating the problem, typically `yarpserver` is not running/connected or it is trying to open a device that is not physically connected. 51 | 52 | ## Check the zynqGrabber is streaming data 53 | 54 | On your own laptop that has a [connection to the ZCB `yarpserver`](setup_yarpserver.md) you can run: 55 | ```bash 56 | yarp read ... /zynqGrabber/AE:o 57 | ``` 58 | to verify that the data is streaming. The data is not in a human-readable format but it's presence indicates that the `zynqGrabber` is working. 59 | -------------------------------------------------------------------------------- /ev2/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | set( folder_source 2 | event-driven/core/codec.cpp 3 | event-driven/core/comms.cpp 4 | #include/event-driven/core/vPort.cpp 5 | event-driven/core/utilities.cpp 6 | ) 7 | 8 | set(public_header_files event-driven/core.h) 9 | set( folder_header 10 | event-driven/core.h 11 | event-driven/core/codec.h 12 | event-driven/core/utilities.h 13 | event-driven/core/comms.h 14 | #include/event-driven/core/vPort.h 15 | ) 16 | 17 | if(OpenCV_FOUND) 18 | 19 | list( APPEND folder_source 20 | event-driven/vis/filters.cpp 21 | event-driven/vis/IPT.cpp 22 | event-driven/vis/draw.cpp 23 | event-driven/algs/surface.cpp 24 | event-driven/algs/corner.cpp 25 | event-driven/algs/flow.cpp 26 | #include/event-driven/vis/vDraw_basic.cpp 27 | #include/event-driven/vis/vDraw_ISO.cpp 28 | #src/vDraw_skin.cpp 29 | ) 30 | 31 | list( APPEND public_header_files event-driven/vis.h 32 | event-driven/algs.h) 33 | list( APPEND folder_header 34 | event-driven/vis.h 35 | event-driven/vis/filters.h 36 | event-driven/vis/IPT.h 37 | event-driven/vis/draw.h 38 | event-driven/algs.h 39 | event-driven/algs/surface.h 40 | event-driven/algs/corner.h 41 | event-driven/algs/flow.h 42 | #include/event-driven/vis/vDraw.h 43 | #include/event-driven/vDrawSkin.h 44 | ) 45 | 46 | endif() 47 | 48 | # include_directories(algorithms) 49 | 50 | 51 | # Create everything needed to build our library 52 | add_library(${EVENTDRIVEN_LIBRARY} ${folder_source} ${folder_header}) 53 | add_library(ev::${EVENTDRIVEN_LIBRARY} ALIAS ${EVENTDRIVEN_LIBRARY}) 54 | 55 | 56 | set_target_properties(${EVENTDRIVEN_LIBRARY} PROPERTIES PUBLIC_HEADER "${public_header_files}" 57 | VERSION ${event-driven_VERSION} 58 | SOVERSION 2) 59 | target_include_directories(${EVENTDRIVEN_LIBRARY} PUBLIC "$" 60 | "$/${CMAKE_INSTALL_INCLUDEDIR}>") # FIXME INSTALL PATH 61 | 62 | target_compile_definitions(${EVENTDRIVEN_LIBRARY} PUBLIC 63 | CLOCK_PERIOD=${VLIB_CLOCK_PERIOD_NS} 64 | TIMER_BITS=${VLIB_TIMER_BITS} 65 | $<$:ENABLE_TS=1>) 66 | 67 | target_compile_options(${EVENTDRIVEN_LIBRARY} PRIVATE -Wall) 68 | 69 | if(OpenCV_FOUND) 70 | target_link_libraries(${EVENTDRIVEN_LIBRARY} PUBLIC YARP::YARP_os 71 | YARP::YARP_sig 72 | pthread 73 | ${OpenCV_LIBRARIES}) 74 | else() 75 | target_link_libraries(${EVENTDRIVEN_LIBRARY} PUBLIC YARP::YARP_os 76 | YARP::YARP_sig 77 | pthread) 78 | endif() 79 | 80 | install(TARGETS ${EVENTDRIVEN_LIBRARY} 81 | EXPORT eventdriven 82 | LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT shlib 83 | ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT 
lib 84 | RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT bin 85 | PUBLIC_HEADER DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${EVENTDRIVEN_LIBRARY}" COMPONENT dev) 86 | 87 | install(DIRECTORY "$/event-driven" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" FILES_MATCHING PATTERN "*.h") 88 | -------------------------------------------------------------------------------- /ev2/event-driven/algs.h: -------------------------------------------------------------------------------- 1 | #include "algs/surface.h" 2 | #include "algs/flow.h" 3 | #include "algs/corner.h" -------------------------------------------------------------------------------- /ev2/event-driven/algs/corner.cpp: -------------------------------------------------------------------------------- 1 | #include -------------------------------------------------------------------------------- /ev2/event-driven/algs/corner.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2022 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | #pragma once 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | 26 | #include "surface.h" 27 | 28 | namespace ev 29 | { 30 | 31 | class corner_detector 32 | { 33 | private: 34 | 35 | cv::Mat LUT; 36 | ev::SCARF scarf; 37 | int harris_block_size{7}; 38 | 39 | std::thread harris_thread; 40 | std::mutex m; 41 | std::condition_variable signal; 42 | 43 | double threshold{0.0}; 44 | double score_mean{0.0}; 45 | double score_variance{0.0}; 46 | int count{0}; 47 | 48 | void updateLUT() 49 | { 50 | static cv::Mat blurred; 51 | while(harris_block_size > 0) 52 | { 53 | // std::unique_lock lk(m); 54 | // signal.wait(lk, [this]{return eros_updated;}); 55 | // eros_updated = false; 56 | // lk.unlock(); 57 | scarf.getSurface().convertTo(blurred, CV_8U, 255); 58 | cv::GaussianBlur(blurred, blurred, cv::Size(5, 5), 0, 0); 59 | cv::cornerHarris(blurred, LUT, harris_block_size, 3, 0.04); 60 | } 61 | } 62 | 63 | 64 | public: 65 | 66 | void stop() 67 | { 68 | harris_block_size = -1; 69 | harris_thread.join(); 70 | } 71 | 72 | void initialise(int height, int width, int harris_block_size) 73 | { 74 | if (harris_block_size % 2 == 0) 75 | harris_block_size += 1; 76 | this->harris_block_size = harris_block_size; 77 | scarf.initialise({width, height}, 10); 78 | LUT = cv::Mat(height, width, CV_32F); 79 | harris_thread = std::thread([this]{updateLUT();}); 80 | } 81 | 82 | template 83 | void detect(T begin, T end, std::deque &results) 84 | { 85 | //first update the EROS 86 | for(auto &v = begin; v != end; v++) { 87 | scarf.update(v->x, v->y, v->p); 88 | 89 | float& score = LUT.at(v->y, v->x); 90 | if(score > threshold) 91 | results.push_back(*v); 92 | 93 | //if (count < 1000000) { 94 | count++; 95 | double delta = score - score_mean; 96 | score_mean += 
delta / count; 97 | double delta2 = score - score_mean; 98 | score_variance += delta * delta2; 99 | // } 100 | } 101 | threshold = score_mean + 2*sqrt(score_variance / count); 102 | //threshold = 0.00001; 103 | 104 | // std::unique_lock lk(m); 105 | // eros_updated = true; 106 | // lk.unlock(); 107 | // signal.notify_one(); 108 | 109 | 110 | } 111 | 112 | }; 113 | 114 | 115 | 116 | } -------------------------------------------------------------------------------- /ev2/event-driven/algs/flow.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2022 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | #pragma once 19 | #include 20 | #include 21 | #include 22 | #include 23 | 24 | namespace ev { 25 | 26 | // ================================== 27 | // zcflow is Arren's final version 28 | // ================================== 29 | 30 | class zrtBlock { 31 | friend class zrtFlow; 32 | private: 33 | int N{0}; //this is the maximum number of events to update 34 | cv::Point2d flow; //raw flow assigned to this block 35 | std::deque x_dist; //distribution of x connections 36 | std::deque y_dist; //distribution of y connections 37 | 38 | std::vector pxs_live, pxs_snap; //live/snap circular buffer 39 | int i{0}, is{0}; //new event position live/snap 40 | int j{0}, js{0}; //previously updated position live/snap 41 | 42 | double last_update_tic{0}; //use time for flow decay 43 | 44 | //calculate connections for a single event on the SAE 45 | void singlePixConnections(cv::Mat &sae, int d, double triplet_tolerance, cv::Point p0); 46 | 47 | public: 48 | 49 | zrtBlock(int N); 50 | 51 | //add a new point to the block 52 | void add(cv::Point p); 53 | 54 | //snap saves a copy of the live block into the "snap" variables 55 | void snap(); 56 | 57 | //update connections for each new event 58 | void updateConnections(cv::Mat &sae, int d, double triplet_tolerance); 59 | 60 | //udate the flow state from the connection buffer 61 | void updateFlow(size_t n = 0); 62 | 63 | }; 64 | 65 | class zrtFlow 66 | { 67 | private: 68 | 69 | enum {X=0,Y=1}; 70 | 71 | cv::Mat sae; 72 | std::vector blocklist; 73 | std::vector blockmap; 74 | cv::Size array_dims{{0, 0}}; 75 | cv::Size block_dims{{0, 0}}; 76 | cv::Size image_res{{0, 0}}; 77 | 78 | cv::Mat block_flow[2]; 79 | cv::Mat pixel_flow[2]; 80 | cv::Mat full_flow[2]; 81 | cv::Mat hsv, rgb; 82 | 83 | //parameters 84 | int con_len{3}; 85 | double trip_tol{0.125}; 86 | int con_buf_min{20}; 87 | int smooth_factor{3}; 88 | 89 | public: 90 | 91 | void initialise(cv::Size res, int block_size, int max_N, int connection_length, int con_buf_min, double trip_tol, int smooth_factor); 92 | 93 | //add a new event to the SAE and record the new event with the 94 | //corresponding block 95 | void add(int u, int v, double t); 96 | 97 | //go 
through each block and update the list of flow vectors 98 | //update the final flow per pixel 99 | void update(); 100 | 101 | cv::Mat makebgr(); 102 | }; 103 | 104 | // ================================== 105 | // zcflow is Zhichao's first version 106 | // ================================== 107 | 108 | class zcflowBlock 109 | { 110 | friend class zcflow; 111 | 112 | private: 113 | cv::Vec3b color; 114 | cv::Point2i index; 115 | cv::Vec2f flow; 116 | 117 | std::vector x_dist; 118 | std::vector y_dist; 119 | 120 | // cv::Mat sae; 121 | double tolerance{0.125}; 122 | //double refracotry_period{0.003}; 123 | double dt{0.05}; 124 | size_t N{30}; 125 | 126 | static const int d_coordinate; 127 | 128 | public: 129 | 130 | void initialise(cv::Point2i i); 131 | 132 | bool block_update_zc(const cv::Mat &sae, int x, int y, cv::Mat &flow_mat, int block_size, cv::Point2i b_index); 133 | 134 | void point_velocity_zc(const cv::Mat &sae, int x, int y, std::vector &flow_x, std::vector &flow_y); 135 | 136 | }; 137 | 138 | 139 | class zcflow 140 | { 141 | private: 142 | 143 | std::vector blocks; 144 | 145 | cv::Mat flow; 146 | cv::Mat flow_; 147 | cv::Mat flow_x; 148 | cv::Mat flow_y; 149 | 150 | cv::Mat sae_p; 151 | cv::Mat sae_n; 152 | 153 | double toc{0.0}; 154 | 155 | int block_size; 156 | cv::Size n_blocks; 157 | cv::Size flow_blocks; 158 | cv::Point2i b_index; 159 | 160 | 161 | public: 162 | cv::Mat flowbgr; 163 | cv::Mat xy[2]; //X,Y 164 | int camera_size_compensation = 1; 165 | int boundary_compensation = 2; 166 | 167 | void initialise(const cv::Mat_ &sae_p, const cv::Mat_ &sae_n, int block_size); 168 | 169 | void update_sae(const cv::Mat_ &sae_p, const cv::Mat_ &sae_n); 170 | 171 | void clear_blocks(); 172 | 173 | void update(double tic); 174 | 175 | cv::Mat makebgr(); 176 | 177 | }; 178 | 179 | } 180 | -------------------------------------------------------------------------------- /ev2/event-driven/algs/surface.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | using namespace ev; 3 | 4 | cv::Mat surface::getSurface() 5 | { 6 | cv::Mat output; surf(actual_region).convertTo(output, CV_8U); 7 | return output; 8 | } 9 | 10 | void surface::init(int width, int height, int kernel_size, double parameter) 11 | { 12 | if (kernel_size % 2 == 0) 13 | kernel_size++; 14 | this->kernel_size = kernel_size; //kernel_size should be odd 15 | this->half_kernel = kernel_size / 2; 16 | this->parameter = parameter; 17 | 18 | surf = cv::Mat(height+half_kernel*2, width+half_kernel*2, CV_64F, cv::Scalar(0.0)); 19 | actual_region = {half_kernel, half_kernel, width, height}; 20 | } 21 | 22 | void surface::temporalDecay(double ts, double alpha) { 23 | surf *= cv::exp(alpha * (time_now - ts)); 24 | time_now = ts; 25 | } 26 | 27 | void surface::spatialDecay(int k) 28 | { 29 | cv::GaussianBlur(surf, surf, cv::Size(k, k), 0); 30 | } -------------------------------------------------------------------------------- /ev2/event-driven/core.h: -------------------------------------------------------------------------------- 1 | #include "core/comms.h" 2 | #include "core/utilities.h" 3 | #include "core/codec.h" 4 | -------------------------------------------------------------------------------- /ev2/event-driven/core/codec.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 
6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | #include 19 | #include 20 | 21 | const std::string ev::timeStamp::tag = "TS"; 22 | const std::string ev::addressEvent::tag = "AE"; 23 | const std::string ev::encoded::tag="AE"; 24 | const std::string ev::skinAE::tag = "AE"; 25 | const std::string ev::skinSample::tag = "SKS"; 26 | const std::string ev::flowEvent::tag = "FLOW"; 27 | const std::string ev::gaussianEvent::tag = "GAE"; 28 | const std::string ev::IMUS::tag = "IMU"; 29 | const std::string ev::neuronEvent::tag = "NEU"; 30 | const std::string ev::earEvent::tag = "EAR"; -------------------------------------------------------------------------------- /ev2/event-driven/core/codec.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2021 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 
17 | */ 18 | #pragma once 19 | namespace ev { 20 | 21 | 22 | typedef struct timeStamp { 23 | static const std::string tag; 24 | #if ENABLE_TS 25 | unsigned int ts:31; 26 | unsigned int _fill:1; 27 | #else 28 | static const unsigned int ts{0}; 29 | static const unsigned int _fill{0}; 30 | #endif 31 | } timeStamp; 32 | using TS = timeStamp; 33 | 34 | typedef struct addressEvent : public timeStamp { 35 | static const std::string tag; 36 | unsigned int p:1; 37 | unsigned int x:11; 38 | unsigned int y:10; 39 | unsigned int channel:1; 40 | unsigned int type:1; 41 | unsigned int skin:1; 42 | unsigned int corner:1; 43 | unsigned int _fill:6; 44 | } addressEvent; 45 | using AE = addressEvent; 46 | 47 | typedef struct encoded : public timeStamp { 48 | static const std::string tag; 49 | int32_t data; 50 | } encoded; 51 | 52 | typedef struct skinAE : public timeStamp { 53 | static const std::string tag; 54 | unsigned int polarity:1; 55 | unsigned int taxel:4; 56 | unsigned int device:4; 57 | unsigned int constant:23; 58 | } skinAE; 59 | 60 | typedef struct skinValue { 61 | unsigned int _ts:31; 62 | unsigned int _fill1:1; 63 | unsigned int value:16; 64 | unsigned int _fill2:16; 65 | } skinValue; 66 | 67 | typedef struct skinSample { 68 | static const std::string tag; 69 | skinAE address; 70 | skinValue value; 71 | } skinSample; 72 | 73 | /// \brief an AddressEvent with a velocity in visual space 74 | typedef struct flowEvent : public AE { 75 | static const std::string tag; 76 | float vx; 77 | float vy; 78 | } flowEvent; 79 | 80 | /// \brief a LabelledAE with parameters that define a 2D gaussian 81 | typedef struct gaussianEvent { 82 | static const std::string tag; 83 | float sigx; 84 | float sigy; 85 | float sigxy; 86 | } gaussianEvent; 87 | 88 | /// \brief an event with a pixel location, camera number and polarity 89 | typedef struct IMUS : public timeStamp { 90 | static const std::string tag; 91 | int value:16; 92 | unsigned int sensor:4; 93 | unsigned int _r1:2; 94 | unsigned int channel:1; 95 | unsigned int type:1; 96 | unsigned int _r2:8; 97 | } IMUS; 98 | 99 | typedef struct neuronEvent : public timeStamp { 100 | static const std::string tag; 101 | int id; 102 | } neuronEvent; 103 | 104 | typedef struct earEvent : public timeStamp { 105 | static const std::string tag; 106 | unsigned int polarity : 1; 107 | unsigned int freq_chnn : 7; 108 | unsigned int xso_type : 1; 109 | unsigned int auditory_model : 1; 110 | unsigned int _reserved1 : 2; 111 | unsigned int neuron_id : 7; 112 | unsigned int sensor_id : 3; 113 | unsigned int channel : 1; 114 | unsigned int type : 1; 115 | unsigned int cochlea_sensor_id : 3; 116 | unsigned int _fill : 5; 117 | } earEvent; 118 | 119 | } -------------------------------------------------------------------------------- /ev2/event-driven/core/comms.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 
14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | 19 | #include 20 | 21 | 22 | -------------------------------------------------------------------------------- /ev2/event-driven/core/utilities.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * valentina.vasco@iit.it 5 | * 6 | * This program is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU Lesser General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU Lesser General Public License 17 | * along with this program. If not, see . 18 | */ 19 | 20 | #include 21 | 22 | //#include 23 | #include 24 | #include 25 | 26 | namespace ev { 27 | 28 | unsigned int max_stamp = (1 << TIMER_BITS) - 1; 29 | double tsscaler = 0.000000001 * CLOCK_PERIOD; 30 | double vtsscaler = 1.0 / tsscaler; 31 | #if ENABLE_TS 32 | bool ts_status = true; 33 | #else 34 | bool ts_status = false; 35 | #endif 36 | 37 | benchmark::benchmark() 38 | { 39 | initialised = true; 40 | totals_file.open("/proc/stat"); 41 | if(!totals_file.is_open()) { 42 | yWarning() << "Could not open global cpu statistics (linux only)"; 43 | initialised = false; 44 | } 45 | 46 | std::stringstream path_proc; 47 | path_proc << "/proc/" << getpid() << "/stat"; 48 | process_file.open(path_proc.str()); 49 | if(!process_file.is_open()) { 50 | yWarning() << "Could not open process cpu statistics (linux only)"; 51 | initialised = false; 52 | } 53 | 54 | if(!initialised) { 55 | if(totals_file.is_open()) 56 | totals_file.close(); 57 | if(process_file.is_open()) 58 | process_file.close(); 59 | } 60 | 61 | prevTotal = 0; 62 | prevProcess = 0; 63 | perc = 0.0; 64 | } 65 | 66 | bool benchmark::isReady() 67 | { 68 | return initialised; 69 | } 70 | 71 | double benchmark::getProcessorUsage() 72 | { 73 | unsigned long int temp; 74 | unsigned long int total = 0; 75 | unsigned long int process = 0; 76 | std::string line; 77 | std::stringstream lineparser; 78 | 79 | if(!initialised) return 0; 80 | 81 | //get the cpu totals 82 | 83 | //get the relevant line 84 | totals_file.seekg(0, totals_file.beg); 85 | getline (totals_file, line); 86 | lineparser.clear(); lineparser.str(line); 87 | //yInfo() << lineparser.str(); 88 | 89 | //ignore unnecessary data 90 | lineparser.ignore(std::numeric_limits::max(), ' '); 91 | 92 | //extract the usage 93 | while(lineparser >> temp) 94 | total += temp; 95 | 96 | if(total == prevTotal) return perc; 97 | 98 | //get the process totals 99 | 100 | //get the relevant line 101 | process_file.seekg(0, process_file.beg); 102 | getline(process_file, line); 103 | lineparser.clear(); lineparser.str(line); 104 | //yInfo() << lineparser.str(); 105 | 106 | //ignore unnecessary data 107 | for(int i = 0; i < 13; i++) { 108 | lineparser.ignore(std::numeric_limits::max(), ' '); 109 | } 110 | 111 | //extract the usage 112 | if(lineparser >> temp) { 113 | process += temp; 114 | } 115 | if(lineparser >> temp) { 116 | 
process += temp; 117 | } 118 | 119 | //calculate the percentage usage since the last call 120 | perc = (double)(process - prevProcess) / (double)(total - prevTotal); 121 | prevProcess = process; prevTotal = total; 122 | 123 | //yInfo() << process << total << perc; 124 | 125 | return perc; 126 | } 127 | 128 | benchmark::~benchmark() 129 | { 130 | if(totals_file.is_open()) 131 | totals_file.close(); 132 | if(process_file.is_open()) 133 | process_file.close(); 134 | } 135 | 136 | 137 | } 138 | -------------------------------------------------------------------------------- /ev2/event-driven/vis.h: -------------------------------------------------------------------------------- 1 | #include "vis/filters.h" 2 | #include "vis/IPT.h" 3 | #include "vis/draw.h" 4 | //#include "vis/vDrawSkin.h" -------------------------------------------------------------------------------- /ev2/event-driven/vis/IPT.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2019 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | 19 | #ifndef VIPT_H 20 | #define VIPT_H 21 | 22 | #include 23 | #include 24 | 25 | namespace ev { 26 | 27 | class vIPT { 28 | 29 | private: 30 | 31 | cv::Size size_shared; 32 | cv::Size size_cam[2]; 33 | cv::Mat cam_matrix[2]; 34 | cv::Mat dist_coeff[2]; 35 | cv::Mat stereo_rotation, stereo_translation; 36 | cv::Mat projection[2]; 37 | cv::Mat rotation[2]; 38 | cv::Mat Q; 39 | 40 | cv::Mat point_forward_map[2]; 41 | cv::Mat point_reverse_map[2]; 42 | cv::Mat mat_reverse_map[2]; 43 | cv::Mat mat_forward_map[2]; 44 | 45 | bool importIntrinsics(int cam, yarp::os::Bottle ¶meters); 46 | bool importStereo(yarp::os::Bottle ¶meters); 47 | bool computeForwardReverseMaps(int cam); 48 | 49 | 50 | public: 51 | 52 | vIPT(); 53 | 54 | const cv::Mat& getQ(); 55 | void setProjectedImageSize(int height, int width); 56 | bool configure(const std::string &calib_file_path, int size_scaler = 2); 57 | bool showMapProjections(double seconds = 0); 58 | void showMonoProjections(int cam, double seconds); 59 | void printValidCalibrationValues(); 60 | 61 | bool sparseForwardTransform(int cam, int &y, int &x); 62 | bool sparseReverseTransform(int cam, int &y, int &x); 63 | bool sparseProjectCam0ToCam1(int &y, int &x); 64 | bool sparseProjectCam1ToCam0(int &y, int &x); 65 | 66 | bool denseForwardTransform(int cam, cv::Mat &m); 67 | bool denseReverseTransform(int cam, cv::Mat &m); 68 | bool denseProjectCam0ToCam1(cv::Mat &m); 69 | bool denseProjectCam1ToCam0(cv::Mat &m); 70 | 71 | 72 | }; 73 | 74 | } 75 | #endif //vitp_h 76 | -------------------------------------------------------------------------------- /ev2/event-driven/vis/draw.cpp: -------------------------------------------------------------------------------- 1 | #include "event-driven/vis/draw.h" 2 | 3 | namespace ev 
{ 4 | 5 | pixelShifter::pixelShifter() { 6 | setRotation(20.0, 40.0); 7 | setShift(0, 0, 1.0); 8 | } 9 | 10 | void pixelShifter::setRotation(double pitch, double yaw) { 11 | thetaX = pitch * 3.14 / 180.0; //PITCH 12 | thetaY = yaw * 3.14 / 180.0; //YAW 13 | 14 | CY = cos(thetaY); 15 | SY = sin(thetaY); 16 | CX = cos(thetaX); 17 | SX = sin(thetaX); 18 | } 19 | 20 | void pixelShifter::setShift(int xoffset, int yoffset, double tsoffset) { 21 | xshift = xoffset; 22 | yshift = yoffset; 23 | ts_scaler = tsoffset; 24 | } 25 | 26 | void pixelShifter::pttr(int &x, int &y, double &z) { 27 | // we want a negative rotation around the y axis (yaw) 28 | // a positive rotation around the x axis (pitch) (no roll) 29 | // the z should always be negative values. 30 | // the points need to be shifted across by negligble amount 31 | // the points need to be shifted up by (x = max, y = 0, ts = 0 rotation) 32 | z = z * ts_scaler; 33 | int xmod = x * CY + z * SY + 0.5; // +0.5 rounds rather than floor 34 | int ymod = y * CX - SX * (-x * SY + z * CY) + 0.5; 35 | //int zmod = y*SX + CX*(-x*SY + z*CY) + 0.5; 36 | x = xmod + xshift; 37 | y = ymod + yshift; //z = zmod; 38 | } 39 | 40 | pixelShifter drawISOBase(int height, int width, double period, cv::Mat &baseimage) 41 | { 42 | int Xlimit = width; 43 | int Ylimit = height; 44 | int Zlimit = width * 3; 45 | double ts_to_axis = (double)Zlimit / period; 46 | 47 | pixelShifter pr; 48 | 49 | //the following calculations make the assumption of a negative yaw and 50 | //a positive pitch 51 | int x, y; double z; 52 | int maxx = 0, maxy = 0, miny = Ylimit, minx = Xlimit; 53 | for(int xi = 0; xi <= Xlimit; xi+=Xlimit) { 54 | for(int yi = 0; yi <= Ylimit; yi+=Ylimit) { 55 | for(double zi = 0; zi <= Zlimit; zi+=Zlimit) { 56 | x = xi; y = yi; z = zi; pr.pttr(x, y, z); 57 | maxx = std::max(maxx, x); 58 | maxy = std::max(maxy, y); 59 | minx = std::min(minx, x); 60 | miny = std::min(miny, y); 61 | } 62 | } 63 | } 64 | 65 | 66 | int imagexshift = -minx + 10; 67 | int imageyshift = -miny + 10; 68 | pr.setShift(imagexshift, imageyshift, ts_to_axis); 69 | 70 | int imagewidth = maxx + imagexshift + 10; 71 | int imageheight = maxy + imageyshift + 10; 72 | 73 | baseimage = cv::Mat(imageheight, imagewidth, CV_8UC3); 74 | baseimage.setTo(0); 75 | 76 | 77 | 78 | //cv::putText(baseimage, std::string("X"), cv::Point(100, 100), 1, 0.5, CV_RGB(0, 0, 0)); 79 | 80 | cv::Scalar invertedtextc = CV_RGB(125, 125, 125); 81 | cv::Vec3b invertedaxisc = cv::Vec3b(255, 255, 255); 82 | cv::Vec3b invertedframec = cv::Vec3b(125, 125, 125); 83 | 84 | for(int xi = 0; xi < Xlimit; xi++) { 85 | x = xi; y = 0; z = 0; pr.pttr(x, y, z); 86 | baseimage.at(y, x) = invertedaxisc; 87 | x = xi; y = Ylimit; z = 0; pr.pttr(x, y, z); 88 | baseimage.at(y, x) = invertedaxisc; 89 | if(xi == Xlimit / 2) { 90 | cv::putText(baseimage, std::string("x"), cv::Point(x-10, y+10), 91 | cv::FONT_ITALIC, 0.5, invertedtextc, 1, 8, false); 92 | } 93 | } 94 | 95 | for(int yi = 0; yi <= Ylimit; yi++) { 96 | x = 0; y = yi; z = 0; pr.pttr(x, y, z); 97 | baseimage.at(y, x) = invertedaxisc; 98 | if(yi == Ylimit / 2) { 99 | cv::putText(baseimage, std::string("y"), cv::Point(x-10, y+10), 100 | cv::FONT_ITALIC, 0.5, invertedtextc, 1, 8, false); 101 | } 102 | x = Xlimit; y = yi; z = 0; pr.pttr(x, y, z); 103 | baseimage.at(y, x) = invertedaxisc; 104 | 105 | } 106 | 107 | unsigned int tsi; 108 | for(tsi = 0; tsi < (unsigned int)(period*0.3); tsi++) { 109 | 110 | x = Xlimit; y = Ylimit; z = tsi; pr.pttr(x, y, z); 111 | baseimage.at(y, x) = 
invertedaxisc; 112 | 113 | if(tsi == (unsigned int)(period *0.15)) { 114 | cv::putText(baseimage, std::string("t"), cv::Point(x, y+12), 115 | cv::FONT_ITALIC, 0.5, invertedtextc, 1, 8, false); 116 | } 117 | 118 | } 119 | 120 | // for(int i = 0; i < 14; i++) { 121 | 122 | // x = Xlimit-i/2; y = Ylimit; z = tsi-i; pr.pttr(x, y, z); 123 | // baseimage.at(y, x) = invertedaxisc; 124 | 125 | // x = Xlimit+i/2; y = Ylimit; z = tsi-i; pr.pttr(x, y, z); 126 | // baseimage.at(y, x) = invertedaxisc; 127 | // } 128 | 129 | for(tsi = ev::vtsscaler / 10.0; 130 | tsi < (unsigned int)period; 131 | tsi += ev::vtsscaler / 10.0) { 132 | 133 | int zc = tsi + 0.5; 134 | 135 | for(int xi = 0; xi < Xlimit; xi++) { 136 | x = xi; y = 0; z = zc; pr.pttr(x, y, z); 137 | baseimage.at(y, x) = invertedframec; 138 | x = xi; y = Ylimit; z = zc; pr.pttr(x, y, z); 139 | baseimage.at(y, x) = invertedframec; 140 | } 141 | 142 | for(int yi = 0; yi <= Ylimit; yi++) { 143 | x = 0; y = yi; z = zc; pr.pttr(x, y, z); 144 | baseimage.at(y, x) = invertedframec; 145 | x = Xlimit; y = yi; z = zc; pr.pttr(x, y, z); 146 | baseimage.at(y, x) = invertedframec; 147 | 148 | } 149 | 150 | } 151 | 152 | return pr; 153 | } 154 | 155 | 156 | 157 | } // namespace ev -------------------------------------------------------------------------------- /ev2/event-driven/vis/filters.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 17 | */ 18 | 19 | /* 20 | * Copyright (C) 2017 Event-driven Perception for Robotics 21 | * Author: arren.glover@iit.it 22 | * 23 | * This program is free software: you can redistribute it and/or modify 24 | * it under the terms of the GNU Lesser General Public License as published by 25 | * the Free Software Foundation, either version 3 of the License, or 26 | * (at your option) any later version. 27 | * 28 | * This program is distributed in the hope that it will be useful, 29 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 30 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 31 | * GNU General Public License for more details. 32 | * 33 | * You should have received a copy of the GNU Lesser General Public License 34 | * along with this program. If not, see . 
35 | */ 36 | 37 | #include "event-driven/vis/filters.h" 38 | 39 | namespace ev { 40 | 41 | void spatialFilter::initialise(int height, int width, double period = 0.1, int range = 1) 42 | { 43 | this->range = range; 44 | this->period = period; 45 | for(auto& sae : saes) 46 | sae = cv::Mat::zeros(height +2*range, width+2*range, CV_64F); 47 | } 48 | 49 | bool spatialFilter::check(const AE& v, const double ts) 50 | { 51 | static int fr = 2*range+1; 52 | bool pass = true; 53 | if(ts - period > saes[v.p].at(v.y+range, v.x+range)) 54 | pass = false; 55 | saes[v.p]({(int)v.x, (int)v.y, fr, fr}) = ts; 56 | return pass; 57 | } 58 | 59 | 60 | vNoiseFilter::vNoiseFilter() : x_sfilter(false), x_tfilter(false), t_sfilter(0), 61 | s_sfilter(1), t_tfilter(0) {} 62 | 63 | void vNoiseFilter::initialise(unsigned int width, unsigned int height) 64 | { 65 | res.height = height; 66 | res.width = width; 67 | SAE = cv::Mat::zeros(height, width, CV_64F); 68 | POL = cv::Mat::ones(height, width, CV_8U)*255; 69 | initialised = true; 70 | } 71 | 72 | const bool& vNoiseFilter::active() 73 | { 74 | return initialised; 75 | } 76 | 77 | void vNoiseFilter::use_temporal_filter(double t_param) 78 | { 79 | x_tfilter = true; 80 | t_tfilter = t_param; 81 | } 82 | 83 | void vNoiseFilter::use_spatial_filter(double t_param, unsigned int s_param) 84 | { 85 | x_sfilter = true; 86 | t_sfilter = t_param; 87 | s_sfilter = s_param; 88 | } 89 | 90 | bool vNoiseFilter::check(int x, int y, int p, double t) 91 | { 92 | 93 | auto add = true; 94 | 95 | if(x_tfilter) { 96 | if(p == POL.at(y, x)) { 97 | if(t - SAE.at(y, x) < t_tfilter) { 98 | SAE.at(y, x) = t; 99 | return false; 100 | } 101 | } 102 | } 103 | 104 | if(x_sfilter) { 105 | add = false; 106 | auto xl = std::max(x-s_sfilter, 0); 107 | auto xh = std::min(x+s_sfilter+1, (int)(res.width)); //+1 becuase I use < sign 108 | auto yl = std::max(y-s_sfilter, 0); 109 | auto yh = std::min(y+s_sfilter+1, (int)(res.height)); //+1 becuase I use < sign 110 | 111 | for(auto xi = xl; xi < xh; ++xi) { 112 | for(auto yi = yl; yi < yh; ++yi) { 113 | double dt = t - SAE.at(yi, xi); 114 | if(dt < t_sfilter) { 115 | add = true; 116 | break; 117 | } 118 | } 119 | } 120 | } 121 | 122 | POL.at(y, x) = p; 123 | SAE.at(y, x) = t; 124 | 125 | return add; 126 | } 127 | 128 | } 129 | 130 | 131 | -------------------------------------------------------------------------------- /ev2/event-driven/vis/filters.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2021 Event-driven Perception for Robotics 3 | * Author: arren.glover@iit.it 4 | * 5 | * This program is free software: you can redistribute it and/or modify 6 | * it under the terms of the GNU Lesser General Public License as published by 7 | * the Free Software Foundation, either version 3 of the License, or 8 | * (at your option) any later version. 9 | * 10 | * This program is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | * GNU General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public License 16 | * along with this program. If not, see . 
17 | */ 18 | 19 | #pragma once 20 | 21 | #include 22 | #include "event-driven/core.h" 23 | 24 | namespace ev { 25 | 26 | /// \brief an efficient event-based salt and pepper filter 27 | class vNoiseFilter 28 | { 29 | private: 30 | 31 | bool x_sfilter; 32 | bool x_tfilter; 33 | 34 | double t_sfilter; 35 | int s_sfilter; 36 | double t_tfilter; 37 | 38 | cv::Mat SAE; 39 | cv::Mat POL; 40 | 41 | resolution res; 42 | bool initialised{false}; 43 | 44 | public: 45 | 46 | /// \brief constructor 47 | vNoiseFilter(); 48 | 49 | /// \brief initialise the sensor size and the filter parameters. 50 | void initialise(unsigned int width, unsigned int height); 51 | 52 | const bool& active(); 53 | 54 | /// \brief filter using temporal coincidence 55 | void use_temporal_filter(double t_param); 56 | 57 | /// \brief filter using spatial coincidence 58 | void use_spatial_filter(double t_param, unsigned int s_param = 1); 59 | 60 | /// \brief classifies the event as noise or signal 61 | /// \returns false if the event is noise 62 | bool check(int x, int y, int p, double t); 63 | 64 | }; 65 | 66 | class spatialFilter 67 | { 68 | private: 69 | std::array saes; 70 | double period; 71 | double range; 72 | 73 | public: 74 | spatialFilter() {}; 75 | void initialise(int height, int width, double period, int range); 76 | bool check(const AE& v, const double ts); 77 | 78 | }; 79 | 80 | 81 | } 82 | -------------------------------------------------------------------------------- /python_tools/README.md: -------------------------------------------------------------------------------- 1 | # Offline Python Scripts 2 | 3 | `event-driven` has two companion repositories for offline dataset manipulation which has replaced much of the python helper functions: 4 | 5 | ### BIMVEE 6 | 7 | [Batch Import, Manipulation, Visualisation, and Export of Events](https://github.com/event-driven-robotics/bimvee) 8 | 9 | Loading and converting datasets from most of the common event datasets 10 | 11 | ### MUSTARD 12 | 13 | [MUlti STream Agnostic Representation Dataplayer](https://github.com/event-driven-robotics/mustard) 14 | 15 | Uses BIMVEE to load any dataset and visualise with tracking bar. Can be used to annotate data 16 | 17 | 18 | ### Scripts 19 | 20 | Here we have a couple of scripts: 21 | 22 | `ev2converter.py` - uses BIMVEE to convert old event-driven datasets to the new event-driven-2.0 format. 23 | 24 | `plot_imu_dump.py` - datasets dumped from imu calibration methods can be visualised to understand the data quality. 25 | 26 | 27 | -------------------------------------------------------------------------------- /python_tools/ev2converter.py: -------------------------------------------------------------------------------- 1 | from bimvee.importIitYarp import importIitYarp 2 | from bimvee.exportIitYarp import exportIitYarp 3 | import argparse 4 | import os, shutil 5 | 6 | parser = argparse.ArgumentParser() 7 | parser.add_argument('--in_path', dest='in_path', type=str, required=True, help='Path to input file to be converted') 8 | parser.add_argument('--out_path', dest='out_path', type=str, help='Path to save converted file to') 9 | args = parser.parse_args() 10 | 11 | if args.out_path is None: 12 | if os.path.isdir(args.in_path): 13 | args.out_path = os.path.join(args.in_path, 'ev2') 14 | else: 15 | args.out_path = os.path.join(os.path.dirname(args.in_path), 'ev2') 16 | 17 | if os.path.exists(args.out_path): 18 | if input(f"Directory {args.out_path} already exists. Are you sure you want to replace all of its content? 
(y/n)") == 'y': 19 | shutil.rmtree(args.out_path) 20 | else: 21 | print('Exiting') 22 | exit(0) 23 | 24 | data = importIitYarp(filePathOrName=args.in_path) 25 | exportIitYarp(data, exportFilePath=args.out_path, exportAsEv2=True, exportTimestamps=False, viewerApp=False, minTimeStepPerBottle=0.0005) 26 | 27 | -------------------------------------------------------------------------------- /python_tools/plot_imu_dump.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | 4 | fn_acc = '/home/aglover/local/imu/ajg9.acc.mat' 5 | fn_gyr = '/home/aglover/local/imu/ajg9.gyr.mat' 6 | 7 | acc_data = np.genfromtxt(fn_acc) 8 | print("==Acceleration Statistics==") 9 | print("Mean Sample Time:", np.mean(np.diff(acc_data[:, 0])), "seconds") 10 | print("Start", acc_data[0, 0], "Stop", acc_data[-1, 0]) 11 | print("X: [", np.min(acc_data[:, 1]), np.max(acc_data[:, 1]), "]") 12 | print("Y: [", np.min(acc_data[:, 2]), np.max(acc_data[:, 2]), "]") 13 | print("Z: [", np.min(acc_data[:, 3]), np.max(acc_data[:, 3]), "]") 14 | 15 | gyr_data = np.genfromtxt(fn_gyr) 16 | print("==Gyroscope Statistics==") 17 | print("Mean Sample Time:", np.mean(np.diff(gyr_data[:, 0])), "seconds") 18 | print("Start", gyr_data[0, 0], "Stop", gyr_data[-1, 0]) 19 | print("X: [", np.min(gyr_data[:, 1]), np.max(gyr_data[:, 1]), "]") 20 | print("Y: [", np.min(gyr_data[:, 2]), np.max(gyr_data[:, 2]), "]") 21 | print("Z: [", np.min(gyr_data[:, 3]), np.max(gyr_data[:, 3]), "]") 22 | 23 | plt.figure(1) 24 | plt.plot(acc_data[:, 0], acc_data[:, 1]) 25 | plt.plot(acc_data[:, 0], acc_data[:, 2]) 26 | plt.plot(acc_data[:, 0], acc_data[:, 3]) 27 | plt.title('Accelerometer Data'); 28 | plt.draw() 29 | 30 | plt.figure(2) 31 | plt.plot(gyr_data[:, 0], gyr_data[:, 1]) 32 | plt.plot(gyr_data[:, 0], gyr_data[:, 2]) 33 | plt.plot(gyr_data[:, 0], gyr_data[:, 3]) 34 | plt.title('Gyroscope Data'); 35 | plt.draw() 36 | 37 | plt.show() --------------------------------------------------------------------------------
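For reference, a typical invocation of the `ev2converter.py` script above (a sketch: the dataset paths are placeholders, and BIMVEE must be installed first, e.g. via `pip install bimvee`):
```bash
# output defaults to <in_path>/ev2 when --out_path is omitted
python3 ev2converter.py --in_path /path/to/old_format_dataset

# or give an explicit output directory
python3 ev2converter.py --in_path /path/to/old_format_dataset --out_path /path/to/converted_dataset
```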