├── VideoFlow
│ ├── core
│ │ ├── __init__.py
│ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── misc.py
│ │ │ └── logger.py
│ │ └── Networks
│ │   ├── BOFNet
│ │   │ └── __init__.py
│ │   ├── MOFNetStack
│ │   │ └── __init__.py
│ │   └── __init__.py
│ ├── flow_dataset_mf
│ │ ├── flyingthings_thres5.pkl
│ │ ├── sintel_training_scene.pkl
│ │ ├── convert_HD1K.py
│ │ ├── convert_sintel.py
│ │ └── convert_things.py
│ ├── how_to_run.txt
│ ├── alt_cuda_corr
│ │ ├── run_install.sh
│ │ ├── setup.py
│ │ └── correlation.cpp
│ ├── flow_datasets
│ │ ├── KITTI
│ │ │ └── generate_KITTI_list.py
│ │ ├── hd1k_three_frames
│ │ │ └── convert_HD1K.py
│ │ ├── flying_things_three_frames
│ │ │ └── convert_things.py
│ │ └── sintel_three_frames
│ │   └── convert_sintel.py
│ └── configs
│   ├── kitti.py
│   ├── sintel_submission.py
│   ├── sintel.py
│   ├── things.py
│   ├── things_multiframes.py
│   ├── multiframes_sintel_submission.py
│   ├── sintel_multiframes.py
│   └── kitti_multiframes.py
├── ZoeDepth
│ ├── MiDaS
│ │ ├── input
│ │ │ └── .placeholder
│ │ ├── output
│ │ │ └── .placeholder
│ │ ├── tf
│ │ │ ├── input
│ │ │ │ └── .placeholder
│ │ │ ├── output
│ │ │ │ └── .placeholder
│ │ │ └── utils.py
│ │ ├── weights
│ │ │ └── .placeholder
│ │ ├── mobile
│ │ │ ├── android
│ │ │ │ ├── app
│ │ │ │ │ ├── .gitignore
│ │ │ │ │ ├── src
│ │ │ │ │ │ ├── androidTest
│ │ │ │ │ │ │ ├── assets
│ │ │ │ │ │ │ │ ├── fox-mobilenet_v1_1.0_224_task_api.txt
│ │ │ │ │ │ │ │ └── fox-mobilenet_v1_1.0_224_support.txt
│ │ │ │ │ │ │ └── java
│ │ │ │ │ │ │   └── AndroidManifest.xml
│ │ │ │ │ │ └── main
│ │ │ │ │ │   ├── res
│ │ │ │ │ │   │ ├── drawable-hdpi
│ │ │ │ │ │   │ │ └── ic_launcher.png
│ │ │ │ │ │   │ ├── drawable-mdpi
│ │ │ │ │ │   │ │ └── ic_launcher.png
│ │ │ │ │ │   │ ├── drawable-xxhdpi
│ │ │ │ │ │   │ │ ├── tfl2_logo.png
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── icn_chevron_up.png
│ │ │ │ │ │   │ │ ├── tfl2_logo_dark.png
│ │ │ │ │ │   │ │ └── icn_chevron_down.png
│ │ │ │ │ │   │ ├── mipmap-hdpi
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── ic_launcher_round.png
│ │ │ │ │ │   │ │ └── ic_launcher_foreground.png
│ │ │ │ │ │   │ ├── mipmap-mdpi
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── ic_launcher_round.png
│ │ │ │ │ │   │ │ └── ic_launcher_foreground.png
│ │ │ │ │ │   │ ├── mipmap-xhdpi
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── ic_launcher_round.png
│ │ │ │ │ │   │ │ └── ic_launcher_foreground.png
│ │ │ │ │ │   │ ├── mipmap-xxhdpi
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── ic_launcher_round.png
│ │ │ │ │ │   │ │ └── ic_launcher_foreground.png
│ │ │ │ │ │   │ ├── mipmap-xxxhdpi
│ │ │ │ │ │   │ │ ├── ic_launcher.png
│ │ │ │ │ │   │ │ ├── ic_launcher_round.png
│ │ │ │ │ │   │ │ └── ic_launcher_foreground.png
│ │ │ │ │ │   │ ├── values
│ │ │ │ │ │   │ │ ├── dimens.xml
│ │ │ │ │ │   │ │ ├── colors.xml
│ │ │ │ │ │   │ │ ├── styles.xml
│ │ │ │ │ │   │ │ └── strings.xml
│ │ │ │ │ │   │ ├── mipmap-anydpi-v26
│ │ │ │ │ │   │ │ ├── ic_launcher.xml
│ │ │ │ │ │   │ │ └── ic_launcher_round.xml
│ │ │ │ │ │   │ ├── drawable
│ │ │ │ │ │   │ │ ├── ic_baseline_remove.xml
│ │ │ │ │ │   │ │ ├── ic_baseline_add.xml
│ │ │ │ │ │   │ │ ├── bottom_sheet_bg.xml
│ │ │ │ │ │   │ │ └── rectangle.xml
│ │ │ │ │ │   │ ├── layout
│ │ │ │ │ │   │ │ └── tfe_ic_camera_connection_fragment.xml
│ │ │ │ │ │   │ └── drawable-v24
│ │ │ │ │ │   │   └── ic_launcher_foreground.xml
│ │ │ │ │ │   ├── java
│ │ │ │ │ │   │ └── org
│ │ │ │ │ │   │   └── tensorflow
│ │ │ │ │ │   │     └── lite
│ │ │ │ │ │   │       └── examples
│ │ │ │ │ │   │         └── classification
│ │ │ │ │ │   │           └── customview
│ │ │ │ │ │   │             ├── ResultsView.java
│ │ │ │ │ │   │             └── OverlayView.java
│ │ │ │ │ │   └── AndroidManifest.xml
│ │ │ │ │ ├── proguard-rules.pro
│ │ │ │ │ └── build.gradle
│ │ │ │ ├── settings.gradle
│ │ │ │ ├── gradle
│ │ │ │ │ └── wrapper
│ │ │ │ │   ├── gradle-wrapper.jar
│ │ │ │ │   └── gradle-wrapper.properties
│ │ │ │ ├── models
│ │ │ │ │ ├── src
│ │ │ │ │ │ └── main
│ │ │ │ │ │   └── AndroidManifest.xml
│ │ │ │ │ ├── download.gradle
│ │ │ │ │ ├── proguard-rules.pro
│ │ │ │ │ └── build.gradle
│ │ │ │ ├── lib_support
│ │ │ │ │ ├── src
│ │ │ │ │ │ └── main
│ │ │ │ │ │   └── AndroidManifest.xml
│ │ │ │ │ ├── proguard-rules.pro
│ │ │ │ │ └── build.gradle
│ │ │ │ ├── lib_task_api
│ │ │ │ │ ├── src
│ │ │ │ │ │ └── main
│ │ │ │ │ │   ├── AndroidManifest.xml
│ │ │ │ │ │   └── java
│ │ │ │ │ │     └── org
│ │ │ │ │ │       └── tensorflow
│ │ │ │ │ │         └── lite
│ │ │ │ │ │           └── examples
│ │ │ │ │ │             └── classification
│ │ │ │ │ │               └── tflite
│ │ │ │ │ │                 ├── ClassifierQuantizedEfficientNet.java
│ │ │ │ │ │                 ├── ClassifierFloatMobileNet.java
│ │ │ │ │ │                 ├── ClassifierQuantizedMobileNet.java
│ │ │ │ │ │                 └── ClassifierFloatEfficientNet.java
│ │ │ │ │ ├── proguard-rules.pro
│ │ │ │ │ └── build.gradle
│ │ │ │ ├── .gitignore
│ │ │ │ ├── build.gradle
│ │ │ │ ├── README.md
│ │ │ │ ├── gradle.properties
│ │ │ │ └── LICENSE
│ │ │ └── ios
│ │ │   ├── .gitignore
│ │ │   ├── Midas
│ │ │   │ ├── Assets.xcassets
│ │ │   │ │ ├── Contents.json
│ │ │   │ │ ├── tfl_logo.png
│ │ │   │ │ └── AppIcon.appiconset
│ │ │   │ │   ├── 20.png
│ │ │   │ │   ├── 29.png
│ │ │   │ │   ├── 40.png
│ │ │   │ │   ├── 50.png
│ │ │   │ │   ├── 57.png
│ │ │   │ │   ├── 58.png
│ │ │   │ │   ├── 60.png
│ │ │   │ │   ├── 72.png
│ │ │   │ │   ├── 76.png
│ │ │   │ │   ├── 80.png
│ │ │   │ │   ├── 87.png
│ │ │   │ │   ├── 100.png
│ │ │   │ │   ├── 1024.png
│ │ │   │ │   ├── 114.png
│ │ │   │ │   ├── 120.png
│ │ │   │ │   ├── 144.png
│ │ │   │ │   ├── 152.png
│ │ │   │ │   ├── 167.png
│ │ │   │ │   └── 180.png
│ │ │   │ ├── Cells
│ │ │   │ │ └── InfoCell.swift
│ │ │   │ ├── Constants.swift
│ │ │   │ ├── Camera Feed
│ │ │   │ │ └── PreviewView.swift
│ │ │   │ ├── AppDelegate.swift
│ │ │   │ ├── Info.plist
│ │ │   │ ├── Views
│ │ │   │ │ └── OverlayView.swift
│ │ │   │ └── Extensions
│ │ │   │   └── CGSizeExtension.swift
│ │ │   ├── Midas.xcodeproj
│ │ │   │ ├── project.xcworkspace
│ │ │   │ │ ├── contents.xcworkspacedata
│ │ │   │ │ ├── xcuserdata
│ │ │   │ │ │ └── admin.xcuserdatad
│ │ │   │ │ │   └── UserInterfaceState.xcuserstate
│ │ │   │ │ └── xcshareddata
│ │ │   │ │   └── IDEWorkspaceChecks.plist
│ │ │   │ └── xcuserdata
│ │ │   │   └── admin.xcuserdatad
│ │ │   │     └── xcschemes
│ │ │   │       └── xcschememanagement.plist
│ │ │   ├── Podfile
│ │ │   └── RunScripts
│ │ │     └── download_models.sh
│ │ ├── figures
│ │ │ ├── Comparison.png
│ │ │ └── Improvement_vs_FPS.png
│ │ ├── ros
│ │ │ ├── additions
│ │ │ │ ├── do_catkin_make.sh
│ │ │ │ ├── downloads.sh
│ │ │ │ ├── make_package_cpp.sh
│ │ │ │ ├── install_ros_noetic_ubuntu_20.sh
│ │ │ │ └── install_ros_melodic_ubuntu_17_18.sh
│ │ │ ├── launch_midas_cpp.sh
│ │ │ ├── run_talker_listener_test.sh
│ │ │ ├── midas_cpp
│ │ │ │ ├── launch
│ │ │ │ │ ├── midas_cpp.launch
│ │ │ │ │ └── midas_talker_listener.launch
│ │ │ │ └── scripts
│ │ │ │   ├── talker.py
│ │ │ │   ├── listener.py
│ │ │ │   └── listener_original.py
│ │ │ └── LICENSE
│ │ ├── environment.yaml
│ │ ├── midas
│ │ │ ├── backbones
│ │ │ │ ├── swin.py
│ │ │ │ ├── swin2.py
│ │ │ │ ├── next_vit.py
│ │ │ │ └── swin_common.py
│ │ │ └── base_model.py
│ │ ├── Dockerfile
│ │ └── LICENSE
│ ├── ui
│ │ └── ui_requirements.txt
│ ├── input.png
│ ├── output.png
│ ├── output_colored.png
│ ├── assets
│ │ └── zoedepth-teaser.png
│ ├── environment.yml
│ ├── zoedepth
│ │ ├── models
│ │ │ ├── zoedepth
│ │ │ │ ├── config_zoedepth_kitti.json
│ │ │ │ ├── __init__.py
│ │ │ │ └── config_zoedepth.json
│ │ │ ├── __init__.py
│ │ │ ├── base_models
│ │ │ │ └── __init__.py
│ │ │ └── zoedepth_nk
│ │ │   ├── __init__.py
│ │ │   └── config_zoedepth_nk.json
│ │ ├── utils
│ │ │ ├── arg_utils.py
│ │ │ └── __init__.py
│ │ ├── data
│ │ │ └── __init__.py
│ │ └── trainers
│ │   └── builder.py
│ ├── LICENSE
│ ├── sanity_hub.py
│ └── predict_depth_1.py
├── vdo_slam.png
├── YOLOv8
│ ├── img
│ │ ├── 0000000281.png
│ │ ├── 0000000381.png
│ │ └── 0000000431.png
│ ├── include
│ │ ├── data_struct.h
│ │ ├── detector_opencv_dnn.h
│ │ ├── segmentor_opencv_dnn.h
│ │ └── detector_onnxruntime.h
│ └── bytetrack
│   └── include
│     ├── STrack.h
│     ├── lapjv.h
│     ├── BYTETracker.h
│     └── kalmanFilter.h
├── onnxruntime-linux-x64-1.16.3.tgz
├── how_to_run.txt
├── include
│ ├── cvplot
│ │ ├── cvplot.h
│ │ ├── color.h
│ │ └── highgui.h
│ ├── Optimizer.h
│ ├── Converter.h
│ ├── Viewer.h
│ └── System.h
├── dependencies
│ └── g2o
│   ├── README.txt
│   ├── cmake_modules
│   │ └── FindCSparse.cmake
│   ├── config.h
│   ├── config.h.in
│   ├── g2o
│   │ ├── core
│   │ │ ├── g2o_core_api.h
│   │ │ ├── parameter.cpp
│   │ │ ├── robust_kernel.cpp
│   │ │ ├── base_vertex.hpp
│   │ │ ├── parameter.h
│   │ │ └── optimization_algorithm_gauss_newton.h
│   │ ├── solvers
│   │ │ ├── g2o_csparse_extension_api.h
│   │ │ ├── csparse_extension.h
│   │ │ └── csparse_helper.h
│   │ ├── types
│   │ │ ├── types_dyn_slam3d.h
│   │ │ ├── isometry3d_gradients.cpp
│   │ │ ├── se3_ops.h
│   │ │ └── dquat2mat.h
│   │ └── stuff
│   │   ├── opengl_wrapper.h
│   │   ├── os_specific.h
│   │   └── os_specific.c
│   └── license-bsd.txt
├── src
│ └── Map.cc
├── LICENSE.txt
├── cmake_modules
│ └── FindCSparse.cmake
├── .gitignore
└── example
  └── kitti-0018-0020.yaml
/VideoFlow/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/VideoFlow/core/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/input/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/output/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/tf/input/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/tf/output/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/weights/.placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/VideoFlow/core/Networks/BOFNet/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/VideoFlow/core/Networks/MOFNetStack/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ZoeDepth/ui/ui_requirements.txt:
--------------------------------------------------------------------------------
1 | gradio
2 | trimesh==3.9.42
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
3 | /build/
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/.gitignore:
--------------------------------------------------------------------------------
1 | # ignore model file
2 | #*.tflite
3 |
--------------------------------------------------------------------------------
/vdo_slam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/vdo_slam.png
--------------------------------------------------------------------------------
/ZoeDepth/input.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/input.png
--------------------------------------------------------------------------------
/ZoeDepth/output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/output.png
--------------------------------------------------------------------------------
/YOLOv8/img/0000000281.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/YOLOv8/img/0000000281.png
--------------------------------------------------------------------------------
/YOLOv8/img/0000000381.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/YOLOv8/img/0000000381.png
--------------------------------------------------------------------------------
/YOLOv8/img/0000000431.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/YOLOv8/img/0000000431.png
--------------------------------------------------------------------------------
/ZoeDepth/output_colored.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/output_colored.png
--------------------------------------------------------------------------------
/onnxruntime-linux-x64-1.16.3.tgz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/onnxruntime-linux-x64-1.16.3.tgz
--------------------------------------------------------------------------------
/ZoeDepth/assets/zoedepth-teaser.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/assets/zoedepth-teaser.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/figures/Comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/figures/Comparison.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 |   "info" : {
3 |     "version" : 1,
4 |     "author" : "xcode"
5 |   }
6 | }
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/androidTest/assets/fox-mobilenet_v1_1.0_224_task_api.txt:
--------------------------------------------------------------------------------
1 | red_fox 0.85
2 | kit_fox 0.13
3 | grey_fox 0.02
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/figures/Improvement_vs_FPS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/figures/Improvement_vs_FPS.png
--------------------------------------------------------------------------------
/how_to_run.txt:
--------------------------------------------------------------------------------
1 | ./example/vdo_slam example/kitti_10_03.yaml /home/spurs/dataset/kitti_raw/2011_10_03/2011_10_03_drive_0047_sync/image_02
2 |
3 |
4 |
--------------------------------------------------------------------------------
/VideoFlow/flow_dataset_mf/flyingthings_thres5.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/VideoFlow/flow_dataset_mf/flyingthings_thres5.pkl
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/androidTest/assets/fox-mobilenet_v1_1.0_224_support.txt:
--------------------------------------------------------------------------------
1 | red_fox 0.79403335
2 | kit_fox 0.16753247
3 | grey_fox 0.03619214
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/additions/do_catkin_make.sh:
--------------------------------------------------------------------------------
1 | mkdir src
2 | catkin_make
3 | source devel/setup.bash
4 | echo $ROS_PACKAGE_PATH
5 | chmod +x ./devel/setup.bash
6 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'TFLite Image Classification Demo App'
2 | include ':app', ':lib_support', ':lib_task_api', ':models'
--------------------------------------------------------------------------------
/VideoFlow/how_to_run.txt:
--------------------------------------------------------------------------------
1 | python -u inference_sequence.py --mode MOF --seq_dir /home/spurs/dataset/2011_10_03/2011_10_03_drive_0047_sync/image_02/data --vis_dir demo_flow_vis
2 |
3 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/tfl_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/tfl_logo.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/20.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/20.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/29.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/29.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/40.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/50.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/50.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/57.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/57.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/58.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/58.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/60.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/60.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/72.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/76.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/80.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/80.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/87.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/87.png
--------------------------------------------------------------------------------
/include/cvplot/cvplot.h:
--------------------------------------------------------------------------------
1 | #ifndef CVPLOT_H
2 | #define CVPLOT_H
3 |
4 | #include "color.h"
5 | #include "figure.h"
6 | #include "highgui.h"
7 | #include "window.h"
8 |
9 | #endif // CVPLOT_H
10 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/100.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/1024.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/114.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/114.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/120.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/144.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/152.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/152.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/167.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/167.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/180.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas/Assets.xcassets/AppIcon.appiconset/180.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/additions/downloads.sh:
--------------------------------------------------------------------------------
1 | mkdir ~/.ros
2 | wget https://github.com/isl-org/MiDaS/releases/download/v2_1/model-small-traced.pt
3 | cp ./model-small-traced.pt ~/.ros/model-small-traced.pt
4 |
5 |
6 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/models/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/launch_midas_cpp.sh:
--------------------------------------------------------------------------------
1 | source ~/catkin_ws/devel/setup.bash
2 | roslaunch midas_cpp midas_cpp.launch model_name:="model-small-traced.pt" input_topic:="image_topic" output_topic:="midas_topic" out_orig_size:="true"
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_support/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/libraries
5 | /.idea/modules.xml
6 | /.idea/workspace.xml
7 | .DS_Store
8 | /build
9 | /captures
10 | .externalNativeBuild
11 |
12 | /.gradle/
13 | /.idea/
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 15dp
4 | 8dp
5 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/androidTest/java/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
--------------------------------------------------------------------------------
/dependencies/g2o/README.txt:
--------------------------------------------------------------------------------
1 | You should have received this g2o version along with ORB-SLAM2 (https://github.com/raulmur/ORB_SLAM2).
2 | See the original g2o library at: https://github.com/RainerKuemmerle/g2o
3 | All files included in this g2o version are BSD, see license-bsd.txt
4 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lturing/VDO_SLAM_modified/HEAD/ZoeDepth/MiDaS/mobile/ios/Midas.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/VideoFlow/alt_cuda_corr/run_install.sh:
--------------------------------------------------------------------------------
1 | PYTHONPATH=/mnt/cache/shixiaoyu1/.local/lib/python3.6/site-packages
2 | export CXX=/mnt/lustre/share/gcc/gcc-5.4/bin/g++
3 | export CC=/mnt/lustre/share/gcc/gcc-5.4/bin/gcc
4 | export CUDA_HOME=/mnt/lustre/share/cuda-11.2
5 | srun --cpus-per-task=5 --ntasks-per-node=1 -p ISPCodec -n1 --gres=gpu:1 python setup.py install --user
6 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>IDEDidComputeMac32BitWarning</key>
6 |     <true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #ffa800
4 | #ff6f00
5 | #425066
6 |
7 | #66000000
8 |
9 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/run_talker_listener_test.sh:
--------------------------------------------------------------------------------
1 | # place any test.mp4 file next to this file
2 |
3 | # roscore
4 | # rosnode kill -a
5 |
6 | source ~/catkin_ws/devel/setup.bash
7 |
8 | roscore &
9 | P1=$!
10 | rosrun midas_cpp talker.py &
11 | P2=$!
12 | rosrun midas_cpp listener_original.py &
13 | P3=$!
14 | rosrun midas_cpp listener.py &
15 | P4=$!
16 | wait $P1 $P2 $P3 $P4
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/environment.yaml:
--------------------------------------------------------------------------------
1 | name: midas-py310
2 | channels:
3 |   - pytorch
4 |   - defaults
5 | dependencies:
6 |   - nvidia::cudatoolkit=11.7
7 |   - python=3.10.8
8 |   - pytorch::pytorch=1.13.0
9 |   - torchvision=0.14.0
10 |   - pip=22.3.1
11 |   - numpy=1.23.4
12 |   - pip:
13 |     - opencv-python==4.6.0.66
14 |     - imutils==0.5.4
15 |     - timm==0.6.12
16 |     - einops==0.6.0
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable/ic_baseline_remove.xml:
--------------------------------------------------------------------------------
1 |
6 |
9 |
10 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable/ic_baseline_add.xml:
--------------------------------------------------------------------------------
1 |
6 |
9 |
10 |
--------------------------------------------------------------------------------
/VideoFlow/core/Networks/__init__.py:
--------------------------------------------------------------------------------
1 | import torch
2 | def build_network(cfg):
3 |     name = cfg.network
4 |     if name == 'MOFNetStack':
5 |         from .MOFNetStack.network import MOFNet as network
6 |     elif name == 'BOFNet':
7 |         from .BOFNet.network import BOFNet as network
8 |     else:
9 |         raise ValueError(f"Network = {name} is not a valid network!")
10 | 
11 |     return network(cfg[name])
12 | 
--------------------------------------------------------------------------------
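A minimal sketch of how this factory is driven (hypothetical: `get_cfg` and the config layout are assumptions modeled on VideoFlow's configs/*.py, where `cfg.network` names the architecture and `cfg[cfg.network]` holds its sub-config):

    # hypothetical usage; get_cfg() and its fields are assumptions, not shown in this dump
    from configs.multiframes_sintel_submission import get_cfg
    from core.Networks import build_network

    cfg = get_cfg()
    model = build_network(cfg)  # -> MOFNet(cfg['MOFNetStack']) or BOFNet(cfg['BOFNet'])
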
/ZoeDepth/MiDaS/midas/backbones/swin.py:
--------------------------------------------------------------------------------
1 | import timm
2 | 
3 | from .swin_common import _make_swin_backbone
4 | 
5 | 
6 | def _make_pretrained_swinl12_384(pretrained, hooks=None):
7 |     model = timm.create_model("swin_large_patch4_window12_384", pretrained=pretrained)
8 | 
9 |     hooks = [1, 1, 17, 1] if hooks is None else hooks
10 |     return _make_swin_backbone(
11 |         model,
12 |         hooks=hooks
13 |     )
14 | 
--------------------------------------------------------------------------------
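For context, a one-line usage sketch of the builder above (it simply forwards `pretrained` to timm; `hooks` defaults to [1, 1, 17, 1]):

    # sketch: instantiate the Swin-L patch4/window12 384px encoder defined above
    backbone = _make_pretrained_swinl12_384(pretrained=True)
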
/src/Map.cc:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is part of VDO-SLAM.
3 | *
4 | * Copyright (C) 2019-2020 Jun Zhang (The Australian National University)
5 | * For more information see <https://github.com/halajun/VDO_SLAM>
6 | *
7 | **/
8 |
9 | #include "Map.h"
10 |
11 | namespace VDO_SLAM
12 | {
13 |
14 | Map::Map():mnMaxKFid(0),mnBigChangeIdx(0)
15 | {
16 | }
17 |
18 | } //namespace VDO_SLAM
19 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/models/download.gradle:
--------------------------------------------------------------------------------
1 | def modelFloatDownloadUrl = "https://github.com/isl-org/MiDaS/releases/download/v2_1/model_opt.tflite"
2 | def modelFloatFile = "model_opt.tflite"
3 | 
4 | task downloadModelFloat(type: Download) {
5 |     src "${modelFloatDownloadUrl}"
6 |     dest project.ext.ASSET_DIR + "/${modelFloatFile}"
7 |     overwrite false
8 | }
9 | 
10 | preBuild.dependsOn downloadModelFloat
11 | 
--------------------------------------------------------------------------------
/VideoFlow/alt_cuda_corr/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | from torch.utils.cpp_extension import BuildExtension, CUDAExtension
3 | 
4 | 
5 | setup(
6 |     name='correlation',
7 |     ext_modules=[
8 |         CUDAExtension('alt_cuda_corr',
9 |             sources=['correlation.cpp', 'correlation_kernel.cu'],
10 |             extra_compile_args={'cxx': [], 'nvcc': ['-O3']}),
11 |     ],
12 |     cmdclass={
13 |         'build_ext': BuildExtension
14 |     })
15 | 
--------------------------------------------------------------------------------
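Note that the importable module is named after the CUDAExtension, not after the setup() name: once built (via run_install.sh above, or plainly `python setup.py install`), the smoke test is:

    # the module name comes from CUDAExtension('alt_cuda_corr', ...) above
    import alt_cuda_corr
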
/ZoeDepth/MiDaS/midas/base_model.py:
--------------------------------------------------------------------------------
1 | import torch
2 | 
3 | 
4 | class BaseModel(torch.nn.Module):
5 |     def load(self, path):
6 |         """Load model from file.
7 | 
8 |         Args:
9 |             path (str): file path
10 |         """
11 |         parameters = torch.load(path, map_location=torch.device('cpu'))
12 | 
13 |         if "optimizer" in parameters:
14 |             parameters = parameters["model"]
15 | 
16 |         self.load_state_dict(parameters)
17 | 
--------------------------------------------------------------------------------
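A minimal sketch of the load path above (the subclass and checkpoint path are hypothetical; `load` accepts either a raw state_dict or a training checkpoint shaped like {"model": ..., "optimizer": ...}):

    import torch

    # hypothetical BaseModel subclass; the .pt path is a placeholder
    class TinyNet(BaseModel):
        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(1, 1)

    net = TinyNet()
    net.load("weights/checkpoint.pt")  # unwraps the "model" entry if an optimizer is bundled
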
/ZoeDepth/MiDaS/mobile/ios/Podfile:
--------------------------------------------------------------------------------
1 | # Uncomment the next line to define a global platform for your project
2 | platform :ios, '12.0'
3 |
4 | target 'Midas' do
5 | # Comment the next line if you're not using Swift and don't want to use dynamic frameworks
6 | use_frameworks!
7 |
8 | # Pods for Midas
9 | pod 'TensorFlowLiteSwift', '~> 0.0.1-nightly'
10 | pod 'TensorFlowLiteSwift/CoreML', '~> 0.0.1-nightly'
11 | pod 'TensorFlowLiteSwift/Metal', '~> 0.0.1-nightly'
12 | end
13 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/additions/make_package_cpp.sh:
--------------------------------------------------------------------------------
1 | cd ~/catkin_ws/src
2 | catkin_create_pkg midas_cpp std_msgs roscpp cv_bridge sensor_msgs image_transport
3 | cd ~/catkin_ws
4 | catkin_make
5 |
6 | chmod +x ~/catkin_ws/devel/setup.bash
7 | printf "\nsource ~/catkin_ws/devel/setup.bash" >> ~/.bashrc
8 | source ~/catkin_ws/devel/setup.bash
9 |
10 |
11 | sudo rosdep init
12 | rosdep update
13 | #rospack depends1 midas_cpp
14 | roscd midas_cpp
15 | #cat package.xml
16 | #rospack depends midas_cpp
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | VDO-SLAM is released under a GPLv3 license (see LICENSE-GPL.txt).
2 | Please see Dependencies.md for a list of all included code and library dependencies which are not property of the authors of VDO-SLAM.
3 |
4 | For a closed-source version of VDO-SLAM for commercial purposes, please contact the authors.
5 |
6 | If you use VDO-SLAM in an academic work, please cite the most relevant publications listed at:
7 | https://github.com/halajun/VDO_SLAM
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas.xcodeproj/xcuserdata/admin.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>SchemeUserState</key>
6 |     <dict>
7 |         <key>PoseNet.xcscheme_^#shared#^_</key>
8 |         <dict>
9 |             <key>orderHint</key>
10 |             <integer>3</integer>
11 |         </dict>
12 |     </dict>
13 | </dict>
14 | </plist>
15 | 
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable/bottom_sheet_bg.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable/rectangle.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
12 |
13 |
--------------------------------------------------------------------------------
/ZoeDepth/environment.yml:
--------------------------------------------------------------------------------
1 | name: zoe
2 | channels:
3 |   - pytorch
4 |   - nvidia
5 |   - conda-forge
6 | dependencies:
7 |   - cuda=11.7.1
8 |   - h5py=3.7.0
9 |   - hdf5=1.12.2
10 |   - matplotlib=3.6.2
11 |   - matplotlib-base=3.6.2
12 |   - numpy=1.24.1
13 |   - opencv=4.6.0
14 |   - pip=22.3.1
15 |   - python=3.9.7
16 |   - pytorch=1.13.1
17 |   - pytorch-cuda=11.7
18 |   - pytorch-mutex=1.0
19 |   - scipy=1.10.0
20 |   - torchaudio=0.13.1
21 |   - torchvision=0.14.1
22 |   - pip:
23 |     - huggingface-hub==0.11.1
24 |     - timm==0.6.12
25 |     - tqdm==4.64.1
26 |     - wandb==0.13.9
27 | 
--------------------------------------------------------------------------------
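The environment above can be created with `conda env create -f environment.yml` and activated with `conda activate zoe` (the name declared in the file).
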
/ZoeDepth/MiDaS/mobile/ios/RunScripts/download_models.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Download TF Lite model from the internet if it does not exist.
3 |
4 | TFLITE_MODEL="model_opt.tflite"
5 | TFLITE_FILE="Midas/Model/${TFLITE_MODEL}"
6 | MODEL_SRC="https://github.com/isl-org/MiDaS/releases/download/v2/${TFLITE_MODEL}"
7 |
8 | if test -f "${TFLITE_FILE}"; then
9 |     echo "INFO: TF Lite model already exists. Skip downloading and use the local model."
10 | else
11 |     curl --create-dirs -o "${TFLITE_FILE}" -LJO "${MODEL_SRC}"
12 |     echo "INFO: Downloaded TensorFlow Lite model to ${TFLITE_FILE}."
13 | fi
14 |
15 |
--------------------------------------------------------------------------------
/VideoFlow/flow_dataset_mf/sintel_training_scene.pkl:
--------------------------------------------------------------------------------
1 | (lp0
2 | S'ambush_6'
3 | p1
4 | aS'bandage_1'
5 | p2
6 | aS'market_2'
7 | p3
8 | aS'cave_2'
9 | p4
10 | aS'temple_2'
11 | p5
12 | aS'shaman_3'
13 | p6
14 | aS'sleeping_2'
15 | p7
16 | aS'market_6'
17 | p8
18 | aS'cave_4'
19 | p9
20 | aS'ambush_2'
21 | p10
22 | aS'market_5'
23 | p11
24 | aS'alley_2'
25 | p12
26 | aS'ambush_5'
27 | p13
28 | aS'ambush_7'
29 | p14
30 | aS'temple_3'
31 | p15
32 | aS'bandage_2'
33 | p16
34 | aS'mountain_1'
35 | p17
36 | aS'alley_1'
37 | p18
38 | aS'shaman_2'
39 | p19
40 | aS'bamboo_1'
41 | p20
42 | aS'ambush_4'
43 | p21
44 | aS'bamboo_2'
45 | p22
46 | aS'sleeping_1'
47 | p23
48 | a.
--------------------------------------------------------------------------------
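The dump above is a human-readable protocol-0 pickle of a plain Python list of Sintel training scene names; it loads back with:

    import pickle

    with open("flow_dataset_mf/sintel_training_scene.pkl", "rb") as f:
        scenes = pickle.load(f)  # ['ambush_6', 'bandage_1', 'market_2', ...]
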
/ZoeDepth/zoedepth/models/zoedepth/config_zoedepth_kitti.json:
--------------------------------------------------------------------------------
1 | {
2 |     "model": {
3 |         "bin_centers_type": "normed",
4 |         "img_size": [384, 768]
5 |     },
6 | 
7 |     "train": {
8 |     },
9 | 
10 |     "infer":{
11 |         "train_midas": false,
12 |         "use_pretrained_midas": false,
13 |         "pretrained_resource" : "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_K.pt",
14 |         "force_keep_ar": true
15 |     },
16 | 
17 |     "eval":{
18 |         "train_midas": false,
19 |         "use_pretrained_midas": false,
20 |         "pretrained_resource" : "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_K.pt"
21 |     }
22 | }
--------------------------------------------------------------------------------
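This config points `pretrained_resource` at the KITTI-finetuned ZoeD_M12_K checkpoint. A hedged sketch of loading that model through ZoeDepth's published torch.hub entry point (`ZoeD_K`; `infer_pil` is ZoeDepth's PIL-based inference helper, not shown in this dump):

    import torch
    from PIL import Image

    # downloads the ZoeD_M12_K.pt weights referenced in the config above
    model = torch.hub.load("isl-org/ZoeDepth", "ZoeD_K", pretrained=True)
    depth = model.infer_pil(Image.open("input.png"))  # per-pixel metric depth (numpy array)
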
/ZoeDepth/MiDaS/mobile/android/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | 
3 | buildscript {
4 | 
5 |     repositories {
6 |         google()
7 |         jcenter()
8 |     }
9 |     dependencies {
10 |         classpath 'com.android.tools.build:gradle:4.0.0'
11 |         classpath 'de.undercouch:gradle-download-task:4.0.2'
12 |         // NOTE: Do not place your application dependencies here; they belong
13 |         // in the individual module build.gradle files
14 |     }
15 | }
16 | 
17 | allprojects {
18 |     repositories {
19 |         google()
20 |         jcenter()
21 |     }
22 | }
23 | 
24 | task clean(type: Delete) {
25 |     delete rootProject.buildDir
26 | }
27 | 
28 | 
--------------------------------------------------------------------------------
/dependencies/g2o/cmake_modules/FindCSparse.cmake:
--------------------------------------------------------------------------------
1 | # Look for csparse; note the difference in the directory specifications!
2 | find_path(CSPARSE_INCLUDE_DIR NAMES cs.h
3 | PATHS
4 | /usr/include/suitesparse
5 | /usr/include
6 | /opt/local/include
7 | /usr/local/include
8 | /sw/include
9 | /usr/include/ufsparse
10 | /opt/local/include/ufsparse
11 | /usr/local/include/ufsparse
12 | /sw/include/ufsparse
13 | PATH_SUFFIXES
14 | suitesparse
15 | )
16 |
17 | find_library(CSPARSE_LIBRARY NAMES cxsparse libcxsparse
18 | PATHS
19 | /usr/lib
20 | /usr/local/lib
21 | /opt/local/lib
22 | /sw/lib
23 | )
24 |
25 | include(FindPackageHandleStandardArgs)
26 | find_package_handle_standard_args(CSparse DEFAULT_MSG
27 | CSPARSE_INCLUDE_DIR CSPARSE_LIBRARY)
28 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/utils/arg_utils.py:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | def infer_type(x):  # hacky way to infer type from string args
4 |     if not isinstance(x, str):
5 |         return x
6 | 
7 |     try:
8 |         x = int(x)
9 |         return x
10 |     except ValueError:
11 |         pass
12 | 
13 |     try:
14 |         x = float(x)
15 |         return x
16 |     except ValueError:
17 |         pass
18 | 
19 |     return x
20 | 
21 | 
22 | def parse_unknown(unknown_args):
23 |     clean = []
24 |     for a in unknown_args:
25 |         if "=" in a:
26 |             k, v = a.split("=", 1)  # split once so values may contain '='
27 |             clean.extend([k, v])
28 |         else:
29 |             clean.append(a)
30 | 
31 |     keys = clean[::2]
32 |     values = clean[1::2]
33 |     return {k.replace("--", ""): infer_type(v) for k, v in zip(keys, values)}
34 | 
--------------------------------------------------------------------------------
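Typical use pairs this with argparse's unknown-argument overflow, turning `--key value` or `--key=value` pairs into a typed override dict:

    import argparse

    parser = argparse.ArgumentParser()
    args, unknown = parser.parse_known_args(["--lr", "0.001", "--epochs=5"])
    overrides = parse_unknown(unknown)  # {'lr': 0.001, 'epochs': 5}
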
/ZoeDepth/MiDaS/mobile/android/README.md:
--------------------------------------------------------------------------------
1 | # MiDaS on Android smartphone by using TensorFlow-lite (TFLite)
2 |
3 |
4 | * Either use Android Studio to compile the app.
5 |
6 | * Or use the ready-to-install apk-file:
7 | * Or use URL: https://i.diawi.com/CVb8a9
8 | * Or use QR-code:
9 |
10 | Scan the QR-code or open the URL -> press `Install application` -> press `Download` and wait for the download -> Open -> Install -> Open -> press `Allow` when MiDaS asks to take photos and videos from the camera (`While using the app`)
11 |
12 | 
13 |
14 | ----
15 |
16 | To use another model, convert it to `model_opt.tflite` and place it in the directory: `models\src\main\assets`
17 |
18 |
19 | ----
20 |
21 | Original repository: https://github.com/isl-org/MiDaS
22 |
--------------------------------------------------------------------------------
/cmake_modules/FindCSparse.cmake:
--------------------------------------------------------------------------------
1 | # Look for csparse; note the difference in the directory specifications!
2 | find_path(CSPARSE_INCLUDE_DIR NAMES cs.h
3 | PATHS
4 | /usr/include/suitesparse
5 | /usr/include
6 | /opt/local/include
7 | /usr/local/include
8 | /sw/include
9 | /usr/include/ufsparse
10 | /opt/local/include/ufsparse
11 | /usr/local/include/ufsparse
12 | /sw/include/ufsparse
13 | PATH_SUFFIXES
14 | suitesparse
15 | )
16 |
17 | find_library(CSPARSE_LIBRARY NAMES cxsparse libcxsparse
18 | PATHS
19 | /usr/lib
20 | /usr/local/lib
21 | /opt/local/lib
22 | /sw/lib
23 | )
24 |
25 | include(FindPackageHandleStandardArgs)
26 |
27 | #find_package_handle_standard_args(CSPARSE DEFAULT_MSG
28 | # CSPARSE_INCLUDE_DIR CSPARSE_LIBRARY)
29 |
30 | find_package_handle_standard_args(CSparse DEFAULT_MSG
31 | CSPARSE_INCLUDE_DIR CSPARSE_LIBRARY)
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | android.useAndroidX=true
15 | android.enableJetifier=true
16 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/models/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_support/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/dependencies/g2o/config.h:
--------------------------------------------------------------------------------
1 | #ifndef G2O_CONFIG_H
2 | #define G2O_CONFIG_H
3 |
4 | /* #undef G2O_OPENMP */
5 | /* #undef G2O_SHARED_LIBS */
6 |
7 | // give a warning if Eigen defaults to row-major matrices.
8 | // We internally assume column-major matrices throughout the code.
9 | #ifdef EIGEN_DEFAULT_TO_ROW_MAJOR
10 | # error "g2o requires column major Eigen matrices (see http://eigen.tuxfamily.org/bz/show_bug.cgi?id=422)"
11 | #endif
12 |
13 | /* #undef G2O_SINGLE_PRECISION_MATH */
14 | #ifdef G2O_SINGLE_PRECISION_MATH
15 | #define G2O_NUMBER_FORMAT_STR "%g"
16 |
17 | #ifdef __cplusplus
18 | using number_t = float;
19 | #else
20 | typedef float number_t;
21 | #endif
22 | #else
23 | #define G2O_NUMBER_FORMAT_STR "%lg"
24 |
25 | #ifdef __cplusplus
26 | using number_t = double;
27 | #else
28 | typedef double number_t;
29 | #endif
30 | #endif
31 |
32 | #endif
33 |
--------------------------------------------------------------------------------
/dependencies/g2o/config.h.in:
--------------------------------------------------------------------------------
1 | #ifndef G2O_CONFIG_H
2 | #define G2O_CONFIG_H
3 |
4 | #cmakedefine G2O_OPENMP 1
5 | #cmakedefine G2O_SHARED_LIBS 1
6 |
7 | // give a warning if Eigen defaults to row-major matrices.
8 | // We internally assume column-major matrices throughout the code.
9 | #ifdef EIGEN_DEFAULT_TO_ROW_MAJOR
10 | # error "g2o requires column major Eigen matrices (see http://eigen.tuxfamily.org/bz/show_bug.cgi?id=422)"
11 | #endif
12 |
13 | #cmakedefine G2O_SINGLE_PRECISION_MATH
14 | #ifdef G2O_SINGLE_PRECISION_MATH
15 | #define G2O_NUMBER_FORMAT_STR "%g"
16 |
17 | #ifdef __cplusplus
18 | using number_t = float;
19 | #else
20 | typedef float number_t;
21 | #endif
22 | #else
23 | #define G2O_NUMBER_FORMAT_STR "%lg"
24 |
25 | #ifdef __cplusplus
26 | using number_t = double;
27 | #else
28 | typedef double number_t;
29 | #endif
30 | #endif
31 |
32 | #endif
33 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Cells/InfoCell.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | import UIKit
16 |
17 | /// Table cell for inference result in bottom view.
18 | class InfoCell: UITableViewCell {
19 | @IBOutlet weak var fieldNameLabel: UILabel!
20 | @IBOutlet weak var infoLabel: UILabel!
21 | }
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/midas_cpp/launch/midas_cpp.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/g2o_core_api.h:
--------------------------------------------------------------------------------
1 | /***************************************************************************
2 | * Description: import/export macros for creating DLLS with Microsoft
3 | * compiler. Any exported function needs to be declared with the
4 | * appropriate G2O_XXXX_API macro. Also, there must be separate macros
5 | * for each DLL (arrrrrgh!!!)
6 | *
7 | * 17 Jan 2012
8 | * Email: pupilli@cs.bris.ac.uk
9 | ****************************************************************************/
10 | #ifndef G2O_CORE_API_H
11 | #define G2O_CORE_API_H
12 |
13 | #include "../../config.h"
14 |
15 | #ifdef _MSC_VER
16 | // We are using a Microsoft compiler:
17 | #ifdef G2O_SHARED_LIBS
18 | #ifdef core_EXPORTS
19 | #define G2O_CORE_API __declspec(dllexport)
20 | #else
21 | #define G2O_CORE_API __declspec(dllimport)
22 | #endif
23 | #else
24 | #define G2O_CORE_API
25 | #endif
26 |
27 | #else
28 | // Not Microsoft compiler so set empty definition:
29 | #define G2O_CORE_API
30 | #endif
31 |
32 | #endif // G2O_CORE_API_H
33 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/Dockerfile:
--------------------------------------------------------------------------------
1 | # enables cuda support in docker
2 | FROM nvidia/cuda:10.2-cudnn7-runtime-ubuntu18.04
3 |
4 | # install python 3.6, pip and requirements for opencv-python
5 | # (see https://github.com/NVIDIA/nvidia-docker/issues/864)
6 | RUN apt-get update && apt-get -y install \
7 | python3 \
8 | python3-pip \
9 | libsm6 \
10 | libxext6 \
11 | libxrender-dev \
12 | curl \
13 | && rm -rf /var/lib/apt/lists/*
14 |
15 | # install python dependencies
16 | RUN pip3 install --upgrade pip
17 | RUN pip3 install torch~=1.8 torchvision opencv-python-headless~=3.4 timm
18 |
19 | # copy inference code
20 | WORKDIR /opt/MiDaS
21 | COPY ./midas ./midas
22 | COPY ./*.py ./
23 |
24 | # download model weights so the docker image can be used offline
25 | RUN mkdir -p weights && cd weights && { curl -OL https://github.com/isl-org/MiDaS/releases/download/v3/dpt_hybrid_384.pt; cd -; }
26 | RUN python3 run.py --model_type dpt_hybrid; exit 0
27 |
28 | # entrypoint (don't forget to mount input and output directories)
29 | CMD python3 run.py --model_type dpt_hybrid
30 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/midas/backbones/swin2.py:
--------------------------------------------------------------------------------
1 | import timm
2 |
3 | from .swin_common import _make_swin_backbone
4 |
5 |
6 | def _make_pretrained_swin2l24_384(pretrained, hooks=None):
7 | model = timm.create_model("swinv2_large_window12to24_192to384_22kft1k", pretrained=pretrained)
8 |
9 | hooks = [1, 1, 17, 1] if hooks is None else hooks
10 | return _make_swin_backbone(
11 | model,
12 | hooks=hooks
13 | )
14 |
15 |
16 | def _make_pretrained_swin2b24_384(pretrained, hooks=None):
17 | model = timm.create_model("swinv2_base_window12to24_192to384_22kft1k", pretrained=pretrained)
18 |
19 | hooks = [1, 1, 17, 1] if hooks is None else hooks
20 | return _make_swin_backbone(
21 | model,
22 | hooks=hooks
23 | )
24 |
25 |
26 | def _make_pretrained_swin2t16_256(pretrained, hooks=None):
27 | model = timm.create_model("swinv2_tiny_window16_256", pretrained=pretrained)
28 |
29 | hooks = [1, 1, 5, 1] if hooks is None else hooks
30 | return _make_swin_backbone(
31 | model,
32 | hooks=hooks,
33 | patch_grid=[64, 64]
34 | )
35 |
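The factory functions above assume these exact timm model names are registered in the installed timm version (newer timm releases renamed some SwinV2 checkpoints). A hedged check using the standard `timm.list_models` API:

```python
# Sketch: check that the SwinV2 names used above exist in the installed timm.
import timm

for name in ("swinv2_large_window12to24_192to384_22kft1k",
             "swinv2_base_window12to24_192to384_22kft1k",
             "swinv2_tiny_window16_256"):
    print(name, "->", "available" if timm.list_models(name) else "missing")
```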
--------------------------------------------------------------------------------
/VideoFlow/flow_datasets/KITTI/generate_KITTI_list.py:
--------------------------------------------------------------------------------
1 | import os
2 | import math
3 | import random
4 | from glob import glob
5 | import os.path as osp
6 |
7 | split = "testing"
8 | root = "KITTI"
9 |
10 |
11 | root = osp.join(root, split)
12 | images1 = sorted(glob(osp.join(root, 'image_2/*_10.png')))
13 | images2 = sorted(glob(osp.join(root, 'image_2/*_11.png')))
14 |
15 | extra_info = []
16 | flow_list = []
17 | image_list = []
18 |
19 | for img1, img2 in zip(images1, images2):
20 | frame_id = img1.split('/')[-1]
21 | extra_info += [ frame_id+"\n" ]
22 | image_list += [ img1+"\n", img2+"\n" ]
23 |
24 | if split == 'training':
25 | _flow_list = sorted(glob(osp.join(root, 'flow_occ/*_10.png')))
26 | flow_list = [s+"\n" for s in _flow_list]
27 |
28 | print(len(image_list), len(flow_list), len(extra_info))
29 |
30 | with open('KITTI_{}_image.txt'.format(split), 'w') as f:
31 | f.writelines(image_list)
32 |
33 | with open('KITTI_{}_flow.txt'.format(split), 'w') as f:
34 | f.writelines(flow_list)
35 |
36 | with open('KITTI_{}_extra_info.txt'.format(split), 'w') as f:
37 | f.writelines(extra_info)
38 |
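Note that the image list interleaves the `*_10.png` and `*_11.png` frames, two lines per sample, so a consumer has to re-pair them. A minimal sketch of reading the lists back (file names as written by the script above):

```python
# Sketch: re-pair the two-lines-per-sample image list written above.
split = "testing"

with open(f"KITTI_{split}_image.txt") as f:
    paths = [line.strip() for line in f]

pairs = list(zip(paths[0::2], paths[1::2]))  # (frame *_10, frame *_11) per sample
print(len(pairs), "image pairs")
```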
--------------------------------------------------------------------------------
/YOLOv8/include/data_struct.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include <vector>
4 | #include <opencv2/opencv.hpp>
5 |
6 |
7 | using MatVector = std::vector<cv::Mat>;
8 |
9 | #pragma pack(push, n)
10 | struct SegmentedObject {
11 | int classID;
12 | float confidence;
13 | cv::Rect box;
14 | cv::Mat boxMask;
15 | std::vector<std::vector<cv::Point>> maskContoursList;
16 | };
17 | #pragma pack(pop)
18 |
19 | using ImagesSegmentedObject = std::vector<SegmentedObject>;
20 | using BatchSegmentedObject = std::vector<ImagesSegmentedObject>;
21 |
22 | struct MaskParams {
23 | //int segChannels = 32;
24 | //int segWidth = 160;
25 | //int segHeight = 160;
26 | int netWidth = 640;
27 | int netHeight = 640;
28 | float maskThreshold = 0.5;
29 | cv::Size srcImgShape;
30 | cv::Vec4d params;
31 | };
32 |
33 | struct DetectedObject_bak {
34 | int classID;
35 | float confidence;
36 | cv::Rect box;
37 | };
38 |
39 | using DetectedObject = SegmentedObject;
40 | using ImagesDetectedObject = std::vector<DetectedObject>;
41 | using BatchDetectedObject = std::vector<ImagesDetectedObject>;
42 |
--------------------------------------------------------------------------------
/VideoFlow/flow_dataset_mf/convert_HD1K.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 | import os
5 | import pickle
6 |
7 | root = "/mnt/lustre/share/cp/caodongliang/HD1K/"
8 |
9 | image_list = []
10 | flow_list = []
11 |
12 | seq_ix = 0
13 |
14 | while True:
15 | flows = sorted(glob(os.path.join(root, 'hd1k_flow_gt', 'flow_occ/%06d_*.png' % seq_ix)))
16 | images = sorted(glob(os.path.join(root, 'hd1k_input', 'image_2/%06d_*.png' % seq_ix)))
17 |
18 | if len(flows) == 0:
19 | break
20 |
21 | print(seq_ix, len(flows), images[0], images[-1], "!!!!!!!!!!!!!!")
22 |
23 | for idx in range(len(images)):
24 | images[idx] = images[idx].replace("/mnt/lustre/share/cp/caodongliang/HD1K", "HD1K")
25 | for idx in range(len(flows)):
26 | flows[idx] = flows[idx].replace("/mnt/lustre/share/cp/caodongliang/HD1K", "HD1K")
27 |
28 | seq_ix += 1
29 |
30 | image_list.append(images)
31 | flow_list.append(flows)
32 |
33 | with open("hd1k_png.pkl", 'wb') as f:
34 | pickle.dump(image_list, f)
35 | with open("hd1k_flo.pkl", 'wb') as f:
36 | pickle.dump(flow_list, f)
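The two pickles hold one list of paths per HD1K sequence; loading them back is symmetric (a sketch, assuming the files written by the loop above):

```python
# Sketch: reload the per-sequence path lists dumped above.
import pickle

with open("hd1k_png.pkl", "rb") as f:
    image_list = pickle.load(f)  # list of sequences, each a list of image paths
with open("hd1k_flo.pkl", "rb") as f:
    flow_list = pickle.load(f)   # list of sequences, each a list of flow paths

print(len(image_list), "sequences,", sum(len(s) for s in image_list), "frames")
```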
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/midas_cpp/launch/midas_talker_listener.launch:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/java/org/tensorflow/lite/examples/classification/customview/ResultsView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.customview;
17 |
18 | import java.util.List;
19 | import org.tensorflow.lite.examples.classification.tflite.Classifier.Recognition;
20 |
21 | public interface ResultsView {
22 | public void setResults(final List<Recognition> results);
23 | }
24 |
--------------------------------------------------------------------------------
/VideoFlow/core/utils/misc.py:
--------------------------------------------------------------------------------
1 | import time
2 | import os
3 | import shutil
4 |
5 | def process_transformer_cfg(cfg):
6 | log_dir = ''
7 | if 'critical_params' in cfg:
8 | critical_params = [cfg[key] for key in cfg.critical_params]
9 | for name, param in zip(cfg["critical_params"], critical_params):
10 | log_dir += "{:s}[{:s}]".format(name, str(param))
11 |
12 | return log_dir
13 |
14 | def process_cfg(cfg):
15 | log_dir = 'logs/' + cfg.name + '/' + cfg.network + '/'
16 | critical_params = [cfg.trainer[key] for key in cfg.critical_params]
17 | for name, param in zip(cfg["critical_params"], critical_params):
18 | log_dir += "{:s}[{:s}]".format(name, str(param))
19 |
20 | log_dir += process_transformer_cfg(cfg[cfg.network])
21 |
22 | now = time.localtime()
23 | now_time = '{:02d}_{:02d}_{:02d}_{:02d}'.format(now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min)
24 | log_dir += cfg.suffix + '(' + now_time + ')'
25 | cfg.log_dir = log_dir
26 | os.makedirs(log_dir)
27 |
28 | shutil.copytree('configs', f'{log_dir}/configs')
29 | shutil.copytree('core', f'{log_dir}/core')
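For illustration, the log-directory fragment that `process_transformer_cfg` derives from a node's `critical_params` (a toy sketch; run from the VideoFlow root so the import resolves, and note that `process_cfg` itself additionally creates directories and copies `configs/` and `core/` as a side effect):

```python
# Sketch: the name fragment built from critical_params, on a toy yacs node.
from yacs.config import CfgNode as CN
from core.utils.misc import process_transformer_cfg

node = CN()
node.cnet = "twins"
node.fnet = "twins"
node.critical_params = ["cnet", "fnet"]

print(process_transformer_cfg(node))  # -> cnet[twins]fnet[twins]
```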
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Alexey
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/ZoeDepth/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Alexey
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | Midas
3 | This device doesn\'t support Camera2 API.
4 | GPU does not yet support quantized models.
5 | Model:
6 |
7 | - Float_EfficientNet
8 |
13 |
14 |
15 | Device:
16 |
17 | - GPU
18 | - CPU
19 | - NNAPI
20 |
21 |
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Intel ISL (Intel Intelligent Systems Lab)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/additions/install_ros_noetic_ubuntu_20.sh:
--------------------------------------------------------------------------------
1 | #@title { display-mode: "code" }
2 |
3 | #from http://wiki.ros.org/indigo/Installation/Ubuntu
4 |
5 | #1.2 Setup sources.list
6 | sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
7 |
8 | # 1.3 Setup keys
9 | sudo apt-key adv --keyserver 'hkp://keyserver.ubuntu.com:80' --recv-key C1CF6E31E6BADE8868B172B4F42ED6FBAB17C654
10 |
11 | curl -sSL 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0xC1CF6E31E6BADE8868B172B4F42ED6FBAB17C654' | sudo apt-key add -
12 |
13 | # 1.4 Installation
14 | sudo apt-get update
15 | sudo apt-get upgrade
16 |
17 | # Desktop-Full Install:
18 | sudo apt-get install ros-noetic-desktop-full
19 |
20 | printf "\nsource /opt/ros/noetic/setup.bash\n" >> ~/.bashrc
21 |
22 | # 1.5 Initialize rosdep
23 | sudo rosdep init
24 | rosdep update
25 |
26 |
27 | # 1.7 Getting rosinstall (python)
28 | sudo apt-get install python3-rosinstall
29 | sudo apt-get install python3-catkin-tools
30 | sudo apt-get install python3-rospy
31 | sudo apt-get install python3-rosdep
32 | sudo apt-get install python3-roscd
33 | sudo apt-get install python3-pip
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Constants.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | // =============================================================================
15 |
16 | enum Constants {
17 | // MARK: - Constants related to the image processing
18 | static let bgraPixel = (channels: 4, alphaComponent: 3, lastBgrComponent: 2)
19 | static let rgbPixelChannels = 3
20 | static let maxRGBValue: Float32 = 255.0
21 |
22 | // MARK: - Constants related to the model interpreter
23 | static let defaultThreadCount = 2
24 | static let defaultDelegate: Delegates = .CPU
25 | }
26 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VideoFlow/configs/kitti.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix =''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'BOFNet'
17 | _CN.mixed_precision = False
18 | _CN.filter_epe = False
19 |
20 | _CN.restore_ckpt = "PATH-TO-FINAL"
21 |
22 | _CN.BOFNet = CN()
23 | _CN.BOFNet.pretrain = True
24 | _CN.BOFNet.cnet = 'twins'
25 | _CN.BOFNet.fnet = 'twins'
26 | _CN.BOFNet.gma = 'GMA-SK2'
27 | _CN.BOFNet.corr_fn = "default"
28 | _CN.BOFNet.mixed_precision = False
29 |
30 | _CN.BOFNet.decoder_depth = 12
31 | _CN.BOFNet.critical_params = ["cnet", "fnet", "pretrain", "corr_fn", "mixed_precision"]
32 |
33 | ### TRAINER
34 | _CN.trainer = CN()
35 | _CN.trainer.scheduler = 'OneCycleLR'
36 | _CN.trainer.optimizer = 'adamw'
37 | _CN.trainer.canonical_lr = 12.5e-5
38 | _CN.trainer.adamw_decay = 1e-4
39 | _CN.trainer.clip = 1.0
40 | _CN.trainer.num_steps = 80000
41 | _CN.trainer.epsilon = 1e-8
42 | _CN.trainer.anneal_strategy = 'linear'
43 | def get_cfg():
44 | return _CN.clone()
45 |
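These config modules are all consumed the same way: import `get_cfg`, optionally override fields, then read attributes off the node (a sketch using the standard yacs API; `freeze()` is optional but catches typos downstream):

```python
# Sketch: typical consumption of one of these config modules.
from configs.kitti import get_cfg

cfg = get_cfg()
cfg.batch_size = 4            # override before freezing
cfg.freeze()                  # make the node read-only
print(cfg.network, cfg.trainer.canonical_lr, cfg.image_size)
```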
--------------------------------------------------------------------------------
/VideoFlow/configs/sintel_submission.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix =''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 1
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'BOFNet'
17 |
18 | _CN.restore_ckpt = None
19 |
20 | _CN.model = "VideoFlow_ckpt/BOF_sintel.pth"
21 |
22 | _CN.BOFNet = CN()
23 | _CN.BOFNet.pretrain = True
24 | _CN.BOFNet.cnet = 'twins'
25 | _CN.BOFNet.fnet = 'twins'
26 | _CN.BOFNet.gma = 'GMA-SK2'
27 | _CN.BOFNet.corr_fn = "default"
28 | _CN.BOFNet.corr_levels = 4
29 | _CN.BOFNet.mixed_precision = True
30 |
31 | _CN.BOFNet.decoder_depth = 32
32 | _CN.BOFNet.critical_params = ["cnet", "fnet", "pretrain"]
33 |
34 |
35 | ### TRAINER
36 | _CN.trainer = CN()
37 | _CN.trainer.scheduler = 'OneCycleLR'
38 | _CN.trainer.optimizer = 'adamw'
39 | _CN.trainer.canonical_lr = 12.5e-5
40 | _CN.trainer.adamw_decay = 1e-4
41 | _CN.trainer.clip = 1.0
42 | _CN.trainer.num_steps = 90000
43 | _CN.trainer.epsilon = 1e-8
44 | _CN.trainer.anneal_strategy = 'linear'
45 | def get_cfg():
46 | return _CN.clone()
47 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/midas/backbones/next_vit.py:
--------------------------------------------------------------------------------
1 | import timm
2 |
3 | import torch.nn as nn
4 |
5 | from pathlib import Path
6 | from .utils import activations, forward_default, get_activation
7 |
8 | from ..external.next_vit.classification.nextvit import *
9 |
10 |
11 | def forward_next_vit(pretrained, x):
12 | return forward_default(pretrained, x, "forward")
13 |
14 |
15 | def _make_next_vit_backbone(
16 | model,
17 | hooks=[2, 6, 36, 39],
18 | ):
19 | pretrained = nn.Module()
20 |
21 | pretrained.model = model
22 | pretrained.model.features[hooks[0]].register_forward_hook(get_activation("1"))
23 | pretrained.model.features[hooks[1]].register_forward_hook(get_activation("2"))
24 | pretrained.model.features[hooks[2]].register_forward_hook(get_activation("3"))
25 | pretrained.model.features[hooks[3]].register_forward_hook(get_activation("4"))
26 |
27 | pretrained.activations = activations
28 |
29 | return pretrained
30 |
31 |
32 | def _make_pretrained_next_vit_large_6m(hooks=None):
33 | model = timm.create_model("nextvit_large")
34 |
35 | hooks = [2, 6, 36, 39] if hooks is None else hooks
36 | return _make_next_vit_backbone(
37 | model,
38 | hooks=hooks,
39 | )
40 |
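The backbone wrappers above capture intermediate feature maps through `register_forward_hook`; the underlying pattern, reduced to a self-contained sketch in plain PyTorch (toy module, not MiDaS code):

```python
# Sketch of the activation-capture pattern used by these backbone factories.
import torch
import torch.nn as nn

activations = {}

def get_activation(name):
    def hook(module, inputs, output):
        activations[name] = output  # stash this layer's output under a key
    return hook

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 4))
model[0].register_forward_hook(get_activation("1"))
model[2].register_forward_hook(get_activation("2"))

_ = model(torch.randn(2, 8))
print(activations["1"].shape, activations["2"].shape)  # (2, 16) and (2, 4)
```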
--------------------------------------------------------------------------------
/VideoFlow/configs/sintel.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix =''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'BOFNet'
17 | _CN.mixed_precision = False
18 | _CN.filter_epe = False
19 |
20 | _CN.restore_ckpt = "PATH_TO_FINAL/final"
21 |
22 | _CN.BOFNet = CN()
23 | _CN.BOFNet.pretrain = True
24 | _CN.BOFNet.cnet = 'twins'
25 | _CN.BOFNet.fnet = 'twins'
26 | _CN.BOFNet.gma = 'GMA-SK2'
27 | _CN.BOFNet.corr_fn = "default"
28 | _CN.BOFNet.mixed_precision = False
29 |
30 | _CN.BOFNet.decoder_depth = 12
31 | _CN.BOFNet.critical_params = ["cnet", "fnet", "pretrain", "corr_fn", "mixed_precision"]
32 |
33 |
34 | ### TRAINER
35 | _CN.trainer = CN()
36 | _CN.trainer.scheduler = 'OneCycleLR'
37 | _CN.trainer.optimizer = 'adamw'
38 | _CN.trainer.canonical_lr = 12.5e-5
39 | _CN.trainer.adamw_decay = 1e-4
40 | _CN.trainer.clip = 1.0
41 | _CN.trainer.num_steps = 120000
42 | _CN.trainer.epsilon = 1e-8
43 | _CN.trainer.anneal_strategy = 'linear'
44 | def get_cfg():
45 | return _CN.clone()
46 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/models/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 | apply plugin: 'de.undercouch.download'
3 |
4 | android {
5 | compileSdkVersion 28
6 | buildToolsVersion "28.0.0"
7 |
8 | defaultConfig {
9 | minSdkVersion 21
10 | targetSdkVersion 28
11 | versionCode 1
12 | versionName "1.0"
13 |
14 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
15 |
16 | }
17 |
18 | buildTypes {
19 | release {
20 | minifyEnabled false
21 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
22 | }
23 | }
24 |
25 | aaptOptions {
26 | noCompress "tflite"
27 | }
28 |
29 | lintOptions {
30 | checkReleaseBuilds false
31 | // Or, if you prefer, you can continue to check for errors in release builds,
32 | // but continue the build even when errors are found:
33 | abortOnError false
34 | }
35 | }
36 |
37 | // Download default models; if you wish to use your own models then
38 | // place them in the "assets" directory and comment out this line.
39 | project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
40 | apply from:'download.gradle'
41 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/data/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/base_models/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 |
--------------------------------------------------------------------------------
/VideoFlow/configs/things.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix =''
6 | _CN.gamma = 0.8
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'BOFNet'
17 | _CN.mixed_precision = False
18 | _CN.filter_epe = False
19 |
20 | _CN.restore_ckpt = None
21 |
22 | _CN.BOFNet = CN()
23 | _CN.BOFNet.pretrain = True
24 | _CN.BOFNet.gma = 'GMA-SK2'
25 | _CN.BOFNet.cnet = 'twins'
26 | _CN.BOFNet.fnet = 'twins'
27 | _CN.BOFNet.corr_fn = 'default'
28 | _CN.BOFNet.corr_levels = 4
29 | _CN.BOFNet.mixed_precision = False
30 |
31 | _CN.BOFNet.decoder_depth = 12
32 | _CN.BOFNet.critical_params = ["cnet", "fnet", "pretrain", 'corr_fn', "gma", "corr_levels", "decoder_depth", "mixed_precision"]
33 |
34 | ### TRAINER
35 | _CN.trainer = CN()
36 | _CN.trainer.scheduler = 'OneCycleLR'
37 | _CN.trainer.optimizer = 'adamw'
38 | _CN.trainer.canonical_lr = 25e-5
39 | _CN.trainer.adamw_decay = 1e-4
40 | _CN.trainer.clip = 1.0
41 | _CN.trainer.num_steps = 120000
42 | _CN.trainer.epsilon = 1e-8
43 | _CN.trainer.anneal_strategy = 'linear'
44 | def get_cfg():
45 | return _CN.clone()
46 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/additions/install_ros_melodic_ubuntu_17_18.sh:
--------------------------------------------------------------------------------
1 | #@title { display-mode: "code" }
2 |
3 | #from http://wiki.ros.org/indigo/Installation/Ubuntu
4 |
5 | #1.2 Setup sources.list
6 | sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
7 |
8 | # 1.3 Setup keys
9 | sudo apt-key adv --keyserver 'hkp://keyserver.ubuntu.com:80' --recv-key C1CF6E31E6BADE8868B172B4F42ED6FBAB17C654
10 | sudo apt-key adv --keyserver 'hkp://ha.pool.sks-keyservers.net:80' --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
11 |
12 | curl -sSL 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0xC1CF6E31E6BADE8868B172B4F42ED6FBAB17C654' | sudo apt-key add -
13 |
14 | # 1.4 Installation
15 | sudo apt-get update
16 | sudo apt-get upgrade
17 |
18 | # Desktop-Full Install:
19 | sudo apt-get install ros-melodic-desktop-full
20 |
21 | printf "\nsource /opt/ros/melodic/setup.bash\n" >> ~/.bashrc
22 |
23 | # 1.5 Initialize rosdep
24 | sudo rosdep init
25 | rosdep update
26 |
27 |
28 | # 1.7 Getting rosinstall (python)
29 | sudo apt-get install python-rosinstall
30 | sudo apt-get install python-catkin-tools
31 | sudo apt-get install python-rospy
32 | sudo apt-get install python-rosdep
33 | sudo apt-get install python-roscd
34 | sudo apt-get install python-pip
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Camera Feed/PreviewView.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | import UIKit
16 | import AVFoundation
17 |
18 | /// The camera frame is displayed on this view.
19 | class PreviewView: UIView {
20 | var previewLayer: AVCaptureVideoPreviewLayer {
21 | guard let layer = layer as? AVCaptureVideoPreviewLayer else {
22 | fatalError("Layer expected is of type VideoPreviewLayer")
23 | }
24 | return layer
25 | }
26 |
27 | var session: AVCaptureSession? {
28 | get {
29 | return previewLayer.session
30 | }
31 | set {
32 | previewLayer.session = newValue
33 | }
34 | }
35 |
36 | override class var layerClass: AnyClass {
37 | return AVCaptureVideoPreviewLayer.self
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/zoedepth/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 | from .zoedepth_v1 import ZoeDepth
26 |
27 | all_versions = {
28 | "v1": ZoeDepth,
29 | }
30 |
31 | get_version = lambda v : all_versions[v]
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/layout/tfe_ic_camera_connection_fragment.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/zoedepth_nk/__init__.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 | from .zoedepth_nk_v1 import ZoeDepthNK
26 |
27 | all_versions = {
28 | "v1": ZoeDepthNK,
29 | }
30 |
31 | get_version = lambda v : all_versions[v]
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/solvers/g2o_csparse_extension_api.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2012 Rainer Kuemmerle
3 | //
4 | // g2o is free software: you can redistribute it and/or modify
5 | // it under the terms of the GNU Lesser General Public License as published
6 | // by the Free Software Foundation, either version 3 of the License, or
7 | // (at your option) any later version.
8 | //
9 | // g2o is distributed in the hope that it will be useful,
10 | // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | // GNU Lesser General Public License for more details.
13 | //
14 | // You should have received a copy of the GNU Lesser General Public License
15 | // along with this program. If not, see <http://www.gnu.org/licenses/>.
16 |
17 | #ifndef G2O_CSPARSE_EXTENSION_API_H
18 | #define G2O_CSPARSE_EXTENSION_API_H
19 |
20 | #include "../../config.h"
21 |
22 | #ifdef _MSC_VER
23 | // We are using a Microsoft compiler:
24 | #ifdef G2O_LGPL_SHARED_LIBS
25 | #ifdef csparse_extension_EXPORTS
26 | #define G2O_CSPARSE_EXTENSION_API __declspec(dllexport)
27 | #else
28 | #define G2O_CSPARSE_EXTENSION_API __declspec(dllimport)
29 | #endif
30 | #else
31 | #define G2O_CSPARSE_EXTENSION_API
32 | #endif
33 |
34 | #else
35 | // Not Microsoft compiler so set empty definition:
36 | #define G2O_CSPARSE_EXTENSION_API
37 | #endif
38 |
39 | #endif // G2O_CSPARSE_API_H
40 |
--------------------------------------------------------------------------------
/YOLOv8/include/detector_opencv_dnn.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include <opencv2/opencv.hpp>
4 | #include "data_struct.h"
5 |
6 |
7 | class Detector_OpenCV_DNN
8 | {
9 | public:
10 | Detector_OpenCV_DNN();
11 |
12 | bool LoadModel(std::string& modelPath);
13 | BatchDetectedObject Run(MatVector& srcImgList);
14 |
15 | void setClassNames(std::vector<std::string> newClassNamesList);
16 | void setBatchSize(int newBatch);
17 | void setInputSize(cv::Size newInputSize);
18 | std::string getClassName(int classId);
19 |
20 | void setDynamicClassNames(std::vector<std::string> classNamesDynamicList);
21 | bool whetherInDynamicClass(std::string className);
22 |
23 | private:
24 | cv::dnn::Net model;
25 | float _classThreshold = 0.25;
26 | float _nmsThreshold = 0.45;
27 |
28 | void LetterBox(const cv::Mat& image,
29 | cv::Mat& outImage,
30 | cv::Vec4d& params,
31 | const cv::Size& newShape = cv::Size(640, 640),
32 | bool autoShape = false,
33 | bool scaleFill = false,
34 | bool scaleUp = true,
35 | int stride = 32,
36 | const cv::Scalar& color = cv::Scalar(114, 114, 114));
37 |
38 | std::vector<std::string> _classNamesList;
39 | std::vector<std::string> _classNamesDynamicList;
40 | int _batchSize = 1;
41 | cv::Size _inputSize = cv::Size(640, 640);
42 |
43 | };
44 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | import UIKit
16 |
17 | @UIApplicationMain
18 | class AppDelegate: UIResponder, UIApplicationDelegate {
19 |
20 | var window: UIWindow?
21 |
22 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
23 | return true
24 | }
25 |
26 | func applicationWillResignActive(_ application: UIApplication) {
27 | }
28 |
29 | func applicationDidEnterBackground(_ application: UIApplication) {
30 | }
31 |
32 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | }
34 |
35 | func applicationDidBecomeActive(_ application: UIApplication) {
36 | }
37 |
38 | func applicationWillTerminate(_ application: UIApplication) {
39 | }
40 | }
41 |
42 |
--------------------------------------------------------------------------------
/VideoFlow/configs/things_multiframes.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix =''
6 | _CN.gamma = 0.8
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'MOFNetStack'
17 | _CN.mixed_precision = True
18 | _CN.input_frames = 5
19 | _CN.filter_epe = False
20 |
21 | _CN.restore_ckpt = None
22 |
23 | _CN.MOFNetStack = CN()
24 | _CN.MOFNetStack.pretrain = True
25 | _CN.MOFNetStack.Tfusion = 'stack'
26 | _CN.MOFNetStack.cnet = 'twins'
27 | _CN.MOFNetStack.fnet = 'twins'
28 | _CN.MOFNetStack.down_ratio = 8
29 | _CN.MOFNetStack.feat_dim = 256
30 | _CN.MOFNetStack.corr_fn = 'default'
31 | _CN.MOFNetStack.corr_levels = 4
32 | _CN.MOFNetStack.mixed_precision = True
33 | _CN.MOFNetStack.context_3D = False
34 |
35 | _CN.MOFNetStack.decoder_depth = 6
36 | _CN.MOFNetStack.critical_params = ["cnet", "fnet", "pretrain", "Tfusion", "decoder_depth", "mixed_precision", "down_ratio", "feat_dim"]
37 |
38 | ### TRAINER
39 | _CN.trainer = CN()
40 | _CN.trainer.scheduler = 'OneCycleLR'
41 | _CN.trainer.optimizer = 'adamw'
42 | _CN.trainer.canonical_lr = 25e-5
43 | _CN.trainer.adamw_decay = 1e-4
44 | _CN.trainer.clip = 1.0
45 | _CN.trainer.num_steps = 125000
46 | _CN.trainer.epsilon = 1e-8
47 | _CN.trainer.anneal_strategy = 'linear'
48 | def get_cfg():
49 | return _CN.clone()
50 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |   <key>CFBundleDevelopmentRegion</key>
6 |   <string>$(DEVELOPMENT_LANGUAGE)</string>
7 |   <key>CFBundleExecutable</key>
8 |   <string>$(EXECUTABLE_NAME)</string>
9 |   <key>CFBundleIdentifier</key>
10 |   <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |   <key>CFBundleInfoDictionaryVersion</key>
12 |   <string>6.0</string>
13 |   <key>CFBundleName</key>
14 |   <string>$(PRODUCT_NAME)</string>
15 |   <key>CFBundlePackageType</key>
16 |   <string>APPL</string>
17 |   <key>CFBundleShortVersionString</key>
18 |   <string>1.0</string>
19 |   <key>CFBundleVersion</key>
20 |   <string>1</string>
21 |   <key>LSRequiresIPhoneOS</key>
22 |   <true/>
23 |   <key>NSCameraUsageDescription</key>
24 |   <string>This app will use camera to continuously estimate the depth map.</string>
25 |   <key>UILaunchStoryboardName</key>
26 |   <string>LaunchScreen</string>
27 |   <key>UIMainStoryboardFile</key>
28 |   <string>Main</string>
29 |   <key>UIRequiredDeviceCapabilities</key>
30 |   <array>
31 |     <string>armv7</string>
32 |   </array>
33 |   <key>UISupportedInterfaceOrientations</key>
34 |   <array>
35 |     <string>UIInterfaceOrientationPortrait</string>
36 |   </array>
37 |   <key>UISupportedInterfaceOrientations~ipad</key>
38 |   <array>
39 |     <string>UIInterfaceOrientationPortrait</string>
40 |   </array>
41 | </dict>
42 | </plist>
43 |
--------------------------------------------------------------------------------
/VideoFlow/flow_datasets/hd1k_three_frames/convert_HD1K.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 | import os
5 |
6 | root = "/mnt/lustre/share/cp/caodongliang/HD1K/"
7 |
8 | image_list = []
9 | flow_list = []
10 |
11 | seq_ix = 0
12 |
13 | while True:
14 | flows = sorted(glob(os.path.join(root, 'hd1k_flow_gt', 'flow_occ/%06d_*.png' % seq_ix)))
15 | images = sorted(glob(os.path.join(root, 'hd1k_input', 'image_2/%06d_*.png' % seq_ix)))
16 |
17 | if len(flows) == 0:
18 | break
19 |
20 | print(seq_ix, len(flows), images[0], images[-1], "!!!!!!!!!!!!!!")
21 |
22 | for i in range(len(images)-1):
23 | if i==0:
24 | image_list.append(images[0])
25 | else:
26 | image_list.append(images[i-1])
27 |
28 | image_list.append(images[i])
29 | image_list.append(images[i+1])
30 |
31 | flow_list.append(flows[i])
32 |
33 | seq_ix += 1
34 |
35 | for idx in range(len(image_list)):
36 | image_list[idx] = image_list[idx].replace("/mnt/lustre/share/cp/caodongliang/HD1K", "HD1K") + "\n"
37 | for idx in range(len(flow_list)):
38 | flow_list[idx] = flow_list[idx].replace("/mnt/lustre/share/cp/caodongliang/HD1K", "HD1K") + "\n"
39 |
40 | with open(osp.join("hd1k_three_frames", "hd1k"+"_image.txt"), 'w') as f:
41 | f.writelines(image_list)
42 | print(len(image_list))
43 | with open(osp.join("hd1k_three_frames", "hd1k"+"_flo.txt"), 'w') as f:
44 | f.writelines(flow_list)
45 | print(len(flow_list))
46 |
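The loop above emits (previous, current, next) triplets and pads the start of each sequence by repeating frame 0; on a toy sequence the boundary handling is easy to see (a sketch of just the indexing logic):

```python
# Sketch: the three-frame windowing used above, on a toy sequence.
images = ["f0", "f1", "f2", "f3"]
triplets = []
for i in range(len(images) - 1):
    prev = images[0] if i == 0 else images[i - 1]
    triplets.append((prev, images[i], images[i + 1]))
print(triplets)
# [('f0', 'f0', 'f1'), ('f0', 'f1', 'f2'), ('f1', 'f2', 'f3')]
```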
--------------------------------------------------------------------------------
/dependencies/g2o/license-bsd.txt:
--------------------------------------------------------------------------------
1 | g2o - General Graph Optimization
2 | Copyright (C) 2011 Rainer Kuemmerle, Giorgio Grisetti, Hauke Strasdat,
3 | Kurt Konolige, and Wolfram Burgard
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are
8 | met:
9 |
10 | * Redistributions of source code must retain the above copyright notice,
11 | this list of conditions and the following disclaimer.
12 | * Redistributions in binary form must reproduce the above copyright
13 | notice, this list of conditions and the following disclaimer in the
14 | documentation and/or other materials provided with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
17 | IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
19 | PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
22 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
24 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
25 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
26 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
28 |
--------------------------------------------------------------------------------
/VideoFlow/flow_dataset_mf/convert_sintel.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 | import os
5 |
6 | import pickle
7 |
8 | root = "/mnt/lustre/share/cp/caodongliang/MPI-Sintel/"
9 |
10 | for split in ['training']:
11 | for dstype in ['clean', 'final']:
12 | image_list = []
13 | flow_list = []
14 | extra_info_list = []
15 |
16 | flow_root = osp.join(root, split, 'flow')
17 | image_root = osp.join(root, split, dstype)
18 |
19 | for scene in os.listdir(image_root):
20 | images = sorted(glob(osp.join(image_root, scene, '*.png')))
21 | flows = sorted(glob(osp.join(flow_root, scene, '*.flo')))
22 |
23 | for idx in range(len(images)):
24 | images[idx] = images[idx].replace("/mnt/lustre/share/cp/caodongliang/MPI-Sintel", "Sintel") + "\n"
25 | for idx in range(len(flows)):
26 | flows[idx] = flows[idx].replace("/mnt/lustre/share/cp/caodongliang/MPI-Sintel", "Sintel") + "\n"
27 |
28 | image_list.append(images)
29 | flow_list.append(flows)
30 | extra_info_list.append(scene)
31 |
32 | with open("sintel_training_"+dstype+"_png.pkl", 'wb') as f:
33 | pickle.dump(image_list, f)
34 | with open("sintel_training_"+dstype+"_flo.pkl", 'wb') as f:
35 | pickle.dump(flow_list, f)
36 | with open("sintel_training_scene.pkl", 'wb') as f:
37 | pickle.dump(extra_info_list, f)
38 |
39 |
40 |
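A minimal sketch of consuming the pickles written above (not part of the original script): each file holds one list per scene, aligned by index, and Sintel provides one fewer flow than images per scene.

import pickle

with open("sintel_training_clean_png.pkl", "rb") as f:
    image_lists = pickle.load(f)
with open("sintel_training_clean_flo.pkl", "rb") as f:
    flow_lists = pickle.load(f)
with open("sintel_training_scene.pkl", "rb") as f:
    scenes = pickle.load(f)

for scene, imgs, flows in zip(scenes, image_lists, flow_lists):
    print(scene, len(imgs), len(flows))  # len(flows) == len(imgs) - 1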
--------------------------------------------------------------------------------
/include/Optimizer.h:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is part of VDO-SLAM.
3 | *
4 | * Copyright (C) 2019-2020 Jun Zhang (The Australian National University)
5 | * For more information see
6 | *
7 | **/
8 |
9 | #ifndef OPTIMIZER_H
10 | #define OPTIMIZER_H
11 |
12 | #include "Map.h"
13 | #include "Frame.h"
14 | #include "dependencies/g2o/g2o/types/types_six_dof_expmap.h"
15 |
16 | namespace VDO_SLAM
17 | {
18 |
19 | using namespace std;
20 |
21 | class Optimizer
22 | {
23 | public:
24 |
25 | int static PoseOptimizationNew(Frame *pCurFrame, Frame *pLastFrame, vector<int> &TemperalMatch);
26 | int static PoseOptimizationFlow2Cam(Frame *pCurFrame, Frame *pLastFrame, vector<int> &TemperalMatch);
27 | cv::Mat static PoseOptimizationObjMot(Frame *pCurFrame, Frame *pLastFrame, const vector<int> &ObjId, std::vector<int> &InlierID);
28 | cv::Mat static PoseOptimizationFlow2(Frame *pCurFrame, Frame *pLastFrame, const vector<int> &ObjId, std::vector<int> &InlierID);
29 | void static FullBatchOptimization(Map* pMap, const cv::Mat Calib_K);
30 | void static PartialBatchOptimization(Map* pMap, const cv::Mat Calib_K, const int WINDOW_SIZE);
31 | cv::Mat static Get3DinWorld(const cv::KeyPoint &Feats2d, const float &Dpts, const cv::Mat &Calib_K, const cv::Mat &CameraPose);
32 | cv::Mat static Get3DinCamera(const cv::KeyPoint &Feats2d, const float &Dpts, const cv::Mat &Calib_K);
33 |
34 | };
35 |
36 | } //namespace VDO_SLAM
37 |
38 | #endif // OPTIMIZER_H
39 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 28
5 | buildToolsVersion "28.0.0"
6 |
7 | defaultConfig {
8 | minSdkVersion 21
9 | targetSdkVersion 28
10 | versionCode 1
11 | versionName "1.0"
12 |
13 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
14 |
15 | }
16 |
17 | buildTypes {
18 | release {
19 | minifyEnabled false
20 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
21 | }
22 | }
23 | compileOptions {
24 | sourceCompatibility = '1.8'
25 | targetCompatibility = '1.8'
26 | }
27 | aaptOptions {
28 | noCompress "tflite"
29 | }
30 |
31 | lintOptions {
32 | checkReleaseBuilds false
33 | // Or, if you prefer, you can continue to check for errors in release builds,
34 | // but continue the build even when errors are found:
35 | abortOnError false
36 | }
37 | }
38 |
39 | dependencies {
40 | implementation fileTree(dir: 'libs', include: ['*.jar'])
41 | implementation project(":models")
42 | implementation 'androidx.appcompat:appcompat:1.1.0'
43 |
44 | // Build off of nightly TensorFlow Lite Task Library
45 | implementation('org.tensorflow:tensorflow-lite-task-vision:0.0.0-nightly') { changing = true }
46 | implementation('org.tensorflow:tensorflow-lite-metadata:0.0.0-nightly') { changing = true }
47 | }
48 |
--------------------------------------------------------------------------------
/VideoFlow/configs/multiframes_sintel_submission.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix = ''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'MOFNetStack'
17 |
18 | _CN.model = 'VideoFlow_ckpt/MOF_sintel.pth'
19 |
20 | _CN.input_frames = 5
21 |
22 | _CN.restore_ckpt = None
23 |
24 | ################################################
25 | ################################################
26 | _CN.MOFNetStack = CN()
27 | _CN.MOFNetStack.pretrain = True
28 | _CN.MOFNetStack.Tfusion = 'stack'
29 | _CN.MOFNetStack.cnet = 'twins'
30 | _CN.MOFNetStack.fnet = 'twins'
31 | _CN.MOFNetStack.down_ratio = 8
32 | _CN.MOFNetStack.feat_dim = 256
33 | _CN.MOFNetStack.corr_fn = 'default'
34 | _CN.MOFNetStack.corr_levels = 4
35 | _CN.MOFNetStack.mixed_precision = True
36 | _CN.MOFNetStack.context_3D = False
37 |
38 | _CN.MOFNetStack.decoder_depth = 32
39 | _CN.MOFNetStack.critical_params = ["cnet", "fnet", "pretrain", 'corr_fn', "Tfusion", "corr_levels", "decoder_depth", "mixed_precision"]
40 |
41 | ### TRAINER
42 | _CN.trainer = CN()
43 | _CN.trainer.scheduler = 'OneCycleLR'
44 | _CN.trainer.optimizer = 'adamw'
45 | _CN.trainer.canonical_lr = 12.5e-5
46 | _CN.trainer.adamw_decay = 1e-4
47 | _CN.trainer.clip = 1.0
48 | _CN.trainer.num_steps = 90000
49 | _CN.trainer.epsilon = 1e-8
50 | _CN.trainer.anneal_strategy = 'linear'
51 | def get_cfg():
52 | return _CN.clone()
53 |
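A minimal usage sketch for this config, assuming the VideoFlow root is on sys.path; the printed values are the ones set above. yacs nodes stay mutable until frozen, so scripts can override fields after cloning.

from configs.multiframes_sintel_submission import get_cfg

cfg = get_cfg()                        # independent clone; edits don't touch the module-level _CN
print(cfg.network)                     # 'MOFNetStack'
print(cfg.input_frames)                # 5
print(cfg.MOFNetStack.decoder_depth)   # 32
cfg.batch_size = 4                     # allowed until cfg.freeze() is called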
--------------------------------------------------------------------------------
/VideoFlow/alt_cuda_corr/correlation.cpp:
--------------------------------------------------------------------------------
1 | #include <torch/extension.h>
2 | #include <vector>
3 |
4 | // CUDA forward declarations
5 | std::vector<torch::Tensor> corr_cuda_forward(
6 | torch::Tensor fmap1,
7 | torch::Tensor fmap2,
8 | torch::Tensor coords,
9 | int radius);
10 |
11 | std::vector<torch::Tensor> corr_cuda_backward(
12 | torch::Tensor fmap1,
13 | torch::Tensor fmap2,
14 | torch::Tensor coords,
15 | torch::Tensor corr_grad,
16 | int radius);
17 |
18 | // C++ interface
19 | #define CHECK_CUDA(x) TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor")
20 | #define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be contiguous")
21 | #define CHECK_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x)
22 |
23 | std::vector<torch::Tensor> corr_forward(
24 | torch::Tensor fmap1,
25 | torch::Tensor fmap2,
26 | torch::Tensor coords,
27 | int radius) {
28 | CHECK_INPUT(fmap1);
29 | CHECK_INPUT(fmap2);
30 | CHECK_INPUT(coords);
31 |
32 | return corr_cuda_forward(fmap1, fmap2, coords, radius);
33 | }
34 |
35 |
36 | std::vector<torch::Tensor> corr_backward(
37 | torch::Tensor fmap1,
38 | torch::Tensor fmap2,
39 | torch::Tensor coords,
40 | torch::Tensor corr_grad,
41 | int radius) {
42 | CHECK_INPUT(fmap1);
43 | CHECK_INPUT(fmap2);
44 | CHECK_INPUT(coords);
45 | CHECK_INPUT(corr_grad);
46 |
47 | return corr_cuda_backward(fmap1, fmap2, coords, corr_grad, radius);
48 | }
49 |
50 |
51 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
52 | m.def("forward", &corr_forward, "CORR forward");
53 | m.def("backward", &corr_backward, "CORR backward");
54 | }
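The binding above still needs the CUDA kernels compiled alongside it. A hedged sketch of the usual torch.utils.cpp_extension setup follows; the repo's actual run_install.sh/setup.py may differ, and the .cu filename here is an assumption.

from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension

setup(
    name="alt_cuda_corr",
    ext_modules=[
        CUDAExtension(
            "alt_cuda_corr",
            # correlation_kernel.cu is an assumed name for the file that
            # defines corr_cuda_forward / corr_cuda_backward declared above
            sources=["correlation.cpp", "correlation_kernel.cu"],
        )
    ],
    cmdclass={"build_ext": BuildExtension},
)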
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_support/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 28
5 | buildToolsVersion "28.0.0"
6 |
7 | defaultConfig {
8 | minSdkVersion 21
9 | targetSdkVersion 28
10 | versionCode 1
11 | versionName "1.0"
12 |
13 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
14 |
15 | }
16 |
17 | buildTypes {
18 | release {
19 | minifyEnabled false
20 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
21 | }
22 | }
23 |
24 | aaptOptions {
25 | noCompress "tflite"
26 | }
27 |
28 | lintOptions {
29 | checkReleaseBuilds false
30 | // Or, if you prefer, you can continue to check for errors in release builds,
31 | // but continue the build even when errors are found:
32 | abortOnError false
33 | }
34 | }
35 |
36 | dependencies {
37 | implementation fileTree(dir: 'libs', include: ['*.jar'])
38 | implementation project(":models")
39 | implementation 'androidx.appcompat:appcompat:1.1.0'
40 |
41 | // Build off of nightly TensorFlow Lite
42 | implementation('org.tensorflow:tensorflow-lite:0.0.0-nightly') { changing = true }
43 | implementation('org.tensorflow:tensorflow-lite-gpu:0.0.0-nightly') { changing = true }
44 | implementation('org.tensorflow:tensorflow-lite-support:0.0.0-nightly') { changing = true }
45 | // Use local TensorFlow library
46 | // implementation 'org.tensorflow:tensorflow-lite-local:0.0.0'
47 | }
48 |
--------------------------------------------------------------------------------
/VideoFlow/configs/sintel_multiframes.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix = ''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'MOFNetStack'
17 |
18 | _CN.restore_ckpt = "PATH_TO_FINAL/final"
19 |
20 | _CN.mixed_precision = True
21 | _CN.input_frames = 5
22 | _CN.filter_epe = False
23 |
24 | ###################################################
25 | ###################################################
26 | _CN.MOFNetStack = CN()
27 | _CN.MOFNetStack.pretrain = True
28 | _CN.MOFNetStack.Tfusion = 'stack'
29 | _CN.MOFNetStack.cnet = 'twins'
30 | _CN.MOFNetStack.fnet = 'twins'
31 | _CN.MOFNetStack.down_ratio = 8
32 | _CN.MOFNetStack.feat_dim = 256
33 | _CN.MOFNetStack.corr_fn = 'default'
34 | _CN.MOFNetStack.corr_levels = 4
35 | _CN.MOFNetStack.mixed_precision = True
36 | _CN.MOFNetStack.context_3D = False
37 |
38 | _CN.MOFNetStack.decoder_depth = 12
39 | _CN.MOFNetStack.critical_params = ["cnet", "fnet", "pretrain", 'corr_fn', "Tfusion", "corr_levels", "decoder_depth", "mixed_precision"]
40 |
41 |
42 | ### TRAINER
43 | _CN.trainer = CN()
44 | _CN.trainer.scheduler = 'OneCycleLR'
45 | _CN.trainer.optimizer = 'adamw'
46 | _CN.trainer.canonical_lr = 12.5e-5
47 | _CN.trainer.adamw_decay = 1e-4
48 | _CN.trainer.clip = 1.0
49 | _CN.trainer.num_steps = 40000
50 | _CN.trainer.epsilon = 1e-8
51 | _CN.trainer.anneal_strategy = 'linear'
52 | def get_cfg():
53 | return _CN.clone()
54 |
--------------------------------------------------------------------------------
/VideoFlow/configs/kitti_multiframes.py:
--------------------------------------------------------------------------------
1 | from yacs.config import CfgNode as CN
2 | _CN = CN()
3 |
4 | _CN.name = ''
5 | _CN.suffix = ''
6 | _CN.gamma = 0.85
7 | _CN.max_flow = 400
8 | _CN.batch_size = 8
9 | _CN.sum_freq = 100
10 | _CN.val_freq = 100000000
11 | _CN.image_size = [432, 960]
12 | _CN.add_noise = False
13 | _CN.use_smoothl1 = False
14 | _CN.critical_params = []
15 |
16 | _CN.network = 'MOFNetStack'
17 |
18 | _CN.restore_ckpt = "PATH-TO-FINAL/final"
19 |
20 | _CN.mixed_precision = False
21 | _CN.input_frames = 5
22 | _CN.filter_epe = False
23 |
24 | ###################################################
25 | ###################################################
26 | _CN.MOFNetStack = CN()
27 | _CN.MOFNetStack.pretrain = True
28 | _CN.MOFNetStack.Tfusion = 'stack'
29 | _CN.MOFNetStack.cnet = 'twins'
30 | _CN.MOFNetStack.fnet = 'twins'
31 | _CN.MOFNetStack.down_ratio = 8
32 | _CN.MOFNetStack.feat_dim = 256
33 | _CN.MOFNetStack.corr_fn = 'default'
34 | _CN.MOFNetStack.corr_levels = 4
35 | _CN.MOFNetStack.mixed_precision = False
36 | _CN.MOFNetStack.context_3D = False
37 | _CN.MOFNetStack.GMA_MF = False
38 |
39 | _CN.MOFNetStack.decoder_depth = 12
40 | _CN.MOFNetStack.critical_params = ["cnet", "fnet", "pretrain", 'corr_fn', "Tfusion", "corr_levels", "decoder_depth", "mixed_precision", "GMA_MF"]
41 |
42 | ### TRAINER
43 | _CN.trainer = CN()
44 | _CN.trainer.scheduler = 'OneCycleLR'
45 | _CN.trainer.optimizer = 'adamw'
46 | _CN.trainer.canonical_lr = 12.5e-5
47 | _CN.trainer.adamw_decay = 1e-4
48 | _CN.trainer.clip = 1.0
49 | _CN.trainer.num_steps = 25000
50 | _CN.trainer.epsilon = 1e-8
51 | _CN.trainer.anneal_strategy = 'linear'
52 | def get_cfg():
53 | return _CN.clone()
54 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/midas_cpp/scripts/talker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 |
4 | import roslib
5 | #roslib.load_manifest('my_package')
6 | import sys
7 | import rospy
8 | import cv2
9 | from std_msgs.msg import String
10 | from sensor_msgs.msg import Image
11 | from cv_bridge import CvBridge, CvBridgeError
12 |
13 |
14 | def talker():
15 | rospy.init_node('talker', anonymous=True)
16 |
17 | use_camera = rospy.get_param('~use_camera', False)
18 | input_video_file = rospy.get_param('~input_video_file','test.mp4')
19 | # rospy.loginfo(f"Talker - params: use_camera={use_camera}, input_video_file={input_video_file}")
20 |
21 | # rospy.loginfo("Talker: Trying to open a video stream")
22 |     if use_camera:
23 | cap = cv2.VideoCapture(0)
24 | else:
25 | cap = cv2.VideoCapture(input_video_file)
26 |
27 | pub = rospy.Publisher('image_topic', Image, queue_size=1)
28 | rate = rospy.Rate(30) # 30hz
29 | bridge = CvBridge()
30 |
31 | while not rospy.is_shutdown():
32 | ret, cv_image = cap.read()
33 |         if not ret:
34 | print("Talker: Video is over")
35 | rospy.loginfo("Video is over")
36 | return
37 |
38 | try:
39 | image = bridge.cv2_to_imgmsg(cv_image, "bgr8")
40 | except CvBridgeError as e:
41 |             rospy.logerr("Talker: cv2image conversion failed: %s", e)
42 | print(e)
43 | continue
44 |
45 | rospy.loginfo("Talker: Publishing frame")
46 | pub.publish(image)
47 | rate.sleep()
48 |
49 | if __name__ == '__main__':
50 | try:
51 | talker()
52 | except rospy.ROSInterruptException:
53 | pass
54 |
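A hedged counterpart sketch, not necessarily the repo's own subscriber: it consumes image_topic and converts each frame back to OpenCV. With the private params above, the talker would be launched e.g. as `rosrun midas_cpp talker.py _use_camera:=true`.

#!/usr/bin/env python3
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge

bridge = CvBridge()

def on_image(msg):
    frame = bridge.imgmsg_to_cv2(msg, "bgr8")  # back to a BGR numpy array
    rospy.loginfo("Listener: got %dx%d frame", frame.shape[1], frame.shape[0])

if __name__ == '__main__':
    rospy.init_node('listener', anonymous=True)
    rospy.Subscriber('image_topic', Image, on_image, queue_size=1)
    rospy.spin()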
--------------------------------------------------------------------------------
/YOLOv8/bytetrack/include/STrack.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include <opencv2/opencv.hpp>
4 | #include "kalmanFilter.h"
5 | #include <vector>
6 | #include <memory>
7 |
8 | using namespace cv;
9 | using namespace std;
10 |
11 | namespace byte_track
12 | {
13 | enum TrackState { New = 0, Tracked, Lost, Removed };
14 |
15 | class STrack
16 | {
17 | public:
18 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
19 | 	STrack(vector<float> tlwh_, float score, int classID=-1);
20 | ~STrack();
21 |
22 | 	vector<float> static tlbr_to_tlwh(vector<float> &tlbr);
23 | 	void static multi_predict(vector<std::shared_ptr<STrack>> &stracks, KalmanFilter &kalman_filter);
24 | void static_tlwh();
25 | void static_tlbr();
26 | 	vector<float> tlwh_to_xyah(vector<float> tlwh_tmp);
27 | 	vector<float> to_xyah();
28 | void mark_lost();
29 | void mark_removed();
30 | int next_id();
31 | int end_frame();
32 |
33 | void activate(KalmanFilter &kalman_filter, int frame_id);
34 | 	void re_activate(std::shared_ptr<STrack>& new_track, int frame_id, bool new_id = false);
35 | 	void update(std::shared_ptr<STrack>& new_track, int frame_id);
36 |
37 | public:
38 | bool is_activated;
39 | int track_id;
40 | int state;
41 |
42 | 	vector<float> _tlwh;
43 | 	vector<float> tlwh;
44 | 	vector<float> tlbr;
45 | int frame_id;
46 | int tracklet_len;
47 | int start_frame;
48 |
49 | int classID;
50 |
51 | KAL_MEAN mean;
52 | KAL_COVA covariance;
53 | float score;
54 |
55 | private:
56 | KalmanFilter kalman_filter;
57 | };
58 |
59 | 	//using STrackPtr = std::shared_ptr<STrack>;
60 | 	typedef std::shared_ptr<STrack> STrackPtr;
61 | }
--------------------------------------------------------------------------------
/include/cvplot/color.h:
--------------------------------------------------------------------------------
1 | #ifndef CVPLOT_COLOR_H
2 | #define CVPLOT_COLOR_H
3 |
4 | #include <string>
5 |
6 | namespace cvplot {
7 |
8 | struct Color {
9 | uint8_t r, g, b, a;
10 | Color(uint8_t r, uint8_t g, uint8_t b, uint8_t a = 255)
11 | : r(r), g(g), b(b), a(a) {}
12 | Color(const uint8_t *rgb, uint8_t a = 255)
13 | : Color(rgb[0], rgb[1], rgb[2], a) {}
14 | Color() : Color(0, 0, 0) {}
15 |
16 | Color alpha(uint8_t alpha) const;
17 | Color gamma(float gamma) const;
18 | float hue() const;
19 |
20 | static Color gray(uint8_t v);
21 | static Color hue(float hue);
22 | static Color cos(float hue);
23 | static Color index(uint8_t index, uint8_t density = 16, float avoid = 2.f,
24 | float range = 2.f);
25 | static Color hash(const std::string &seed);
26 | static Color uniq(const std::string &name);
27 | };
28 |
29 | static const Color Red = Color::hue(0.f);
30 | static const Color Orange = Color::hue(.5f);
31 | static const Color Yellow = Color::hue(1.f);
32 | static const Color Lawn = Color::hue(1.5f);
33 | static const Color Green = Color::hue(2.f);
34 | static const Color Aqua = Color::hue(2.5f);
35 | static const Color Cyan = Color::hue(3.f);
36 | static const Color Sky = Color::hue(3.5f);
37 | static const Color Blue = Color::hue(4.f);
38 | static const Color Purple = Color::hue(4.5f);
39 | static const Color Magenta = Color::hue(5.f);
40 | static const Color Pink = Color::hue(5.5f);
41 | static const Color Black = Color::gray(0);
42 | static const Color Dark = Color::gray(32);
43 | static const Color Gray = Color::gray(128);
44 | static const Color Light = Color::gray(223);
45 | static const Color White = Color::gray(255);
46 |
47 | } // namespace cvplot
48 |
49 | #endif // CVPLOT_COLOR_H
50 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/parameter.cpp:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #include "parameter.h"
28 |
29 | namespace g2o {
30 |
31 | Parameter::Parameter() : _id(-1)
32 | {
33 | }
34 |
35 | void Parameter::setId(int id_)
36 | {
37 | _id = id_;
38 | }
39 |
40 | } // end namespace
41 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/types/types_dyn_slam3d.h:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is part of VDO-SLAM.
3 | *
4 | * Copyright (C) 2019-2020 Jun Zhang (The Australian National University)
5 | * For more information see
6 | *
7 | **/
8 |
9 | #ifndef G2O_DYNAMIC_SLAM3D
10 | #define G2O_DYNAMIC_SLAM3D
11 |
12 | #include "vertex_se3.h"
13 | #include "edge_se3.h"
14 | #include "vertex_pointxyz.h"
15 | #include "../core/base_multi_edge.h"
16 | #include "../core/base_unary_edge.h"
17 |
18 | namespace g2o {
19 |
20 | namespace types_dyn_slam3d {
21 | void init();
22 | }
23 |
24 | class LandmarkMotionTernaryEdge: public BaseMultiEdge<3,Vector3>
25 | {
26 | public:
27 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
28 | LandmarkMotionTernaryEdge();
29 |
30 | virtual bool read(std::istream& is);
31 | virtual bool write(std::ostream& os) const;
32 | void computeError();
33 | void linearizeOplus();
34 |
35 | virtual void setMeasurement(const Vector3& m){
36 | _measurement = m;
37 | }
38 |
39 | private:
40 | Eigen::Matrix J;
41 |
42 | };
43 |
44 |
45 | class EdgeSE3Altitude: public BaseUnaryEdge<1, double, VertexSE3>
46 | {
47 | public:
48 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
49 | EdgeSE3Altitude();
50 |
51 | virtual bool read(std::istream& is);
52 | virtual bool write(std::ostream& os) const;
53 | void computeError();
54 | void linearizeOplus();
55 |
56 | virtual void setMeasurement(const double& m){
57 | _measurement = m;
58 | }
59 |
60 | private:
61 | Eigen::Matrix J;
62 | };
63 |
64 |
65 | } // end namespace
66 |
67 | #endif
68 |
--------------------------------------------------------------------------------
/include/Converter.h:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is part of VDO-SLAM.
3 | *
4 | * Copyright (C) 2019-2020 Jun Zhang (The Australian National University)
5 | * For more information see
6 | *
7 | **/
8 |
9 | #ifndef CONVERTER_H
10 | #define CONVERTER_H
11 |
12 | #include <opencv2/core/core.hpp>
13 |
14 | #include <Eigen/Dense>
15 | #include"dependencies/g2o/g2o/types/types_six_dof_expmap.h"
16 | #include"dependencies/g2o/g2o/types/types_seven_dof_expmap.h"
17 |
18 | namespace VDO_SLAM
19 | {
20 |
21 | class Converter
22 | {
23 | public:
24 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
25 |     static std::vector<cv::Mat> toDescriptorVector(const cv::Mat &Descriptors);
26 |
27 | static g2o::SE3Quat toSE3Quat(const cv::Mat &cvT);
28 | static g2o::SE3Quat toSE3Quat(const g2o::Sim3 &gSim3);
29 |
30 | static cv::Mat toCvMat(const g2o::SE3Quat &SE3);
31 | static cv::Mat toCvMat(const g2o::Sim3 &Sim3);
32 |     static cv::Mat toCvMat(const Eigen::Matrix<double,4,4> &m);
33 |     static cv::Mat toCvMat(const Eigen::Matrix3d &m);
34 |     static cv::Mat toCvMat(const Eigen::Matrix<double,3,1> &m);
35 |     static cv::Mat toCvSE3(const Eigen::Matrix<double,3,3> &R, const Eigen::Matrix<double,3,1> &t);
36 |
37 |     static Eigen::Matrix<double,3,1> toVector3d(const cv::Mat &cvVector);
38 |     static Eigen::Matrix<double,3,1> toVector3d(const cv::Point3f &cvPoint);
39 |     static Eigen::Matrix<double,3,3> toMatrix3d(const cv::Mat &cvMat3);
40 |     static Eigen::Matrix<double,4,4> toMatrix4d(const cv::Mat &cvMat4);
41 |
42 |     static std::vector<float> toQuaternion(const cv::Mat &M);
43 | static cv::Mat toInvMatrix(const cv::Mat &T);
44 | };
45 |
46 | }// namespace VDO_SLAM
47 |
48 | #endif // CONVERTER_H
49 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/midas/backbones/swin_common.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 | import torch.nn as nn
4 | import numpy as np
5 |
6 | from .utils import activations, forward_default, get_activation, Transpose
7 |
8 |
9 | def forward_swin(pretrained, x):
10 | return forward_default(pretrained, x)
11 |
12 |
13 | def _make_swin_backbone(
14 | model,
15 | hooks=[1, 1, 17, 1],
16 | patch_grid=[96, 96]
17 | ):
18 | pretrained = nn.Module()
19 |
20 | pretrained.model = model
21 | pretrained.model.layers[0].blocks[hooks[0]].register_forward_hook(get_activation("1"))
22 | pretrained.model.layers[1].blocks[hooks[1]].register_forward_hook(get_activation("2"))
23 | pretrained.model.layers[2].blocks[hooks[2]].register_forward_hook(get_activation("3"))
24 | pretrained.model.layers[3].blocks[hooks[3]].register_forward_hook(get_activation("4"))
25 |
26 | pretrained.activations = activations
27 |
28 | if hasattr(model, "patch_grid"):
29 | used_patch_grid = model.patch_grid
30 | else:
31 | used_patch_grid = patch_grid
32 |
33 | patch_grid_size = np.array(used_patch_grid, dtype=int)
34 |
35 | pretrained.act_postprocess1 = nn.Sequential(
36 | Transpose(1, 2),
37 | nn.Unflatten(2, torch.Size(patch_grid_size.tolist()))
38 | )
39 | pretrained.act_postprocess2 = nn.Sequential(
40 | Transpose(1, 2),
41 | nn.Unflatten(2, torch.Size((patch_grid_size // 2).tolist()))
42 | )
43 | pretrained.act_postprocess3 = nn.Sequential(
44 | Transpose(1, 2),
45 | nn.Unflatten(2, torch.Size((patch_grid_size // 4).tolist()))
46 | )
47 | pretrained.act_postprocess4 = nn.Sequential(
48 | Transpose(1, 2),
49 | nn.Unflatten(2, torch.Size((patch_grid_size // 8).tolist()))
50 | )
51 |
52 | return pretrained
53 |
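For context, a sketch of the hook helper this backbone leans on: get_activation (imported from .utils above) presumably returns a closure that stores each hooked block's output in the shared activations dict under its key. The real helper may differ in detail; this is an assumption, not the module's code.

activations = {}  # shared dict the backbone reads captured features from

def get_activation(name):
    def hook(module, inputs, output):
        activations[name] = output  # refreshed on every forward pass
    return hook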
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/types/isometry3d_gradients.cpp:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #include "isometry3d_gradients.h"
28 | #include
29 | namespace g2o {
30 | using namespace std;
31 | using namespace Eigen;
32 |
33 | namespace internal {
34 | #include "dquat2mat.cpp"
35 | } // end namespace internal
36 |
37 | } // end namespace
38 |
--------------------------------------------------------------------------------
/YOLOv8/bytetrack/include/lapjv.h:
--------------------------------------------------------------------------------
1 | #ifndef LAPJV_H
2 | #define LAPJV_H
3 |
4 | namespace byte_track
5 | {
6 | #define LARGE 1000000
7 |
8 | #if !defined TRUE
9 | #define TRUE 1
10 | #endif
11 | #if !defined FALSE
12 | #define FALSE 0
13 | #endif
14 |
15 | #define NEW(x, t, n) if ((x = (t *)malloc(sizeof(t) * (n))) == 0) { return -1; }
16 | #define FREE(x) if (x != 0) { free(x); x = 0; }
17 | #define SWAP_INDICES(a, b) { int_t _temp_index = a; a = b; b = _temp_index; }
18 |
19 | #if 0
20 | #include <assert.h>
21 | #define ASSERT(cond) assert(cond)
22 | #define PRINTF(fmt, ...) printf(fmt, ##__VA_ARGS__)
23 | #define PRINT_COST_ARRAY(a, n) \
24 | while (1) { \
25 | printf(#a" = ["); \
26 | if ((n) > 0) { \
27 | printf("%f", (a)[0]); \
28 | for (uint_t j = 1; j < n; j++) { \
29 | printf(", %f", (a)[j]); \
30 | } \
31 | } \
32 | printf("]\n"); \
33 | break; \
34 | }
35 | #define PRINT_INDEX_ARRAY(a, n) \
36 | while (1) { \
37 | printf(#a" = ["); \
38 | if ((n) > 0) { \
39 | printf("%d", (a)[0]); \
40 | for (uint_t j = 1; j < n; j++) { \
41 | printf(", %d", (a)[j]); \
42 | } \
43 | } \
44 | printf("]\n"); \
45 | break; \
46 | }
47 | #else
48 | #define ASSERT(cond)
49 | #define PRINTF(fmt, ...)
50 | #define PRINT_COST_ARRAY(a, n)
51 | #define PRINT_INDEX_ARRAY(a, n)
52 | #endif
53 |
54 |
55 | typedef signed int int_t;
56 | typedef unsigned int uint_t;
57 | typedef double cost_t;
58 | typedef char boolean;
59 | typedef enum fp_t { FP_1 = 1, FP_2 = 2, FP_DYNAMIC = 3 } fp_t;
60 |
61 | extern int_t lapjv_internal(
62 | const uint_t n, cost_t *cost[],
63 | int_t *x, int_t *y);
64 |
65 | }  // namespace byte_track
66 | #endif // LAPJV_H
--------------------------------------------------------------------------------
/include/Viewer.h:
--------------------------------------------------------------------------------
1 | #ifndef VIEWER_H_
2 | #define VIEWER_H_
3 |
4 | #include "Map.h"
5 |
6 | #include <pangolin/pangolin.h>  // angle-bracket includes reconstructed from usage below
7 | #include <thread>
8 | #include <mutex>
9 | #include <deque>
10 | #include <sys/time.h>
11 |
12 | namespace VDO_SLAM {
13 |
14 | using namespace std;
15 | class Map;
16 | // Visualization for DSO
17 |
18 | /**
19 | * viewer implemented by pangolin
20 | */
21 | class Viewer {
22 | public:
23 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
24 |
25 | Viewer(int w, int h, bool startRunThread = true);
26 |
27 | ~Viewer();
28 |
29 | void run();
30 |
31 | void shutdown();
32 |
33 | void publishPointPoseFrame(Map* pMap, cv::Mat feat_, cv::Mat seg_, cv::Mat flow_);
34 | void GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M, pangolin::OpenGlMatrix &MOw);
35 | // void pushLiveFrame( shared_ptr image);
36 |
37 | /* call on finish */
38 | void join();
39 | bool isFinished();
40 |
41 | private:
42 |
43 | thread runThread;
44 | bool running = true;
45 | int w, h;
46 |
47 | std::vector vmCameraPose;
48 | std::map> objTrajectory;
49 | std::map> vp3DPointDyn;
50 |     std::vector vp3DPointSta; // static points
51 |
52 | cv::Mat feat;
53 | cv::Mat seg;
54 | cv::Mat flow;
55 |
56 | //bool videoImgChanged = true;
57 | // 3D model rendering
58 | std::mutex myMutex;
59 |
60 | // timings
61 | struct timeval last_track;
62 | struct timeval last_map;
63 |
64 | std::deque lastNTrackingMs;
65 | std::deque lastNMappingMs;
66 | bool mbIsFinished;
67 |
68 | };
69 |
70 | }
71 |
72 | #endif // VIEWER_H_
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/robust_kernel.cpp:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #include "robust_kernel.h"
28 |
29 | namespace g2o {
30 |
31 | RobustKernel::RobustKernel() :
32 | _delta(1.)
33 | {
34 | }
35 |
36 | RobustKernel::RobustKernel(double delta) :
37 | _delta(delta)
38 | {
39 | }
40 |
41 | void RobustKernel::setDelta(double delta)
42 | {
43 | _delta = delta;
44 | }
45 |
46 | } // end namespace g2o
47 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/stuff/opengl_wrapper.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, H. Strasdat, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_OPENGL_WRAPPER_H
28 | #define G2O_OPENGL_WRAPPER_H
29 |
30 | #include "g2o/config.h"
31 |
32 | #ifdef WINDOWS
33 | #include <windows.h>
34 | #endif
35 |
36 | #ifdef G2O_OPENGL_FOUND
37 | # ifdef __APPLE__
38 | # include <OpenGL/gl.h>
39 | # else
40 | # include <GL/gl.h>
41 | # endif
42 | #endif
43 |
44 | #endif
45 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/solvers/csparse_extension.h:
--------------------------------------------------------------------------------
1 | // CSparse: a Concise Sparse matrix package.
2 | // Copyright (c) 2006, Timothy A. Davis.
3 | // http://www.cise.ufl.edu/research/sparse/CSparse
4 | //
5 | // --------------------------------------------------------------------------------
6 | //
7 | // CSparse is free software; you can redistribute it and/or
8 | // modify it under the terms of the GNU Lesser General Public
9 | // License as published by the Free Software Foundation; either
10 | // version 2.1 of the License, or (at your option) any later version.
11 | //
12 | // CSparse is distributed in the hope that it will be useful,
13 | // but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 | // Lesser General Public License for more details.
16 | //
17 | // You should have received a copy of the GNU Lesser General Public
18 | // License along with this Module; if not, write to the Free Software
19 | // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 |
21 | #ifndef G2O_CSPARSE_EXTENSION_H
22 | #define G2O_CSPARSE_EXTENSION_H
23 |
24 | #ifndef NCOMPLEX
25 | #define NCOMPLEX
26 | #endif
27 | #include <cs.h>
28 |
29 | #include "g2o_csparse_extension_api.h"
30 |
31 | namespace g2o {
32 |
33 | namespace csparse_extension {
34 |
35 | // our extensions to csparse
36 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
37 | /**
38 | * Originally from CSparse, avoid memory re-allocations by giving workspace pointers
39 | * CSparse: Copyright (c) 2006-2011, Timothy A. Davis.
40 | */
41 | G2O_CSPARSE_EXTENSION_API csn* cs_chol_workspace (const cs *A, const css *S, int* cin, number_t* xin);
42 | G2O_CSPARSE_EXTENSION_API int cs_cholsolsymb(const cs *A, number_t *b, const css* S, number_t* workspace, int* work);
43 |
44 | } // end namespace
45 | } // end namespace
46 |
47 | #endif
48 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/java/org/tensorflow/lite/examples/classification/customview/OverlayView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.customview;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.util.AttributeSet;
21 | import android.view.View;
22 | import java.util.LinkedList;
23 | import java.util.List;
24 |
25 | /** A simple View providing a render callback to other classes. */
26 | public class OverlayView extends View {
27 |   private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
28 |
29 | public OverlayView(final Context context, final AttributeSet attrs) {
30 | super(context, attrs);
31 | }
32 |
33 | public void addCallback(final DrawCallback callback) {
34 | callbacks.add(callback);
35 | }
36 |
37 | @Override
38 | public synchronized void draw(final Canvas canvas) {
39 | for (final DrawCallback callback : callbacks) {
40 | callback.drawCallback(canvas);
41 | }
42 | }
43 |
44 | /** Interface defining the callback for client classes. */
45 | public interface DrawCallback {
46 | public void drawCallback(final Canvas canvas);
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/classification/tflite/ClassifierQuantizedEfficientNet.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.tflite;
17 |
18 | import android.app.Activity;
19 | import java.io.IOException;
20 | import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
21 | /** This TensorFlow Lite classifier works with the quantized EfficientNet model. */
22 | public class ClassifierQuantizedEfficientNet extends Classifier {
23 |
24 | /**
25 |    * Initializes a {@code ClassifierQuantizedEfficientNet}.
26 | *
27 | * @param device a {@link Device} object to configure the hardware accelerator
28 | * @param numThreads the number of threads during the inference
29 | * @throws IOException if the model is not loaded correctly
30 | */
31 | public ClassifierQuantizedEfficientNet(Activity activity, Device device, int numThreads)
32 | throws IOException {
33 | super(activity, device, numThreads);
34 | }
35 |
36 | @Override
37 | protected String getModelPath() {
38 |     // See build.gradle for where to obtain this model file; it should be
39 |     // downloaded automatically into the assets folder during the build, so
40 |     // there is no need to fetch it manually.
41 | return "efficientnet-lite0-int8.tflite";
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/ZoeDepth/sanity_hub.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 | import torch
26 | import numpy as np
27 | from torchvision.transforms import ToTensor
28 | from PIL import Image
29 | from zoedepth.utils.misc import get_image_from_url, colorize
30 |
31 | from zoedepth.models.builder import build_model
32 | from zoedepth.utils.config import get_config
33 | from pprint import pprint
34 |
35 |
36 |
37 | # Trigger reload of MiDaS
38 | torch.hub.help("intel-isl/MiDaS", "DPT_BEiT_L_384", force_reload=True)
39 |
40 |
41 | model = torch.hub.load(".", "ZoeD_K", source="local", pretrained=True)
42 | model = torch.hub.load(".", "ZoeD_NK", source="local", pretrained=True)
43 | model = torch.hub.load(".", "ZoeD_N", source="local", pretrained=True)
44 |
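A hedged continuation sketch: once the loads succeed, a quick inference check could follow. infer_pil is the documented ZoeDepth entry point; the image path below is a placeholder, and Image is already imported at the top of this script.

zoe = model.to("cuda" if torch.cuda.is_available() else "cpu")
img = Image.open("example.jpg").convert("RGB")  # placeholder image path
depth = zoe.infer_pil(img)                      # numpy depth map in meters
print(depth.shape, float(depth.min()), float(depth.max()))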
--------------------------------------------------------------------------------
/include/System.h:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is part of VDO-SLAM.
3 | *
4 | * Copyright (C) 2019-2020 Jun Zhang (The Australian National University)
5 | * For more information see
6 | *
7 | **/
8 |
9 |
10 | #ifndef SYSTEM_H
11 | #define SYSTEM_H
12 |
13 | #include <string>
14 | #include <thread>
15 | #include <opencv2/core/core.hpp>
16 |
17 | #include "Tracking.h"
18 | #include "Map.h"
19 | #include "Viewer.h"
20 |
21 | namespace VDO_SLAM
22 | {
23 |
24 | using namespace std;
25 |
26 | class Map;
27 | class Tracking;
28 | class Viewer;
29 |
30 | class System
31 | {
32 | public:
33 |
34 | // Input sensor
35 | enum eSensor{
36 | MONOCULAR=0,
37 | STEREO=1,
38 | RGBD=2
39 | };
40 |
41 | public:
42 |
43 | // Initialize the SLAM system.
44 | System(const string &strSettingsFile, const eSensor sensor);
45 |
46 |
47 | // Process the given rgbd frame. Depthmap must be registered to the RGB frame.
48 | // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
49 | // Input depthmap: Float (CV_32F).
50 | // Returns the camera pose (empty if tracking fails).
51 |     cv::Mat TrackRGBD(const cv::Mat &im, cv::Mat &depthmap, const cv::Mat &flowmap, const cv::Mat &flowV, const cv::Mat &masksem, cv::Mat &imDynaSem,
52 |                       const cv::Mat &mTcw_gt, const vector<vector<float>> &vObjPose_gt, const double &timestamp,
53 |                       const bool bIsEnd);
54 |
55 | void SaveResults(const string &filename);
56 | void shutdown();
57 |
58 | private:
59 |
60 | // Input sensor
61 | eSensor mSensor;
62 |
63 | // Map structure.
64 | Map* mpMap;
65 |
66 | // Tracker. It receives a frame and computes the associated camera pose.
67 | Tracking* mpTracker;
68 | Viewer* mpViewer;
69 |
70 | };
71 |
72 | }// namespace VDO_SLAM
73 |
74 | #endif // SYSTEM_H
75 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 28
5 | defaultConfig {
6 | applicationId "org.tensorflow.lite.examples.classification"
7 | minSdkVersion 21
8 | targetSdkVersion 28
9 | versionCode 1
10 | versionName "1.0"
11 |
12 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
13 | }
14 | buildTypes {
15 | release {
16 | minifyEnabled false
17 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
18 | }
19 | }
20 | aaptOptions {
21 | noCompress "tflite"
22 | }
23 | compileOptions {
24 | sourceCompatibility = '1.8'
25 | targetCompatibility = '1.8'
26 | }
27 | lintOptions {
28 | abortOnError false
29 | }
30 | flavorDimensions "tfliteInference"
31 | productFlavors {
32 | // The TFLite inference is built using the TFLite Support library.
33 | support {
34 | dimension "tfliteInference"
35 | }
36 | // The TFLite inference is built using the TFLite Task library.
37 | taskApi {
38 | dimension "tfliteInference"
39 | }
40 | }
41 |
42 | }
43 |
44 | dependencies {
45 | implementation fileTree(dir: 'libs', include: ['*.jar'])
46 | supportImplementation project(":lib_support")
47 | taskApiImplementation project(":lib_task_api")
48 | implementation 'androidx.appcompat:appcompat:1.0.0'
49 | implementation 'androidx.coordinatorlayout:coordinatorlayout:1.0.0'
50 | implementation 'com.google.android.material:material:1.0.0'
51 |
52 | androidTestImplementation 'androidx.test.ext:junit:1.1.1'
53 | androidTestImplementation 'com.google.truth:truth:1.0.1'
54 | androidTestImplementation 'androidx.test:runner:1.2.0'
55 | androidTestImplementation 'androidx.test:rules:1.1.0'
56 | }
57 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
[vector-drawable XML for the adaptive launcher-icon foreground; the markup did not survive extraction, so only this placeholder remains]
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/classification/tflite/ClassifierFloatMobileNet.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.tflite;
17 |
18 | import android.app.Activity;
19 | import java.io.IOException;
20 | import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
21 |
22 | /** This TensorFlowLite classifier works with the float MobileNet model. */
23 | public class ClassifierFloatMobileNet extends Classifier {
24 | /**
25 | * Initializes a {@code ClassifierFloatMobileNet}.
26 | *
27 | * @param device a {@link Device} object to configure the hardware accelerator
28 | * @param numThreads the number of threads during the inference
29 | * @throws IOException if the model is not loaded correctly
30 | */
31 | public ClassifierFloatMobileNet(Activity activity, Device device, int numThreads)
32 | throws IOException {
33 | super(activity, device, numThreads);
34 | }
35 |
36 | @Override
37 | protected String getModelPath() {
38 |     // See build.gradle for where to obtain this model file; it should be
39 |     // downloaded automatically into the assets folder during the build, so
40 |     // there is no need to fetch it manually.
41 | return "mobilenet_v1_1.0_224.tflite";
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/YOLOv8/bytetrack/include/BYTETracker.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include "STrack.h"
4 | #include "data_struct.h"
5 | #include
6 | #include
7 |
8 | namespace byte_track
9 | {
10 |
11 | class BYTETracker
12 | {
13 | public:
14 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
15 | BYTETracker(int frame_rate = 30, int track_buffer = 30);
16 | ~BYTETracker();
17 |
18 | 	vector<STrackPtr> update(const ImagesSegmentedObject& objects);
19 | Scalar get_color(int idx);
20 |
21 | private:
22 | 	vector<STrackPtr> joint_stracks(vector<STrackPtr> &tlista, vector<STrackPtr> &tlistb);
23 |
24 | 	vector<STrackPtr> sub_stracks(vector<STrackPtr> &tlista, vector<STrackPtr> &tlistb);
25 | 	void remove_duplicate_stracks(vector<STrackPtr> &resa, vector<STrackPtr> &resb, vector<STrackPtr> &stracksa, vector<STrackPtr> &stracksb);
26 |
27 | 	void linear_assignment(vector<vector<float>> &cost_matrix, int cost_matrix_size, int cost_matrix_size_size, float thresh,
28 | 		vector<vector<int>> &matches, vector<int> &unmatched_a, vector<int> &unmatched_b);
29 | 	vector<vector<float>> iou_distance(vector<STrackPtr> &atracks, vector<STrackPtr> &btracks, int &dist_size, int &dist_size_size);
30 | 	vector<vector<float>> iou_distance(vector<STrackPtr> &atracks, vector<STrackPtr> &btracks);
31 | 	vector<vector<float>> ious(vector<vector<float>> &atlbrs, vector<vector<float>> &btlbrs);
32 |
33 | 	double lapjv(const vector<vector<float>> &cost, vector<int> &rowsol, vector<int> &colsol,
34 | bool extend_cost = false, float cost_limit = LONG_MAX, bool return_cost = true);
35 |
36 | private:
37 |
38 | float track_thresh;
39 | float high_thresh;
40 | float match_thresh;
41 | int frame_id;
42 | int max_time_lost;
43 |
44 | 	vector<STrackPtr> tracked_stracks;
45 | 	vector<STrackPtr> lost_stracks;
46 | 	vector<STrackPtr> removed_stracks;
47 | KalmanFilter kalman_filter;
48 | };
49 | }
--------------------------------------------------------------------------------
/YOLOv8/include/segmentor_opencv_dnn.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include <opencv2/opencv.hpp>
3 | #include "opencv2/core/mat.hpp"
4 | #include "data_struct.h"
5 |
6 |
7 |
8 | class Segmentor_OpenCV_DNN
9 | {
10 | public:
11 | Segmentor_OpenCV_DNN();
12 |
13 | bool LoadModel(std::string& modelPath);
14 | BatchSegmentedObject Run(MatVector& srcImgList);
15 |
16 | void setBatchSize(int newBatch);
17 | void setInputSize(cv::Size newInputSize);
18 |     void setClassNames(std::vector<std::string> newClassNamesList);
19 | std::string getClassName(int classId);
20 |
21 |     void setDynamicClassNames(std::vector<std::string> classNamesDynamicList);
22 | bool whetherInDynamicClass(std::string className);
23 |
24 | private:
25 | cv::dnn::Net model;
26 | float _classThreshold = 0.25;
27 | float _nmsThreshold = 0.45;
28 | float _maskThreshold = 0.5;
29 | int _batchSize = 1;
30 | cv::Size _inputSize = cv::Size(640, 640);
31 |     std::vector<std::string> _classNamesList;
32 |     std::vector<std::string> _classNamesDynamicList;
33 |
34 | void LetterBox(const cv::Mat& image,
35 | cv::Mat& outImage,
36 | cv::Vec4d& params,
37 | const cv::Size& newShape = cv::Size(640, 640),
38 | bool autoShape = false,
39 | bool scaleFill = false,
40 | bool scaleUp = true,
41 | int stride = 32,
42 | const cv::Scalar& color = cv::Scalar(114, 114, 114));
43 | void GetMask(const cv::Mat& maskProposals,
44 | const cv::Mat& maskProtos,
45 | ImagesSegmentedObject& output,
46 | const MaskParams& maskParams);
47 | void GetMask2(const cv::Mat& maskProposals,
48 | const cv::Mat& maskProtos,
49 | SegmentedObject& output,
50 | const MaskParams& maskParams);
51 | void calcContours(ImagesSegmentedObject& output);
52 |
53 | };
54 |
--------------------------------------------------------------------------------
/VideoFlow/core/utils/logger.py:
--------------------------------------------------------------------------------
1 | from torch.utils.tensorboard import SummaryWriter
2 | from loguru import logger as loguru_logger
3 |
4 | class Logger:
5 | def __init__(self, model, scheduler, cfg):
6 | self.model = model
7 | self.scheduler = scheduler
8 | self.total_steps = 0
9 | self.running_loss = {}
10 | self.writer = None
11 | self.cfg = cfg
12 |
13 | def _print_training_status(self):
14 | metrics_data = [self.running_loss[k]/self.cfg.sum_freq for k in sorted(self.running_loss.keys())]
15 | training_str = "[{:6d}, {}] ".format(self.total_steps+1, self.scheduler.get_last_lr())
16 | metrics_str = ("{:10.4f}, "*len(metrics_data)).format(*metrics_data)
17 |
18 | # print the training status
19 | loguru_logger.info(training_str + metrics_str)
20 |
21 | if self.writer is None:
22 | if self.cfg.log_dir is None:
23 | self.writer = SummaryWriter()
24 | else:
25 | self.writer = SummaryWriter(self.cfg.log_dir)
26 |
27 | for k in self.running_loss:
28 | self.writer.add_scalar(k, self.running_loss[k]/self.cfg.sum_freq, self.total_steps)
29 | self.running_loss[k] = 0.0
30 |
31 | def push(self, metrics):
32 | self.total_steps += 1
33 |
34 | for key in metrics:
35 | if key not in self.running_loss:
36 | self.running_loss[key] = 0.0
37 |
38 | self.running_loss[key] += metrics[key]
39 |
40 |         if self.total_steps % self.cfg.sum_freq == self.cfg.sum_freq-1:  # flush averaged metrics every sum_freq steps
41 | self._print_training_status()
42 | self.running_loss = {}
43 |
44 | def write_dict(self, results):
45 | if self.writer is None:
46 | self.writer = SummaryWriter()
47 |
48 | for key in results:
49 | self.writer.add_scalar(key, results[key], self.total_steps)
50 |
51 |     def close(self):
52 |         if self.writer is not None:  # push()/write_dict() may never have created a writer
53 |             self.writer.close()
54 |
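A minimal, self-contained usage sketch: Logger only needs a scheduler exposing get_last_lr() and a cfg with sum_freq and log_dir attributes; the SimpleNamespace stand-in below is illustrative, not the repo's training setup.

import torch
from types import SimpleNamespace

net = torch.nn.Linear(4, 4)
opt = torch.optim.AdamW(net.parameters(), lr=1e-4)
sched = torch.optim.lr_scheduler.OneCycleLR(opt, max_lr=1e-4, total_steps=300)

cfg = SimpleNamespace(sum_freq=100, log_dir="runs/example")
logger = Logger(net, sched, cfg)

for _ in range(300):
    opt.step()
    sched.step()
    logger.push({"epe": 1.23, "loss": 0.45})  # averaged and flushed every sum_freq steps
logger.close()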
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/classification/tflite/ClassifierQuantizedMobileNet.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.tflite;
17 |
18 | import android.app.Activity;
19 | import java.io.IOException;
20 | import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
21 |
22 | /** This TensorFlow Lite classifier works with the quantized MobileNet model. */
23 | public class ClassifierQuantizedMobileNet extends Classifier {
24 |
25 | /**
26 | * Initializes a {@code ClassifierQuantizedMobileNet}.
27 | *
28 | * @param device a {@link Device} object to configure the hardware accelerator
29 | * @param numThreads the number of threads during the inference
30 | * @throws IOException if the model is not loaded correctly
31 | */
32 | public ClassifierQuantizedMobileNet(Activity activity, Device device, int numThreads)
33 | throws IOException {
34 | super(activity, device, numThreads);
35 | }
36 |
37 | @Override
38 | protected String getModelPath() {
39 |     // See build.gradle for where to obtain this model file. It should
40 |     // be automatically downloaded into the assets directory when the
41 |     // project is built.
42 | return "mobilenet_v1_1.0_224_quant.tflite";
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/zoedepth/config_zoedepth.json:
--------------------------------------------------------------------------------
1 | {
2 | "model": {
3 | "name": "ZoeDepth",
4 | "version_name": "v1",
5 | "n_bins": 64,
6 | "bin_embedding_dim": 128,
7 | "bin_centers_type": "softplus",
8 | "n_attractors":[16, 8, 4, 1],
9 | "attractor_alpha": 1000,
10 | "attractor_gamma": 2,
11 | "attractor_kind" : "mean",
12 | "attractor_type" : "inv",
13 | "midas_model_type" : "DPT_BEiT_L_384",
14 | "min_temp": 0.0212,
15 | "max_temp": 50.0,
16 | "output_distribution": "logbinomial",
17 | "memory_efficient": true,
18 | "inverse_midas": false,
19 | "img_size": [384, 512]
20 | },
21 |
22 | "train": {
23 | "train_midas": true,
24 | "use_pretrained_midas": true,
25 | "trainer": "zoedepth",
26 | "epochs": 5,
27 | "bs": 16,
28 | "optim_kwargs": {"lr": 0.000161, "wd": 0.01},
29 | "sched_kwargs": {"div_factor": 1, "final_div_factor": 10000, "pct_start": 0.7, "three_phase":false, "cycle_momentum": true},
30 | "same_lr": false,
31 | "w_si": 1,
32 | "w_domain": 0.2,
33 | "w_reg": 0,
34 | "w_grad": 0,
35 | "avoid_boundary": false,
36 | "random_crop": false,
37 | "input_width": 640,
38 | "input_height": 480,
39 | "midas_lr_factor": 1,
40 | "encoder_lr_factor":10,
41 | "pos_enc_lr_factor":10,
42 | "freeze_midas_bn": true
43 |
44 | },
45 |
46 | "infer":{
47 | "train_midas": false,
48 | "use_pretrained_midas": false,
49 | "pretrained_resource" : "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_N.pt",
50 | "force_keep_ar": true
51 | },
52 |
53 | "eval":{
54 | "train_midas": false,
55 | "use_pretrained_midas": false,
56 | "pretrained_resource" : "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_N.pt"
57 | }
58 | }
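
This config is consumed through the repo's get_config/build_model pair (see predict_depth_1.py later in this dump): the "train"/"infer"/"eval" blocks override the base "model" keys for the requested mode. A minimal sketch, with the checkpoint path as the only override:

    from zoedepth.utils.config import get_config
    from zoedepth.models.builder import build_model

    conf = get_config("zoedepth", "infer")                   # model keys + infer overrides
    conf['pretrained_resource'] = 'local::./ZoeD_M12_K.pt'   # instead of the url:: default
    model = build_model(conf)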
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/classification/tflite/ClassifierFloatEfficientNet.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.classification.tflite;
17 |
18 | import android.app.Activity;
19 | import java.io.IOException;
20 | import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
21 |
22 | /** This TensorFlowLite classifier works with the float EfficientNet model. */
23 | public class ClassifierFloatEfficientNet extends Classifier {
24 |
25 | /**
26 |    * Initializes a {@code ClassifierFloatEfficientNet}.
27 | *
28 | * @param device a {@link Device} object to configure the hardware accelerator
29 | * @param numThreads the number of threads during the inference
30 | * @throws IOException if the model is not loaded correctly
31 | */
32 | public ClassifierFloatEfficientNet(Activity activity, Device device, int numThreads)
33 | throws IOException {
34 | super(activity, device, numThreads);
35 | }
36 |
37 | @Override
38 | protected String getModelPath() {
39 |     // See build.gradle for where to obtain this model file. It should
40 |     // be automatically downloaded into the assets directory when the
41 |     // project is built.
42 | //return "efficientnet-lite0-fp32.tflite";
43 | return "model.tflite";
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Views/OverlayView.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | import UIKit
16 |
17 | /// UIView for rendering inference output.
18 | class OverlayView: UIView {
19 |
20 | var dots = [CGPoint]()
21 | var lines = [Line]()
22 |
23 | override func draw(_ rect: CGRect) {
24 | for dot in dots {
25 | drawDot(of: dot)
26 | }
27 | for line in lines {
28 | drawLine(of: line)
29 | }
30 | }
31 |
32 | func drawDot(of dot: CGPoint) {
33 | let dotRect = CGRect(
34 | x: dot.x - Traits.dot.radius / 2, y: dot.y - Traits.dot.radius / 2,
35 | width: Traits.dot.radius, height: Traits.dot.radius)
36 | let dotPath = UIBezierPath(ovalIn: dotRect)
37 |
38 | Traits.dot.color.setFill()
39 | dotPath.fill()
40 | }
41 |
42 | func drawLine(of line: Line) {
43 | let linePath = UIBezierPath()
44 | linePath.move(to: CGPoint(x: line.from.x, y: line.from.y))
45 | linePath.addLine(to: CGPoint(x: line.to.x, y: line.to.y))
46 | linePath.close()
47 |
48 | linePath.lineWidth = Traits.line.width
49 | Traits.line.color.setStroke()
50 |
51 | linePath.stroke()
52 | }
53 |
54 | func clear() {
55 | self.dots = []
56 | self.lines = []
57 | }
58 | }
59 |
60 | private enum Traits {
61 | static let dot = (radius: CGFloat(5), color: UIColor.orange)
62 | static let line = (width: CGFloat(1.0), color: UIColor.orange)
63 | }
64 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/solvers/csparse_helper.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_CSPARSE_HELPER_H
28 | #define G2O_CSPARSE_HELPER_H
29 |
30 | #include "csparse_extension.h"
31 |
32 | #include "g2o_csparse_extension_api.h"
33 |
34 | namespace g2o {
35 |
36 | namespace csparse_extension {
37 |
38 | /**
39 |  * write the sparse matrix to a file loadable with Octave
40 | */
41 | G2O_CSPARSE_EXTENSION_API bool writeCs2Octave(const char* filename, const cs* A, bool upperTriangular = true);
42 |
43 | } // end namespace
44 | } // end namespace
45 |
46 | #endif
47 |
--------------------------------------------------------------------------------
/VideoFlow/flow_dataset_mf/convert_things.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 | import pickle
5 |
6 | root = "/mnt/lustre/share/cp/caodongliang/FlyingThings3D/"
7 |
8 | for dstype in ['frames_cleanpass', 'frames_finalpass']:
9 | image_list = []
10 | fflow_list = []
11 | pflow_list = []
12 |
13 |
14 | for cam in ['left']:
15 | image_dirs = sorted(glob(osp.join(root, dstype, 'TRAIN/*/*')))
16 | image_dirs = sorted([osp.join(f, cam) for f in image_dirs])
17 |
18 | flow_dirs = sorted(glob(osp.join(root, 'optical_flow/TRAIN/*/*')))
19 | flow_future_dirs = sorted([osp.join(f, 'into_future', cam) for f in flow_dirs])
20 | flow_past_dirs = sorted([osp.join(f, 'into_past', cam) for f in flow_dirs])
21 |
22 | for idir, fdir, pdir in zip(image_dirs, flow_future_dirs, flow_past_dirs):
23 | images = sorted(glob(osp.join(idir, '*.png')) )
24 | future_flows = sorted(glob(osp.join(fdir, '*.pfm')) )
25 | past_flows = sorted(glob(osp.join(pdir, '*.pfm')) )
26 |
27 | for idx in range(len(images)):
28 | images[idx] = images[idx].replace("/mnt/lustre/share/cp/caodongliang/FlyingThings3D", "flow_data") + "\n"
29 | for idx in range(len(future_flows)):
30 | future_flows[idx] = future_flows[idx].replace("/mnt/lustre/share/cp/caodongliang/FlyingThings3D", "flow_data") + "\n"
31 | for idx in range(len(past_flows)):
32 | past_flows[idx] = past_flows[idx].replace("/mnt/lustre/share/cp/caodongliang/FlyingThings3D", "flow_data") + "\n"
33 |
34 | image_list.append(images)
35 | fflow_list.append(future_flows)
36 | pflow_list.append(past_flows)
37 |
38 | with open("flyingthings_"+dstype+"_png.pkl", 'wb') as f:
39 | pickle.dump(image_list, f)
40 | with open("flyingthings_"+dstype+"_future_pfm.pkl", 'wb') as f:
41 | pickle.dump(fflow_list, f)
42 | with open("flyingthings_"+dstype+"_past_pfm.pkl", 'wb') as f:
43 | pickle.dump(pflow_list, f)
44 |
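
Reading the pickles back yields one list of frame paths per left-camera sequence, which is the layout a multi-frame loader expects; a quick sanity check (note that each entry still carries the trailing newline added above):

    import pickle

    with open("flyingthings_frames_cleanpass_png.pkl", "rb") as f:
        image_list = pickle.load(f)    # list of sequences, each a list of frame paths

    print(len(image_list), len(image_list[0]))
    print(image_list[0][0].strip())    # e.g. flow_data/frames_cleanpass/TRAIN/...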
--------------------------------------------------------------------------------
/VideoFlow/flow_datasets/flying_things_three_frames/convert_things.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 |
5 | root = "/mnt/lustre/share/cp/caodongliang/FlyingThings3D/"
6 |
7 | for dstype in ['frames_cleanpass', 'frames_finalpass']:
8 | image_list = []
9 | flow_list = []
10 | for cam in ['left']:
11 | image_dirs = sorted(glob(osp.join(root, dstype, 'TRAIN/*/*')))
12 | image_dirs = sorted([osp.join(f, cam) for f in image_dirs])
13 |
14 | flow_dirs = sorted(glob(osp.join(root, 'optical_flow/TRAIN/*/*')))
15 | flow_future_dirs = sorted([osp.join(f, 'into_future', cam) for f in flow_dirs])
16 | flow_past_dirs = sorted([osp.join(f, 'into_past', cam) for f in flow_dirs])
17 |
18 | for idir, fdir, pdir in zip(image_dirs, flow_future_dirs, flow_past_dirs):
19 | images = sorted(glob(osp.join(idir, '*.png')) )
20 | future_flows = sorted(glob(osp.join(fdir, '*.pfm')) )
21 | past_flows = sorted(glob(osp.join(pdir, '*.pfm')) )
22 |
23 | for i in range(1, len(images)-1):
24 | image_list.append(images[i-1])
25 | image_list.append(images[i])
26 | image_list.append(images[i+1])
27 |
28 | flow_list.append(future_flows[i])
29 | flow_list.append(past_flows[i])
30 |
31 | for idx in range(len(image_list)):
32 | image_list[idx] = image_list[idx].replace("/mnt/lustre/share/cp/caodongliang/FlyingThings3D", "flow_data") + "\n"
33 | for idx in range(len(flow_list)):
34 | flow_list[idx] = flow_list[idx].replace("/mnt/lustre/share/cp/caodongliang/FlyingThings3D", "flow_data") + "\n"
35 |
36 |
37 | with open(osp.join("flying_things_three_frames", "flyingthings_"+dstype+"_png.txt"), 'w') as f:
38 | f.writelines(image_list)
39 | print(len(image_list))
40 | with open(osp.join("flying_things_three_frames", "flyingthings_"+dstype+"_pfm.txt"), 'w') as f:
41 | f.writelines(flow_list)
42 | print(len(flow_list))
43 |
44 |
45 |
46 |
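
The flat txt files encode one training sample per three image lines plus two flow lines (the middle frame's future flow, then its past flow); a sketch of regrouping them, assuming the files were written as above:

    with open("flying_things_three_frames/flyingthings_frames_cleanpass_png.txt") as f:
        imgs = [line.strip() for line in f]
    with open("flying_things_three_frames/flyingthings_frames_cleanpass_pfm.txt") as f:
        flows = [line.strip() for line in f]

    samples = [
        (imgs[3*k:3*k+3], flows[2*k:2*k+2])    # (prev, cur, next), (future, past)
        for k in range(len(imgs) // 3)
    ]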
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/types/se3_ops.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 H. Strasdat
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_MATH_STUFF
28 | #define G2O_MATH_STUFF
29 |
30 | #include <Eigen/Core>
31 | #include <Eigen/Geometry>
32 |
33 | namespace g2o {
34 | using namespace Eigen;
35 |
36 | inline Matrix3d skew(const Vector3d&v);
37 | inline Vector3d deltaR(const Matrix3d& R);
38 | inline Vector2d project(const Vector3d&);
39 | inline Vector3d project(const Vector4d&);
40 | inline Vector3d unproject(const Vector2d&);
41 | inline Vector4d unproject(const Vector3d&);
42 |
43 | #include "se3_ops.hpp"
44 |
45 | }
46 |
47 | #endif // G2O_MATH_STUFF
48 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/mobile/ios/Midas/Extensions/CGSizeExtension.swift:
--------------------------------------------------------------------------------
1 | // Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | // =============================================================================
15 |
16 | import Accelerate
17 | import Foundation
18 |
19 | extension CGSize {
20 |   /// Returns `CGAffineTransform` to resize `self` to fit in the destination size, keeping the
21 |   /// aspect ratio of `self`. The image is resized to be inscribed in the destination size and
22 |   /// located in the center of the destination.
23 | ///
24 | /// - Parameter toFitIn: destination size to be filled.
25 | /// - Returns: `CGAffineTransform` to transform `self` image to `dest` image.
26 | func transformKeepAspect(toFitIn dest: CGSize) -> CGAffineTransform {
27 | let sourceRatio = self.height / self.width
28 | let destRatio = dest.height / dest.width
29 |
30 | // Calculates ratio `self` to `dest`.
31 | var ratio: CGFloat
32 | var x: CGFloat = 0
33 | var y: CGFloat = 0
34 | if sourceRatio > destRatio {
35 | // Source size is taller than destination. Resized to fit in destination height, and find
36 | // horizontal starting point to be centered.
37 | ratio = dest.height / self.height
38 | x = (dest.width - self.width * ratio) / 2
39 | } else {
40 | ratio = dest.width / self.width
41 | y = (dest.height - self.height * ratio) / 2
42 | }
43 | return CGAffineTransform(a: ratio, b: 0, c: 0, d: ratio, tx: x, ty: y)
44 | }
45 | }
46 |
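
For reference, the same aspect-fit arithmetic written out in Python (a sketch, not part of the app): the scale is taken from the constraining axis, and the slack on the other axis is split in half to center the image:

    def transform_keep_aspect(src_w, src_h, dst_w, dst_h):
        """Return (scale, tx, ty) that inscribes src in dst, centered."""
        if src_h / src_w > dst_h / dst_w:      # source is taller: fit to height
            scale = dst_h / src_h
            return scale, (dst_w - src_w * scale) / 2, 0.0
        scale = dst_w / src_w                  # source is wider: fit to width
        return scale, 0.0, (dst_h - src_h * scale) / 2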
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/midas_cpp/scripts/listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from __future__ import print_function
3 |
4 | import roslib
5 | #roslib.load_manifest('my_package')
6 | import sys
7 | import rospy
8 | import cv2
9 | import numpy as np
10 | from std_msgs.msg import String
11 | from sensor_msgs.msg import Image
12 | from cv_bridge import CvBridge, CvBridgeError
13 |
14 | class video_show:
15 |
16 | def __init__(self):
17 | self.show_output = rospy.get_param('~show_output', True)
18 | self.save_output = rospy.get_param('~save_output', False)
19 | self.output_video_file = rospy.get_param('~output_video_file','result.mp4')
20 | # rospy.loginfo(f"Listener - params: show_output={self.show_output}, save_output={self.save_output}, output_video_file={self.output_video_file}")
21 |
22 | self.bridge = CvBridge()
23 | self.image_sub = rospy.Subscriber("midas_topic", Image, self.callback)
24 |
25 | def callback(self, data):
26 | try:
27 | cv_image = self.bridge.imgmsg_to_cv2(data)
28 | except CvBridgeError as e:
29 | print(e)
30 | return
31 |
32 | if cv_image.size == 0:
33 | return
34 |
35 | rospy.loginfo("Listener: Received new frame")
36 | cv_image = cv_image.astype("uint8")
37 |
38 |         if self.show_output:
39 |             cv2.imshow("video_show", cv_image)
40 |             cv2.waitKey(10)
41 |
42 |         if self.save_output:
43 |             if not getattr(self, 'video_writer_init', False):
44 |                 fourcc = cv2.VideoWriter_fourcc(*'XVID')
45 |                 self.out = cv2.VideoWriter(self.output_video_file, fourcc, 25, (cv_image.shape[1], cv_image.shape[0]))
46 |                 self.video_writer_init = True
47 |             self.out.write(cv_image)
48 |
49 |
50 |
51 | def main(args):
52 | rospy.init_node('listener', anonymous=True)
53 | ic = video_show()
54 | try:
55 | rospy.spin()
56 | except KeyboardInterrupt:
57 | print("Shutting down")
58 | cv2.destroyAllWindows()
59 |
60 | if __name__ == '__main__':
61 | main(sys.argv)
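
A matching publisher sketch for exercising this node, assuming a ROS1 environment; 'frame.png' is a hypothetical test image. It feeds BGR frames to the midas_topic that the subscriber above listens on:

    import rospy
    import cv2
    from cv_bridge import CvBridge
    from sensor_msgs.msg import Image

    rospy.init_node('midas_test_talker')
    pub = rospy.Publisher('midas_topic', Image, queue_size=1)
    bridge = CvBridge()
    frame = cv2.imread('frame.png')            # hypothetical test image

    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        pub.publish(bridge.cv2_to_imgmsg(frame, encoding='bgr8'))
        rate.sleep()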
--------------------------------------------------------------------------------
/ZoeDepth/predict_depth_1.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from PIL import Image
3 | import tqdm
4 | import numpy as np
5 | from zoedepth.models.builder import build_model
6 | from zoedepth.utils.config import get_config
7 | from zoedepth.utils.misc import save_raw_16bit
8 | from zoedepth.utils.misc import colorize
9 |
10 | import os
11 | import glob
12 | import cv2
13 |
14 |
15 | # ZoeD_N
16 | conf = get_config("zoedepth", "infer", config_version='kitti')
17 | print(conf)
18 | #conf['save_dir'] = '/home/spurs/.cache/torch/hub/checkpoints'
19 | conf['pretrained_resource'] = 'local::./ZoeD_M12_K.pt'
20 | model_zoe_n = build_model(conf)
21 | #model_zoe_n = torch.hub.load(".", "ZoeD_N", source="local", pretrained=True)
22 |
23 |
24 | DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
25 | zoe = model_zoe_n.to(DEVICE)
26 |
27 | img_dir = '/home/spurs/dataset/30fps/2023_02_21_14_04_08/data/img_small/*.png'
28 | img_dir = '/home/spurs/dataset/2011_10_03/2011_10_03_drive_0047_sync/image_02/data/*.png'
29 | for img in tqdm.tqdm(glob.glob(img_dir)):
30 | image = Image.open(img).convert("RGB") # load
31 | #depth_numpy = zoe.infer_pil(image) # as numpy
32 |
33 | #depth_pil = zoe.infer_pil(image, output_type="pil") # as 16-bit PIL Image
34 |     depth_tensor = zoe.infer_pil(image, output_type="tensor").cpu().detach().numpy() # tensor moved to CPU, converted to numpy
35 |
36 |
37 | #print(depth_tensor.shape)
38 | #print(depth_tensor.dtype, depth_tensor.min(), depth_tensor.max())
39 |
40 |     # write the raw depth to ./output as .npy
41 |     fpath = os.path.join('output')
42 | os.makedirs(fpath, exist_ok=True)
43 | fpath = os.path.join(fpath, os.path.basename(img))
44 |
45 | np.save(fpath + '.npy', depth_tensor)
46 | break
47 |
48 | '''
49 | save_raw_16bit(depth_tensor, fpath)
50 |
51 | image = Image.open(img).convert("L")
52 | image = np.asarray(image)
53 |
54 | print(image.shape, image.min(), image.max())
55 |
56 | #cv2.imwrite(fpath, depth_tensor)
57 | #colored = colorize(depth_tensor)
58 | #colored = colorize(depth_tensor, 0, 10)
59 | #Image.fromarray(colored).save(fpath)
60 |
61 | '''
62 |
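
The saved raw depth can be reloaded and rendered with the colorize helper already imported at the top of the script (the commented block above does the same in-process); filenames here are illustrative:

    import numpy as np
    from PIL import Image
    from zoedepth.utils.misc import colorize

    depth = np.load('output/0000000000.png.npy')   # illustrative filename
    colored = colorize(depth)                      # color-mapped uint8 image
    Image.fromarray(colored).save('output/depth_vis.png')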
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/ros/midas_cpp/scripts/listener_original.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from __future__ import print_function
3 |
4 | import roslib
5 | #roslib.load_manifest('my_package')
6 | import sys
7 | import rospy
8 | import cv2
9 | import numpy as np
10 | from std_msgs.msg import String
11 | from sensor_msgs.msg import Image
12 | from cv_bridge import CvBridge, CvBridgeError
13 |
14 | class video_show:
15 |
16 | def __init__(self):
17 | self.show_output = rospy.get_param('~show_output', True)
18 | self.save_output = rospy.get_param('~save_output', False)
19 | self.output_video_file = rospy.get_param('~output_video_file','result.mp4')
20 | # rospy.loginfo(f"Listener original - params: show_output={self.show_output}, save_output={self.save_output}, output_video_file={self.output_video_file}")
21 |
22 | self.bridge = CvBridge()
23 | self.image_sub = rospy.Subscriber("image_topic", Image, self.callback)
24 |
25 | def callback(self, data):
26 | try:
27 | cv_image = self.bridge.imgmsg_to_cv2(data)
28 | except CvBridgeError as e:
29 | print(e)
30 | return
31 |
32 | if cv_image.size == 0:
33 | return
34 |
35 | rospy.loginfo("Listener_original: Received new frame")
36 | cv_image = cv_image.astype("uint8")
37 |
38 |         if self.show_output:
39 |             cv2.imshow("video_show_orig", cv_image)
40 |             cv2.waitKey(10)
41 |
42 |         if self.save_output:
43 |             if not getattr(self, 'video_writer_init', False):
44 |                 fourcc = cv2.VideoWriter_fourcc(*'XVID')
45 |                 self.out = cv2.VideoWriter(self.output_video_file, fourcc, 25, (cv_image.shape[1], cv_image.shape[0]))
46 |                 self.video_writer_init = True
47 |             self.out.write(cv_image)
48 |
49 |
50 |
51 | def main(args):
52 | rospy.init_node('listener_original', anonymous=True)
53 | ic = video_show()
54 | try:
55 | rospy.spin()
56 | except KeyboardInterrupt:
57 | print("Shutting down")
58 | cv2.destroyAllWindows()
59 |
60 | if __name__ == '__main__':
61 | main(sys.argv)
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/stuff/os_specific.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_OS_SPECIFIC_HH_
28 | #define G2O_OS_SPECIFIC_HH_
29 |
30 | #ifdef WINDOWS
31 | #include <stdio.h>
32 | #include <stdarg.h>
33 | #include <stdlib.h>
34 | #ifndef _WINDOWS
35 | #include <stdint.h>
36 | #endif
37 | #define drand48() ((double) rand()/(double)RAND_MAX)
38 |
39 | #ifdef __cplusplus
40 | extern "C" {
41 | #endif
42 |
43 | int vasprintf(char** strp, const char* fmt, va_list ap);
44 |
45 | #ifdef __cplusplus
46 | }
47 | #endif
48 |
49 | #endif
50 |
51 | #ifdef UNIX
52 | #include <sys/time.h>
53 | // nothing to do on real operating systems
54 | #endif
55 |
56 | #endif
57 |
--------------------------------------------------------------------------------
/VideoFlow/flow_datasets/sintel_three_frames/convert_sintel.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os.path as osp
3 | from glob import glob
4 | import os
5 |
6 | root = "/mnt/lustre/share/cp/caodongliang/MPI-Sintel/"
7 |
8 | for split in ['training']:
9 | for dstype in ['clean', 'final']:
10 | image_list = []
11 | flow_list = []
12 | extra_info_list = []
13 |
14 | flow_root = osp.join(root, split, 'flow')
15 | image_root = osp.join(root, split, dstype)
16 |
17 | for scene in os.listdir(image_root):
18 | images = sorted(glob(osp.join(image_root, scene, '*.png')))
19 | flows = sorted(glob(osp.join(flow_root, scene, '*.flo')))
20 |
21 | for i in range(len(images)-1):
22 | if i==0:
23 | image_list.append(images[0])
24 | else:
25 | image_list.append(images[i-1])
26 |
27 | image_list.append(images[i])
28 | image_list.append(images[i+1])
29 |
30 | flow_list.append(flows[i])
31 | extra_info_list.append(scene)
32 | extra_info_list.append(str(i))
33 |
34 | for idx in range(len(image_list)):
35 | image_list[idx] = image_list[idx].replace("/mnt/lustre/share/cp/caodongliang/MPI-Sintel", "Sintel") + "\n"
36 | for idx in range(len(flow_list)):
37 | flow_list[idx] = flow_list[idx].replace("/mnt/lustre/share/cp/caodongliang/MPI-Sintel", "Sintel") + "\n"
38 | for idx in range(len(extra_info_list)):
39 | extra_info_list[idx] = extra_info_list[idx] + "\n"
40 |
41 | with open(osp.join("sintel_three_frames", "Sintel_"+dstype+"_png.txt"), 'w') as f:
42 | f.writelines(image_list)
43 | print(len(image_list))
44 | with open(osp.join("sintel_three_frames", "Sintel_"+dstype+"_flo.txt"), 'w') as f:
45 | f.writelines(flow_list)
46 | print(len(flow_list))
47 | with open(osp.join("sintel_three_frames", "Sintel_"+dstype+"_extra_info.txt"), 'w') as f:
48 | f.writelines(extra_info_list)
49 | print(len(extra_info_list))
--------------------------------------------------------------------------------
/include/cvplot/highgui.h:
--------------------------------------------------------------------------------
1 | #ifndef CVPLOT_HIGHGUI_H
2 | #define CVPLOT_HIGHGUI_H
3 |
4 | #include <string>
5 | #include <vector>
6 |
7 | #include "window.h"
8 |
9 | namespace cvplot {
10 |
11 | int createTrackbar(const std::string &trackbarname, const std::string &winname,
12 | int *value, int count, TrackbarCallback onChange = 0,
13 | void *userdata = 0);
14 | void destroyAllWindows();
15 | void destroyWindow(const std::string &view);
16 | int getMouseWheelDelta(int flags);
17 | int getTrackbarPos(const std::string &trackbarname, const std::string &winname);
18 | double getWindowProperty(const std::string &winname, int prop_id);
19 | void imshow(const std::string &view, void *img);
20 | void moveWindow(const std::string &view, int x, int y);
21 | void namedWindow(const std::string &view, int flags = 0);
22 | void resizeWindow(const std::string &view, int width, int height);
23 | void resizeWindow(const std::string &view, const Size &size);
24 | Rect selectROI(const std::string &windowName, void *img,
25 | bool showCrosshair = true, bool fromCenter = false);
26 | Rect selectROI(void *img, bool showCrosshair = true, bool fromCenter = false);
27 | void selectROIs(const std::string &windowName, void *img,
28 |                 std::vector<Rect> &boundingBoxes, bool showCrosshair = true,
29 | bool fromCenter = false);
30 | void setMouseCallback(const std::string &view, MouseCallback onMouse,
31 | void *userdata = 0);
32 | void setTrackbarMax(const std::string &trackbarname, const std::string &winname,
33 | int maxval);
34 | void setTrackbarMin(const std::string &trackbarname, const std::string &winname,
35 | int minval);
36 | void setTrackbarPos(const std::string &trackbarname, const std::string &winname,
37 | int pos);
38 | void setWindowProperty(const std::string &winname, int prop_id,
39 | double prop_value);
40 | void setWindowTitle(const std::string &view, const std::string &title);
41 | int startWindowThread();
42 | int waitKey(int delay = 0);
43 | int waitKeyEx(int delay = 0);
44 |
45 | } // namespace cvplot
46 |
47 | #endif // CVPLOT_HIGHGUI_H
48 |
--------------------------------------------------------------------------------
/YOLOv8/bytetrack/include/kalmanFilter.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include <cstddef>
4 | #include <vector>
5 |
6 | #include <Eigen/Core>
7 | #include <Eigen/Dense>
8 |
9 |
10 | namespace byte_track
11 | {
12 | typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> DETECTBOX;
13 | typedef Eigen::Matrix<float, -1, 4, Eigen::RowMajor> DETECTBOXSS;
14 | typedef Eigen::Matrix<float, 1, 128, Eigen::RowMajor> FEATURE;
15 | typedef Eigen::Matrix<float, Eigen::Dynamic, 128, Eigen::RowMajor> FEATURESS;
16 | //typedef std::vector<FEATURE> FEATURESS;
17 |
18 | //Kalmanfilter
19 | //typedef Eigen::Matrix KAL_FILTER;
20 | typedef Eigen::Matrix<float, 1, 8, Eigen::RowMajor> KAL_MEAN;
21 | typedef Eigen::Matrix<float, 8, 8, Eigen::RowMajor> KAL_COVA;
22 | typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> KAL_HMEAN;
23 | typedef Eigen::Matrix<float, 4, 4, Eigen::RowMajor> KAL_HCOVA;
24 | typedef std::pair<KAL_MEAN, KAL_COVA> KAL_DATA;
25 | typedef std::pair<KAL_HMEAN, KAL_HCOVA> KAL_HDATA;
26 |
27 | //using KAL_DATA = std::pair<KAL_MEAN, KAL_COVA>;
28 | //using KAL_HDATA = std::pair<KAL_HMEAN, KAL_HCOVA>;
29 | class KalmanFilter
30 | {
31 | public:
32 | EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
33 |
34 | static const double chi2inv95[10];
35 | KalmanFilter();
36 | KAL_DATA initiate(const DETECTBOX& measurement);
37 | void predict(KAL_MEAN& mean, KAL_COVA& covariance);
38 | KAL_HDATA project(const KAL_MEAN& mean, const KAL_COVA& covariance);
39 | KAL_DATA update(const KAL_MEAN& mean,
40 | const KAL_COVA& covariance,
41 | const DETECTBOX& measurement);
42 |
43 | Eigen::Matrix<float, 1, -1> gating_distance(
44 | const KAL_MEAN& mean,
45 | const KAL_COVA& covariance,
46 | const std::vector<DETECTBOX>& measurements,
47 | bool only_position = false);
48 |
49 | private:
50 | Eigen::Matrix<float, 8, 8, Eigen::RowMajor> _motion_mat;
51 | Eigen::Matrix<float, 4, 8, Eigen::RowMajor> _update_mat;
52 | float _std_weight_position;
53 | float _std_weight_velocity;
54 | };
55 | }
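
The typedefs imply the usual ByteTrack/DeepSORT constant-velocity model: an 8-dim mean (box centre x, y, aspect ratio a, height h, plus their velocities), an 8x8 covariance, and a 4-dim measurement. A NumPy sketch of the predict/project steps under those assumptions:

    import numpy as np

    dim, dt = 4, 1.0
    F = np.eye(2 * dim)                  # constant-velocity motion model
    F[:dim, dim:] = dt * np.eye(dim)     # x' = x + dt * v
    H = np.eye(dim, 2 * dim)             # observe (x, y, a, h) only

    def predict(mean, cov, Q):
        return F @ mean, F @ cov @ F.T + Q

    def project(mean, cov, R):
        return H @ mean, H @ cov @ H.T + R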
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/types/dquat2mat.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, H. Strasdat, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef _DQUAT2MAT_H_
28 | #define _DQUAT2MAT_H_
29 | #include <Eigen/Core>
30 | #include "g2o_types_slam3d_api.h"
31 |
32 | namespace g2o {
33 | namespace internal {
34 |
35 | void G2O_TYPES_SLAM3D_API compute_dq_dR ( Eigen::Matrix<number_t, 3, 9, Eigen::ColMajor>& dq_dR , const number_t& r11 , const number_t& r21 , const number_t& r31 , const number_t& r12 , const number_t& r22 , const number_t& r32 , const number_t& r13 , const number_t& r23 , const number_t& r33 );
36 |
37 | void G2O_TYPES_SLAM3D_API compute_dR_dq ( Eigen::Matrix<number_t, 9, 3, Eigen::ColMajor>& dR_dq , const number_t& qx , const number_t& qy , const number_t& qz , const number_t& qw ) ;
38 | }
39 | }
40 | #endif
41 |
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/models/zoedepth_nk/config_zoedepth_nk.json:
--------------------------------------------------------------------------------
1 | {
2 | "model": {
3 | "name": "ZoeDepthNK",
4 | "version_name": "v1",
5 | "bin_conf" : [
6 | {
7 | "name": "nyu",
8 | "n_bins": 64,
9 | "min_depth": 1e-3,
10 | "max_depth": 10.0
11 | },
12 | {
13 | "name": "kitti",
14 | "n_bins": 64,
15 | "min_depth": 1e-3,
16 | "max_depth": 80.0
17 | }
18 | ],
19 | "bin_embedding_dim": 128,
20 | "bin_centers_type": "softplus",
21 | "n_attractors":[16, 8, 4, 1],
22 | "attractor_alpha": 1000,
23 | "attractor_gamma": 2,
24 | "attractor_kind" : "mean",
25 | "attractor_type" : "inv",
26 | "min_temp": 0.0212,
27 | "max_temp": 50.0,
28 | "memory_efficient": true,
29 | "midas_model_type" : "DPT_BEiT_L_384",
30 | "img_size": [384, 512]
31 | },
32 |
33 | "train": {
34 | "train_midas": true,
35 | "use_pretrained_midas": true,
36 | "trainer": "zoedepth_nk",
37 | "epochs": 5,
38 | "bs": 16,
39 | "optim_kwargs": {"lr": 0.0002512, "wd": 0.01},
40 | "sched_kwargs": {"div_factor": 1, "final_div_factor": 10000, "pct_start": 0.7, "three_phase":false, "cycle_momentum": true},
41 | "same_lr": false,
42 | "w_si": 1,
43 | "w_domain": 100,
44 | "avoid_boundary": false,
45 | "random_crop": false,
46 | "input_width": 640,
47 | "input_height": 480,
48 | "w_grad": 0,
49 | "w_reg": 0,
50 | "midas_lr_factor": 10,
51 | "encoder_lr_factor":10,
52 | "pos_enc_lr_factor":10
53 | },
54 |
55 | "infer": {
56 | "train_midas": false,
57 | "pretrained_resource": "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_NK.pt",
58 | "use_pretrained_midas": false,
59 | "force_keep_ar": true
60 | },
61 |
62 | "eval": {
63 | "train_midas": false,
64 | "pretrained_resource": "url::https://github.com/isl-org/ZoeDepth/releases/download/v1.0/ZoeD_M12_NK.pt",
65 | "use_pretrained_midas": false
66 | }
67 | }
--------------------------------------------------------------------------------
/YOLOv8/include/detector_onnxruntime.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include <memory>
3 | #include <opencv2/opencv.hpp>
4 | #include <onnxruntime_cxx_api.h>
5 | #include "data_struct.h"
6 |
7 | class Detector_ONNXRUNTIME
8 | {
9 | public:
10 | Detector_ONNXRUNTIME();
11 |
12 | bool LoadModel(std::string& modelPath);
13 | BatchDetectedObject Run(MatVector& srcImgList);
14 |
15 | void setClassNames(std::vector<std::string> newClassNamesList);
16 | void setBatchSize(int newBatch);
17 | void setInputSize(cv::Size newInputSize);
18 | std::string getClassName(int classId);
19 |
20 | void setDynamicClassNames(std::vector<std::string> classNamesDynamicList);
21 | bool whetherInDynamicClass(std::string className);
22 |
23 | private:
24 | void Preprocessing(const std::vector<cv::Mat>& SrcImgs, std::vector<cv::Mat>& OutSrcImgs, std::vector<cv::Vec4d>& params);
25 | void LetterBox(const cv::Mat& image,
26 | cv::Mat& outImage,
27 | cv::Vec4d& params,
28 | const cv::Size& newShape = cv::Size(640, 640),
29 | bool autoShape = false,
30 | bool scaleFill = false,
31 | bool scaleUp = true,
32 | int stride = 32,
33 | const cv::Scalar& color = cv::Scalar(114, 114, 114));
34 | template <typename T>
35 | T VectorProduct(const std::vector<T>& v)
36 | {
37 | return std::accumulate(v.begin(), v.end(), 1, std::multiplies<T>());
38 | };
39 | int _cudaID = 0;
40 | float _classThreshold = 0.25;
41 | float _nmsThreshold = 0.45;
42 | Ort::Session* _OrtSession = nullptr;
43 | Ort::Env _OrtEnv = Ort::Env(OrtLoggingLevel::ORT_LOGGING_LEVEL_ERROR, "Yolov8-Seg");
44 | std::shared_ptr<char> _inputName, _output_name0;
45 | std::vector<char*> _inputNodeNames, _outputNodeNames;
46 | std::vector<int64_t> _inputTensorShape, _outputTensorShape;
47 | bool _isDynamicShape = false; //onnx support dynamic shape
48 | Ort::MemoryInfo _OrtMemoryInfo = Ort::MemoryInfo::CreateCpu(OrtAllocatorType::OrtDeviceAllocator, OrtMemType::OrtMemTypeCPUOutput);
49 |
50 | std::vector<std::string> _classNamesList;
51 | std::vector<std::string> _classNamesDynamicList;
52 | int _batchSize = 1;
53 | cv::Size _inputSize = cv::Size(640, 640);
54 | };
55 |
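
The LetterBox declaration above is the standard YOLO letterbox: scale the image into newShape while keeping its aspect ratio, optionally never up-scaling, and pad the borders with gray (114, 114, 114) so the network input has a fixed size. An equivalent Python sketch of that resize-and-pad:

    import cv2

    def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), scale_up=True):
        h, w = img.shape[:2]
        r = min(new_shape[0] / h, new_shape[1] / w)
        if not scale_up:                  # only shrink, never enlarge
            r = min(r, 1.0)
        new_w, new_h = round(w * r), round(h * r)
        pad_w, pad_h = new_shape[1] - new_w, new_shape[0] - new_h
        img = cv2.resize(img, (new_w, new_h), interpolation=cv2.INTER_LINEAR)
        top, bottom = pad_h // 2, pad_h - pad_h // 2
        left, right = pad_w // 2, pad_w - pad_w // 2
        img = cv2.copyMakeBorder(img, top, bottom, left, right,
                                 cv2.BORDER_CONSTANT, value=color)
        return img, r, (left, top)        # padded image, scale, offsets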
--------------------------------------------------------------------------------
/ZoeDepth/zoedepth/trainers/builder.py:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | # Copyright (c) 2022 Intelligent Systems Lab Org
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 |
23 | # File author: Shariq Farooq Bhat
24 |
25 | from importlib import import_module
26 |
27 |
28 | def get_trainer(config):
29 | """Builds and returns a trainer based on the config.
30 |
31 | Args:
32 | config (dict): the config dict (typically constructed using utils.config.get_config)
33 | config.trainer (str): the name of the trainer to use. The module named "{config.trainer}_trainer" must exist in trainers root module
34 |
35 | Raises:
36 | ValueError: If the specified trainer does not exist under trainers/ folder
37 |
38 | Returns:
39 | Trainer (inherited from zoedepth.trainers.BaseTrainer): The Trainer object
40 | """
41 | assert "trainer" in config and config.trainer is not None and config.trainer != '', "Trainer not specified. Config: {0}".format(
42 | config)
43 | try:
44 | Trainer = getattr(import_module(
45 | f"zoedepth.trainers.{config.trainer}_trainer"), 'Trainer')
46 | except ModuleNotFoundError as e:
47 | raise ValueError(f"Trainer {config.trainer}_trainer not found.") from e
48 | return Trainer
49 |
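
Per the docstring, config.trainer == "zoedepth" resolves to the Trainer class in zoedepth.trainers.zoedepth_trainer; a usage sketch (the constructor arguments belong to the concrete trainer and are assumed here):

    from zoedepth.utils.config import get_config
    from zoedepth.trainers.builder import get_trainer

    config = get_config("zoedepth", "train")   # config.trainer == "zoedepth"
    Trainer = get_trainer(config)              # returns the class, not an instance
    # trainer = Trainer(config, model, train_loader, test_loader)  # signature assumed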
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/stuff/os_specific.c:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #include "os_specific.h"
28 |
29 | #ifdef WINDOWS
30 |
31 | int vasprintf(char** strp, const char* fmt, va_list ap)
32 | {
33 | int n;
34 | int size = 100;
35 | char* p;
36 | char* np;
37 |
38 | if ((p = (char*)malloc(size * sizeof(char))) == NULL)
39 | return -1;
40 |
41 | while (1) {
42 | #ifdef _MSC_VER
43 | n = vsnprintf_s(p, size, size - 1, fmt, ap);
44 | #else
45 | n = vsnprintf(p, size, fmt, ap);
46 | #endif
47 | if (n > -1 && n < size) {
48 | *strp = p;
49 | return n;
50 | }
51 | if (n > -1)
52 | size = n+1;
53 | else
54 | size *= 2;
55 | if ((np = (char*)realloc (p, size * sizeof(char))) == NULL) {
56 | free(p);
57 | return -1;
58 | } else
59 | p = np;
60 | }
61 | }
62 |
63 |
64 | #endif
65 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | build/*
3 | lib/*
4 | example/vdo_slam
5 |
6 | dynamic_slam_graph_after_opt.g2o
7 | dynamic_slam_graph_before_opt.g2o
8 | debug.txt
9 | local_ba_before.g2o
10 | local_ba_after.g2o
11 |
12 |
13 | dependencies/g2o/build/*
14 | track_distribution.txt
15 | track_distribution_static.txt
16 | onnxruntime-linux-x64-1.16.3/*
17 | .vscode/
18 |
19 | VideoFlow/VideoFlow_ckpt/MOF_kitti.pth
20 | VideoFlow/VideoFlow_ckpt/twins_svt_large-90f6aaa9.pth
21 | ZoeDepth/dpt_beit_large_384.pt
22 | ZoeDepth/ZoeD_M12_K.pt
23 | ZoeDepth/ZoeD_M12_NK.pt
24 | yolov8s-seg.onnx
25 |
26 | # Byte-compiled / optimized / DLL files
27 | __pycache__/
28 | *.py[cod]
29 | *$py.class
30 |
31 | # C extensions
32 | *.so
33 |
34 | # Distribution / packaging
35 | .Python
36 | build/
37 | develop-eggs/
38 | dist/
39 | downloads/
40 | eggs/
41 | .eggs/
42 | lib/
43 | lib64/
44 | parts/
45 | sdist/
46 | var/
47 | wheels/
48 | *.egg-info/
49 | .installed.cfg
50 | *.egg
51 | MANIFEST
52 |
53 | # PyInstaller
54 | # Usually these files are written by a python script from a template
55 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
56 | *.manifest
57 | *.spec
58 |
59 | # Installer logs
60 | pip-log.txt
61 | pip-delete-this-directory.txt
62 |
63 | # Unit test / coverage reports
64 | htmlcov/
65 | .tox/
66 | .coverage
67 | .coverage.*
68 | .cache
69 | nosetests.xml
70 | coverage.xml
71 | *.cover
72 | .hypothesis/
73 | .pytest_cache/
74 |
75 | # Translations
76 | *.mo
77 | *.pot
78 |
79 | # Django stuff:
80 | *.log
81 | local_settings.py
82 | db.sqlite3
83 |
84 | # Flask stuff:
85 | instance/
86 | .webassets-cache
87 |
88 | # Scrapy stuff:
89 | .scrapy
90 |
91 | # Sphinx documentation
92 | docs/_build/
93 |
94 | # PyBuilder
95 | target/
96 |
97 | # Jupyter Notebook
98 | .ipynb_checkpoints
99 |
100 | # pyenv
101 | .python-version
102 |
103 | # celery beat schedule file
104 | celerybeat-schedule
105 |
106 | # SageMath parsed files
107 | *.sage.py
108 |
109 | # Environments
110 | .env
111 | .venv
112 | env/
113 | venv/
114 | ENV/
115 | env.bak/
116 | venv.bak/
117 |
118 | # Spyder project settings
119 | .spyderproject
120 | .spyproject
121 |
122 | # Rope project settings
123 | .ropeproject
124 |
125 | # mkdocs documentation
126 | /site
127 |
128 | # mypy
129 | .mypy_cache/
130 |
131 | *.pfm
132 | *.pt
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/base_vertex.hpp:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | template <int D, typename T>
28 | BaseVertex<D, T>::BaseVertex() :
29 | OptimizableGraph::Vertex(),
30 | _hessian(0, D, D)
31 | {
32 | _dimension = D;
33 | }
34 |
35 | template <int D, typename T>
36 | double BaseVertex<D, T>::solveDirect(double lambda) {
37 | Matrix<double, D, D> tempA = _hessian + Matrix<double, D, D>::Identity()*lambda;
38 | double det = tempA.determinant();
39 | if (g2o_isnan(det) || det < std::numeric_limits<double>::epsilon())
40 | return det;
41 | Matrix<double, D, 1> dx = tempA.llt().solve(_b);
42 | oplus(&dx[0]);
43 | return det;
44 | }
45 |
46 | template <int D, typename T>
47 | void BaseVertex<D, T>::clearQuadraticForm() {
48 | _b.setZero();
49 | }
50 |
51 | template <int D, typename T>
52 | void BaseVertex<D, T>::mapHessianMemory(double* d)
53 | {
54 | new (&_hessian) HessianBlockType(d, D, D);
55 | }
56 |
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/parameter.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_GRAPH_PARAMETER_HH_
28 | #define G2O_GRAPH_PARAMETER_HH_
29 |
30 | #include <iosfwd>
31 |
32 | #include "hyper_graph.h"
33 |
34 | namespace g2o {
35 |
36 | class Parameter : public HyperGraph::HyperGraphElement
37 | {
38 | public:
39 | Parameter();
40 | virtual ~Parameter() {};
41 | //! read the data from a stream
42 | virtual bool read(std::istream& is) = 0;
43 | //! write the data to a stream
44 | virtual bool write(std::ostream& os) const = 0;
45 | int id() const {return _id;}
46 | void setId(int id_);
47 | virtual HyperGraph::HyperGraphElementType elementType() const { return HyperGraph::HGET_PARAMETER;}
48 | protected:
49 | int _id;
50 | };
51 |
52 | typedef std::vector<Parameter*> ParameterVector;
53 |
54 | } // end namespace
55 |
56 | #endif
57 |
--------------------------------------------------------------------------------
/example/kitti-0018-0020.yaml:
--------------------------------------------------------------------------------
1 | %YAML:1.0
2 |
3 | #--------------------------------------------------------------------------------------------
4 | # Camera Parameters. Adjust them!
5 | #--------------------------------------------------------------------------------------------
6 |
7 | # Camera calibration and distortion parameters (OpenCV)
8 | Camera.fx: 718.8560
9 | Camera.fy: 718.8560
10 | Camera.cx: 607.1928
11 | Camera.cy: 185.2157
12 |
13 | Camera.k1: 0.0
14 | Camera.k2: 0.0
15 | Camera.p1: 0.0
16 | Camera.p2: 0.0
17 |
18 | Camera.width: 1242
19 | Camera.height: 375
20 |
21 | # Camera frames per second
22 | Camera.fps: 10.0
23 |
24 | # stereo baseline times fx
25 | Camera.bf: 388.1822
26 |
27 | # Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
28 | Camera.RGB: 1
29 |
30 | #--------------------------------------------------------------------------------------------
31 | # System Parameters
32 | #--------------------------------------------------------------------------------------------
33 |
34 | # Switch for Dataset (1 for Oxford MultiMotion Dataset, 2 for KITTI dataset, 3...)
35 | ChooseData: 2
36 |
37 | # Depthmap values factor
38 | DepthMapFactor: 256.0
39 |
40 | # Close/Far Depth threshold
41 | ThDepthBG: 40.0
42 | ThDepthOBJ: 25.0
43 |
44 | # Max Tracking Points on Background and Object in each frame
45 | MaxTrackPointBG: 1200 # 1200
46 | MaxTrackPointOBJ: 800 # 800
47 |
48 | # Scene Flow Magnitude and Distribution Threshold
49 | SFMgThres: 0.12 # 0.05
50 | SFDsThres: 0.3 # 0.99
51 |
52 | # Window Size and Overlapping Size for Local Batch Optimization
53 | WINDOW_SIZE: 20
54 | OVERLAP_SIZE: 4
55 |
56 | # Use sampled feature or detected feature for background (1: sampled, 0: detected)
57 | UseSampleFeature: 0
58 |
59 | #--------------------------------------------------------------------------------------------
60 | # ORB Parameters (NOTE: only FAST corners are used in this system.)
61 | #--------------------------------------------------------------------------------------------
62 |
63 | # ORB Extractor: Number of features per image
64 | ORBextractor.nFeatures: 2500
65 |
66 | # ORB Extractor: Scale factor between levels in the scale pyramid
67 | ORBextractor.scaleFactor: 1.2
68 |
69 | # ORB Extractor: Number of levels in the scale pyramid
70 | ORBextractor.nLevels: 8
71 |
72 | # ORB Extractor: Fast threshold
73 | ORBextractor.iniThFAST: 20
74 | ORBextractor.minThFAST: 7
75 |
76 |
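
This is OpenCV FileStorage YAML (the %YAML:1.0 directive plus dotted keys), so the natural reader is cv2.FileStorage rather than a generic YAML parser, which typically rejects the directive; a sketch:

    import cv2

    fs = cv2.FileStorage("example/kitti-0018-0020.yaml", cv2.FILE_STORAGE_READ)
    fx = fs.getNode("Camera.fx").real()             # 718.8560
    window = int(fs.getNode("WINDOW_SIZE").real())  # 20
    fs.release()
    print(fx, window)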
--------------------------------------------------------------------------------
/dependencies/g2o/g2o/core/optimization_algorithm_gauss_newton.h:
--------------------------------------------------------------------------------
1 | // g2o - General Graph Optimization
2 | // Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3 | // All rights reserved.
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright notice,
10 | // this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above copyright
12 | // notice, this list of conditions and the following disclaimer in the
13 | // documentation and/or other materials provided with the distribution.
14 | //
15 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16 | // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17 | // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18 | // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19 | // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 | // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 | // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23 | // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24 | // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
27 | #ifndef G2O_OPTIMIZATION_ALGORITHM_GAUSS_NEWTON_H
28 | #define G2O_OPTIMIZATION_ALGORITHM_GAUSS_NEWTON_H
29 |
30 | #include "optimization_algorithm_with_hessian.h"
31 |
32 | namespace g2o {
33 |
34 | /**
35 | * \brief Implementation of the Gauss Newton Algorithm
36 | */
37 | class OptimizationAlgorithmGaussNewton : public OptimizationAlgorithmWithHessian
38 | {
39 | public:
40 | /**
41 |    * construct the Gauss Newton algorithm, which uses the given Solver for solving the
42 | * linearized system.
43 | */
44 | explicit OptimizationAlgorithmGaussNewton(Solver* solver);
45 | virtual ~OptimizationAlgorithmGaussNewton();
46 |
47 | virtual SolverResult solve(int iteration, bool online = false);
48 |
49 | virtual void printVerbose(std::ostream& os) const;
50 | };
51 |
52 | } // end namespace
53 |
54 | #endif
55 |
--------------------------------------------------------------------------------
/ZoeDepth/MiDaS/tf/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import sys
3 | import cv2
4 |
5 |
6 | def write_pfm(path, image, scale=1):
7 | """Write pfm file.
8 | Args:
9 | path (str): pathto file
10 | image (array): data
11 | scale (int, optional): Scale. Defaults to 1.
12 | """
13 |
14 | with open(path, "wb") as file:
15 | color = None
16 |
17 | if image.dtype.name != "float32":
18 | raise Exception("Image dtype must be float32.")
19 |
20 | image = np.flipud(image)
21 |
22 | if len(image.shape) == 3 and image.shape[2] == 3: # color image
23 | color = True
24 | elif (
25 | len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1
26 | ): # greyscale
27 | color = False
28 | else:
29 | raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")
30 |
31 |         file.write(("PF\n" if color else "Pf\n").encode())
32 | file.write("%d %d\n".encode() % (image.shape[1], image.shape[0]))
33 |
34 | endian = image.dtype.byteorder
35 |
36 |         if endian == "<" or (endian == "=" and sys.byteorder == "little"):
37 | scale = -scale
38 |
39 | file.write("%f\n".encode() % scale)
40 |
41 | image.tofile(file)
42 |
43 | def read_image(path):
44 | """Read image and output RGB image (0-1).
45 | Args:
46 | path (str): path to file
47 | Returns:
48 | array: RGB image (0-1)
49 | """
50 | img = cv2.imread(path)
51 |
52 | if img.ndim == 2:
53 | img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
54 |
55 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) / 255.0
56 |
57 | return img
58 |
59 | def write_depth(path, depth, bits=1):
60 | """Write depth map to pfm and png file.
61 | Args:
62 | path (str): filepath without extension
63 | depth (array): depth
64 | """
65 | write_pfm(path + ".pfm", depth.astype(np.float32))
66 |
67 | depth_min = depth.min()
68 | depth_max = depth.max()
69 |
70 | max_val = (2**(8*bits))-1
71 |
72 | if depth_max - depth_min > np.finfo("float").eps:
73 | out = max_val * (depth - depth_min) / (depth_max - depth_min)
74 | else:
75 |         out = np.zeros(depth.shape, dtype=depth.dtype)
76 |
77 | if bits == 1:
78 | cv2.imwrite(path + ".png", out.astype("uint8"))
79 | elif bits == 2:
80 | cv2.imwrite(path + ".png", out.astype("uint16"))
81 |
82 | return
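
A small end-to-end sketch of these helpers (paths are illustrative): read an image, substitute a synthetic depth map where a model's prediction would go, and write both the .pfm and a 16-bit .png:

    import os
    import numpy as np

    img = read_image("input/example.png")            # HxWx3 RGB in [0, 1]
    h, w = img.shape[:2]
    depth = np.linspace(0, 1, h * w, dtype=np.float32).reshape(h, w)  # stand-in depth
    os.makedirs("output", exist_ok=True)
    write_depth("output/example", depth, bits=2)     # writes .pfm + 16-bit png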
--------------------------------------------------------------------------------