├── docs ├── GRIP-MQP-2016.pdf └── presentation │ ├── GRIP_Poster_24x36.pdf │ ├── GRIP_Poster_36x48.pdf │ ├── GRIP_Powerpoint.pptx │ ├── GRIP_Poster_24x36.pptx │ └── GRIP_Poster_36x48.pptx ├── samples └── images │ ├── gompei.jpeg │ ├── fall-gompei.jpeg │ ├── aerial-assist.jpeg │ ├── rebound-rumble.jpeg │ ├── winter-gompei.jpeg │ └── ultimate-ascent.jpeg ├── ui ├── src │ ├── test │ │ ├── java │ │ │ └── edu │ │ │ │ └── wpi │ │ │ │ └── grip │ │ │ │ └── ui │ │ │ │ ├── UiTests.java │ │ │ │ ├── codegeneration │ │ │ │ ├── tools │ │ │ │ │ ├── GenType.java │ │ │ │ │ ├── PipelineInterfacer.java │ │ │ │ │ └── PyLine.java │ │ │ │ └── GenerationTesting.java │ │ │ │ └── util │ │ │ │ ├── MockGripPlatform.java │ │ │ │ └── TestAnnotationFXMLLoader.java │ │ └── resources │ │ │ └── edu │ │ │ └── wpi │ │ │ └── grip │ │ │ └── ui │ │ │ └── codegeneration │ │ │ └── tools │ │ │ ├── pipe │ │ │ └── AbsPipeline.cpp │ │ │ └── Handle.h │ └── main │ │ ├── resources │ │ └── edu │ │ │ └── wpi │ │ │ └── grip │ │ │ └── ui │ │ │ ├── codegeneration │ │ │ ├── python │ │ │ │ ├── enums │ │ │ │ │ ├── FlipCode.vm │ │ │ │ │ ├── Origin.vm │ │ │ │ │ └── BlurType.vm │ │ │ │ ├── operations │ │ │ │ │ ├── CV_transpose.vm │ │ │ │ │ ├── CV_bitwise_not.vm │ │ │ │ │ ├── CV_bitwise_xor.vm │ │ │ │ │ ├── CV_max.vm │ │ │ │ │ ├── CV_min.vm │ │ │ │ │ ├── CV_add.vm │ │ │ │ │ ├── New_Point.vm │ │ │ │ │ ├── CV_subtract.vm │ │ │ │ │ ├── CV_bitwise_or.vm │ │ │ │ │ ├── CV_bitwise_and.vm │ │ │ │ │ ├── New_Size.vm │ │ │ │ │ ├── CV_absdiff.vm │ │ │ │ │ ├── CV_applyColorMap.vm │ │ │ │ │ ├── CV_cvtColor.vm │ │ │ │ │ ├── CV_medianBlur.vm │ │ │ │ │ ├── CV_extractChannel.vm │ │ │ │ │ ├── Mask.vm │ │ │ │ │ ├── CV_flip.vm │ │ │ │ │ ├── CV_multiply.vm │ │ │ │ │ ├── CV_divide.vm │ │ │ │ │ ├── CV_scaleAdd.vm │ │ │ │ │ ├── CV_compare.vm │ │ │ │ │ ├── Valve.vm │ │ │ │ │ ├── Normalize.vm │ │ │ │ │ ├── Lines.vm │ │ │ │ │ ├── Convex_Hulls.vm │ │ │ │ │ ├── Switch.vm │ │ │ │ │ ├── CV_Threshold.vm │ │ │ │ │ ├── CV_addWeighted.vm │ │ │ │ │ ├── 
CV_resize.vm │ │ │ │ │ ├── Find_Min_and_Max.vm │ │ │ │ │ ├── Resize_Image.vm │ │ │ │ │ ├── Distance_Transform.vm │ │ │ │ │ ├── CV_Laplacian.vm │ │ │ │ │ ├── RGB_Threshold.vm │ │ │ │ │ ├── Threshold_Moving.vm │ │ │ │ │ ├── Desaturate.vm │ │ │ │ │ ├── HSV_Threshold.vm │ │ │ │ │ ├── HSL_Threshold.vm │ │ │ │ │ ├── CV_Canny.vm │ │ │ │ │ ├── CV_Sobel.vm │ │ │ │ │ ├── Find_Contours.vm │ │ │ │ │ ├── CV_erode.vm │ │ │ │ │ ├── CV_dilate.vm │ │ │ │ │ ├── Watershed.vm │ │ │ │ │ ├── Get_Mat_Info.vm │ │ │ │ │ ├── CV_GaussianBlur.vm │ │ │ │ │ ├── Filter_Lines.vm │ │ │ │ │ └── CV_adaptiveThreshold.vm │ │ │ │ └── step.vm │ │ │ ├── java │ │ │ │ ├── MemberSources.vm │ │ │ │ ├── CV Step.vm │ │ │ │ ├── operations │ │ │ │ │ ├── CV_transpose.vm │ │ │ │ │ ├── CV_bitwise_not.vm │ │ │ │ │ ├── CV_add.vm │ │ │ │ │ ├── CV_max.vm │ │ │ │ │ ├── CV_min.vm │ │ │ │ │ ├── CV_medianBlur.vm │ │ │ │ │ ├── CV_cvtColor.vm │ │ │ │ │ ├── CV_subtract.vm │ │ │ │ │ ├── New_Point.vm │ │ │ │ │ ├── CV_bitwise_or.vm │ │ │ │ │ ├── CV_extractChannel.vm │ │ │ │ │ ├── CV_absdiff.vm │ │ │ │ │ ├── CV_bitwise_and.vm │ │ │ │ │ ├── CV_bitwise_xor.vm │ │ │ │ │ ├── New_Size.vm │ │ │ │ │ ├── CV_applyColorMap.vm │ │ │ │ │ ├── CV_divide.vm │ │ │ │ │ ├── CV_multiply.vm │ │ │ │ │ ├── CV_scaleAdd.vm │ │ │ │ │ ├── CV_compare.vm │ │ │ │ │ ├── Valve.vm │ │ │ │ │ ├── Mask.vm │ │ │ │ │ ├── Normalize.vm │ │ │ │ │ ├── CV_Threshold.vm │ │ │ │ │ ├── Switch.vm │ │ │ │ │ ├── CV_addWeighted.vm │ │ │ │ │ ├── Distance_Transform.vm │ │ │ │ │ ├── CV_resize.vm │ │ │ │ │ ├── Resize_Image.vm │ │ │ │ │ ├── CV_Laplacian.vm │ │ │ │ │ ├── Lines.vm │ │ │ │ │ ├── CV_Canny.vm │ │ │ │ │ ├── RGB_Threshold.vm │ │ │ │ │ ├── CV_GaussianBlur.vm │ │ │ │ │ ├── HSV_Threshold.vm │ │ │ │ │ ├── HSL_Threshold.vm │ │ │ │ │ ├── Threshold_Moving.vm │ │ │ │ │ ├── CV_flip.vm │ │ │ │ │ ├── CV_Sobel.vm │ │ │ │ │ ├── CV_adaptiveThreshold.vm │ │ │ │ │ ├── Desaturate.vm │ │ │ │ │ ├── Find_Contours.vm │ │ │ │ │ ├── CV_rectangle.vm │ │ │ │ │ ├── Filter_Lines.vm │ │ │ │ │ ├── 
CV_erode.vm │ │ │ │ │ ├── CV_dilate.vm │ │ │ │ │ ├── Find_Min_and_Max.vm │ │ │ │ │ └── Find_Lines.vm │ │ │ │ ├── BasicStep.vm │ │ │ │ └── Switch.vm │ │ │ └── cpp │ │ │ │ ├── operations │ │ │ │ ├── CV_transpose.vm │ │ │ │ ├── CV_bitwise_not.vm │ │ │ │ ├── CV_medianBlur.vm │ │ │ │ ├── CV_add.vm │ │ │ │ ├── CV_max.vm │ │ │ │ ├── CV_min.vm │ │ │ │ ├── CV_cvtColor.vm │ │ │ │ ├── CV_subtract.vm │ │ │ │ ├── New_Point.vm │ │ │ │ ├── CV_flip.vm │ │ │ │ ├── New_Size.vm │ │ │ │ ├── CV_bitwise_or.vm │ │ │ │ ├── CV_extractChannel.vm │ │ │ │ ├── CV_absdiff.vm │ │ │ │ ├── CV_bitwise_and.vm │ │ │ │ ├── CV_bitwise_xor.vm │ │ │ │ ├── CV_applyColorMap.vm │ │ │ │ ├── CV_divide.vm │ │ │ │ ├── CV_multiply.vm │ │ │ │ ├── CV_scaleAdd.vm │ │ │ │ ├── CV_compare.vm │ │ │ │ ├── Valve.vm │ │ │ │ ├── CV_resize.vm │ │ │ │ ├── Mask.vm │ │ │ │ ├── CV_Threshold.vm │ │ │ │ ├── Normalize.vm │ │ │ │ ├── CV_addWeighted.vm │ │ │ │ ├── Switch.vm │ │ │ │ ├── CV_GaussianBlur.vm │ │ │ │ ├── CV_Laplacian.vm │ │ │ │ ├── Resize_Image.vm │ │ │ │ ├── Convex_Hulls.vm │ │ │ │ ├── Find_Contours.vm │ │ │ │ ├── CV_Canny.vm │ │ │ │ ├── HSV_Threshold.vm │ │ │ │ ├── RGB_Threshold.vm │ │ │ │ ├── Find_Min_and_Max.vm │ │ │ │ ├── CV_erode.vm │ │ │ │ ├── CV_dilate.vm │ │ │ │ ├── CV_Sobel.vm │ │ │ │ ├── Threshold_Moving.vm │ │ │ │ ├── CV_adaptiveThreshold.vm │ │ │ │ ├── HSL_Threshold.vm │ │ │ │ ├── CV_rectangle.vm │ │ │ │ ├── Desaturate.vm │ │ │ │ ├── Distance_Transform.vm │ │ │ │ ├── Filter_Lines.vm │ │ │ │ ├── Watershed.vm │ │ │ │ ├── Get_Mat_Info.vm │ │ │ │ └── Cascade_Classifier.vm │ │ │ │ └── Step.vm │ │ │ ├── icons │ │ │ ├── new.png │ │ │ ├── up.png │ │ │ ├── blur.png │ │ │ ├── crop.png │ │ │ ├── delete.png │ │ │ ├── deploy.png │ │ │ ├── down.png │ │ │ ├── export.png │ │ │ ├── first.png │ │ │ ├── grip.png │ │ │ ├── left.png │ │ │ ├── mask.png │ │ │ ├── next.png │ │ │ ├── open.png │ │ │ ├── opencv.png │ │ │ ├── pause.png │ │ │ ├── point.png │ │ │ ├── python.png │ │ │ ├── resize.png │ │ │ ├── right.png │ │ │ ├── save.png 
│ │ │ ├── size.png │ │ │ ├── start.png │ │ │ ├── stop.png │ │ │ ├── preview.png │ │ │ ├── previous.png │ │ │ ├── publish.png │ │ │ ├── save-as.png │ │ │ ├── settings.png │ │ │ ├── warning.png │ │ │ ├── add-image.png │ │ │ ├── add-webcam.png │ │ │ ├── desaturate.png │ │ │ ├── find-blobs.png │ │ │ ├── find-lines.png │ │ │ ├── rosorg-logo.png │ │ │ ├── threshold.png │ │ │ ├── convex-hulls.png │ │ │ ├── filter-lines.png │ │ │ ├── find-contours.png │ │ │ └── publish-video.png │ │ │ ├── roboto │ │ │ ├── Roboto-Bold.ttf │ │ │ ├── Roboto-Italic.ttf │ │ │ ├── Roboto-Regular.ttf │ │ │ └── Roboto-BoldItalic.ttf │ │ │ ├── OperationList.fxml │ │ │ └── preview │ │ │ └── Previews.fxml │ │ └── java │ │ └── edu │ │ └── wpi │ │ └── grip │ │ ├── ui │ │ ├── dragging │ │ │ ├── StepDragService.java │ │ │ └── OperationDragService.java │ │ ├── events │ │ │ └── SetStepsExpandedEvent.java │ │ ├── codegeneration │ │ │ └── data │ │ │ │ └── TOutput.java │ │ ├── Controller.java │ │ ├── util │ │ │ └── SupplierWithIO.java │ │ ├── UICommandLineHelper.java │ │ └── AboutDialogController.java │ │ └── core │ │ └── settings │ │ ├── AppSettingsBeanInfo.java │ │ └── ProjectSettingsBeanInfo.java ├── preloader │ ├── src │ │ └── main │ │ │ ├── resources │ │ │ └── edu │ │ │ │ └── wpi │ │ │ │ └── grip │ │ │ │ └── preloader │ │ │ │ ├── Preloader.css │ │ │ │ └── grip.png │ │ │ └── java │ │ │ └── edu │ │ │ └── wpi │ │ │ └── grip │ │ │ └── preloader │ │ │ └── Launch.java │ └── preloader.gradle.kts ├── installer-files │ ├── linux │ │ └── GRIP.png │ ├── mac │ │ └── GRIP.icns │ └── win │ │ ├── grip_TP6_icon.ico │ │ ├── GRIP-setup-icon.bmp │ │ └── file-associations.properties └── linuxLauncher │ └── linuxLauncher.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── .github ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── gradle-wrapper-validation.yml │ └── deploy-javadoc.yml ├── core ├── libs │ └── opencv-3.0.0-1.1-linux-frc.jar └── src │ ├── test │ ├── resources │ │ └── edu │ │ 
│ └── wpi │ │ │ └── grip │ │ │ ├── images │ │ │ ├── NotAnImage.txt │ │ │ ├── gompei.jpeg │ │ │ ├── GRIP_Logo.png │ │ │ └── testphoto.png │ │ │ └── scripts │ │ │ ├── addition.py │ │ │ ├── addition-wrong-output-count.py │ │ │ ├── addition-wrong-output-type.py │ │ │ ├── addition-subtraction.py │ │ │ └── addition-with-name-and-description.py │ └── java │ │ └── edu │ │ └── wpi │ │ └── grip │ │ └── core │ │ ├── settings │ │ └── SettingsSanityTest.java │ │ ├── operations │ │ └── network │ │ │ ├── MockNetworkReceiver.java │ │ │ ├── ros │ │ │ ├── MockROSManager.java │ │ │ └── ROSPackageSanityTest.java │ │ │ ├── NetworkPackageSanityTest.java │ │ │ └── networktables │ │ │ └── NetworkTablesSanityTest.java │ │ ├── sockets │ │ ├── MockOutputSocketFactory.java │ │ ├── MockInputSocket.java │ │ ├── MockOutputSocket.java │ │ ├── MockInputSocketFactory.java │ │ └── SocketsSanityTest.java │ │ ├── util │ │ ├── MockFileManager.java │ │ ├── service │ │ │ └── ServiceSanityTest.java │ │ ├── MockExceptionWitness.java │ │ └── UtilitySanityTest.java │ │ ├── events │ │ └── EventsSanityTest.java │ │ ├── sources │ │ ├── MockFrameGrabberFactory.java │ │ └── SimpleMockFrameGrabber.java │ │ ├── MockOperation.java │ │ ├── MockStep.java │ │ ├── MockConnection.java │ │ └── ManualPipelineRunner.java │ └── main │ └── java │ └── edu │ └── wpi │ └── grip │ ├── core │ ├── util │ │ ├── service │ │ │ ├── package-info.java │ │ │ └── ServiceRestartPolicy.java │ │ └── GripMode.java │ ├── settings │ │ ├── Settings.java │ │ └── Setting.java │ ├── events │ │ ├── StopPipelineEvent.java │ │ ├── RunStartedEvent.java │ │ ├── RenderEvent.java │ │ ├── StartSingleBenchmarkRunEvent.java │ │ ├── RunStoppedEvent.java │ │ ├── LoggableEvent.java │ │ ├── RunPipelineEvent.java │ │ ├── BenchmarkEvent.java │ │ ├── DirtiesSaveEvent.java │ │ ├── AppSettingsChangedEvent.java │ │ ├── ExceptionClearedEvent.java │ │ ├── CodeGenerationSettingsChangedEvent.java │ │ ├── EventLogger.java │ │ └── ProjectSettingsChangedEvent.java │ ├── cuda 
│ │ ├── CudaAccelerationMode.java │ │ ├── NullAccelerationMode.java │ │ ├── NullCudaDetector.java │ │ ├── AccelerationMode.java │ │ └── CudaDetector.java │ ├── operations │ │ ├── network │ │ │ ├── Manager.java │ │ │ ├── Publishable.java │ │ │ ├── MapNetworkReceiverFactory.java │ │ │ ├── ros │ │ │ │ ├── ROSNetworkPublisherFactory.java │ │ │ │ └── ROSMessagePublisher.java │ │ │ ├── MapNetworkPublisherFactory.java │ │ │ ├── PublishableRosProxy.java │ │ │ ├── BooleanPublishable.java │ │ │ └── NumberPublishable.java │ │ ├── templated │ │ │ └── package-info.java │ │ └── opencv │ │ │ └── enumeration │ │ │ └── FlipCode.java │ ├── GripFileModule.java │ ├── sources │ │ ├── CameraSourceUpdater.java │ │ └── GripSourcesHardwareModule.java │ ├── PreviousNext.java │ ├── FileManager.java │ ├── exception │ │ ├── GripServerException.java │ │ └── GripException.java │ ├── observables │ │ └── Observer.java │ ├── StepIndexer.java │ └── sockets │ │ ├── OutputSocket.java │ │ └── NoSocketTypeLabel.java │ └── generated │ ├── opencv_core │ └── enumeration │ │ ├── HammingEnum.java │ │ ├── RNGEnum.java │ │ ├── Enum.java │ │ ├── SparseMatEnum.java │ │ ├── GemmFlagsEnum.java │ │ ├── FileStorageEnum.java │ │ ├── LineTypesEnum.java │ │ ├── FormatterEnum.java │ │ ├── PCAFlagsEnum.java │ │ ├── MatEnum.java │ │ ├── UMatEnum.java │ │ ├── TermCriteriaTypeEnum.java │ │ ├── UMatUsageFlagsEnum.java │ │ ├── ParamEnum.java │ │ ├── NormTypesEnum.java │ │ ├── UMatDataEnum.java │ │ ├── CmpTypesEnum.java │ │ ├── ReduceTypesEnum.java │ │ ├── SolveLPResultEnum.java │ │ └── SortFlagsEnum.java │ └── opencv_imgproc │ └── enumeration │ ├── UndistortTypesEnum.java │ ├── InterpolationMasksEnum.java │ ├── DistanceTransformMasksEnum.java │ ├── DistanceTypesEnum.java │ ├── GrabCutClassesEnum.java │ ├── RectanglesIntersectTypesEnum.java │ ├── DistanceTransformLabelTypesEnum.java │ ├── LineSegmentDetectorModesEnum.java │ ├── MorphShapesEnum.java │ └── FloodFillFlagsEnum.java ├── codecov.yml ├── annotation ├── src │ └── main 
│ │ └── java │ │ └── edu │ │ └── wpi │ │ └── grip │ │ └── annotation │ │ └── operation │ │ ├── OperationCategory.java │ │ └── PublishableObject.java ├── annotation.gradle.kts └── README.md ├── config └── checkstyle │ └── checkstyleSuppressions.xml └── settings.gradle /docs/GRIP-MQP-2016.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/GRIP-MQP-2016.pdf -------------------------------------------------------------------------------- /samples/images/gompei.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/gompei.jpeg -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/UiTests.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui; 2 | 3 | public interface UiTests { 4 | } 5 | -------------------------------------------------------------------------------- /samples/images/fall-gompei.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/fall-gompei.jpeg -------------------------------------------------------------------------------- /ui/preloader/src/main/resources/edu/wpi/grip/preloader/Preloader.css: -------------------------------------------------------------------------------- 1 | .root { 2 | -fx-background-color: #eee; 3 | } 4 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- 
/samples/images/aerial-assist.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/aerial-assist.jpeg -------------------------------------------------------------------------------- /samples/images/rebound-rumble.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/rebound-rumble.jpeg -------------------------------------------------------------------------------- /samples/images/winter-gompei.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/winter-gompei.jpeg -------------------------------------------------------------------------------- /ui/installer-files/linux/GRIP.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/installer-files/linux/GRIP.png -------------------------------------------------------------------------------- /ui/installer-files/mac/GRIP.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/installer-files/mac/GRIP.icns -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | [//]: # (Please ensure that the "Allow edits from maintainers" checkbox is checked. Thanks!) 
2 | -------------------------------------------------------------------------------- /samples/images/ultimate-ascent.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/samples/images/ultimate-ascent.jpeg -------------------------------------------------------------------------------- /docs/presentation/GRIP_Poster_24x36.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/presentation/GRIP_Poster_24x36.pdf -------------------------------------------------------------------------------- /docs/presentation/GRIP_Poster_36x48.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/presentation/GRIP_Poster_36x48.pdf -------------------------------------------------------------------------------- /docs/presentation/GRIP_Powerpoint.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/presentation/GRIP_Powerpoint.pptx -------------------------------------------------------------------------------- /core/libs/opencv-3.0.0-1.1-linux-frc.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/core/libs/opencv-3.0.0-1.1-linux-frc.jar -------------------------------------------------------------------------------- /docs/presentation/GRIP_Poster_24x36.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/presentation/GRIP_Poster_24x36.pptx -------------------------------------------------------------------------------- /docs/presentation/GRIP_Poster_36x48.pptx: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/docs/presentation/GRIP_Poster_36x48.pptx -------------------------------------------------------------------------------- /ui/installer-files/win/grip_TP6_icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/installer-files/win/grip_TP6_icon.ico -------------------------------------------------------------------------------- /ui/installer-files/win/GRIP-setup-icon.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/installer-files/win/GRIP-setup-icon.bmp -------------------------------------------------------------------------------- /ui/installer-files/win/file-associations.properties: -------------------------------------------------------------------------------- 1 | extension=grip 2 | mime-type=text/plain 3 | description=GRIP pipeline 4 | icon=grip_TP6_icon.ico 5 | -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/images/NotAnImage.txt: -------------------------------------------------------------------------------- 1 | I'm not an image! What a surprise! 2 | This is for testing reading in text files using loadImage. 
-------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/enums/FlipCode.vm: -------------------------------------------------------------------------------- 1 | FlipCode = Enum('FlipCode', [('X_AXIS' , 0), ('Y_AXIS', 1), ('BOTH_AXES', -1)]) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/enums/Origin.vm: -------------------------------------------------------------------------------- 1 | Origin = Enum('Origin', 'Top_Left Top_Right Bottom_Left Bottom_Right Center') 2 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/enums/BlurType.vm: -------------------------------------------------------------------------------- 1 | BlurType = Enum('BlurType', 'Box_Blur Gaussian_Blur Median_Filter Bilateral_Filter') -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/new.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/up.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/blur.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/blur.png 
-------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/crop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/crop.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/delete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/delete.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/deploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/deploy.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/down.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/export.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/export.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/first.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/first.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/grip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/grip.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/left.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/left.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/mask.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/next.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/next.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/open.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/open.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/opencv.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/opencv.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/pause.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/pause.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/point.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/point.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/python.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/python.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/resize.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/resize.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/right.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/right.png -------------------------------------------------------------------------------- 
/ui/src/main/resources/edu/wpi/grip/ui/icons/save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/save.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/size.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/size.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/start.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/start.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/stop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/stop.png -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/images/gompei.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/core/src/test/resources/edu/wpi/grip/images/gompei.jpeg -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/preview.png 
-------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/previous.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/previous.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/publish.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/publish.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/save-as.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/save-as.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/settings.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/warning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/warning.png -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/images/GRIP_Logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/core/src/test/resources/edu/wpi/grip/images/GRIP_Logo.png -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/images/testphoto.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/core/src/test/resources/edu/wpi/grip/images/testphoto.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/add-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/add-image.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/add-webcam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/add-webcam.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/desaturate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/desaturate.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/find-blobs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/find-blobs.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/find-lines.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/find-lines.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/rosorg-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/rosorg-logo.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/threshold.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/threshold.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/convex-hulls.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/convex-hulls.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/filter-lines.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/filter-lines.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/find-contours.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/find-contours.png 
-------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/icons/publish-video.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/icons/publish-video.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Bold.ttf -------------------------------------------------------------------------------- /ui/preloader/src/main/resources/edu/wpi/grip/preloader/grip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/preloader/src/main/resources/edu/wpi/grip/preloader/grip.png -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Italic.ttf -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-Regular.ttf -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/MemberSources.vm: 
-------------------------------------------------------------------------------- 1 | #if($i<$numSources) 2 | protected Mat source$i; 3 | #set($i= $i +1) 4 | #parse("$vmLoc/MemberSources.vm") 5 | #end -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-BoldItalic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WPIRoboticsProjects/GRIP/HEAD/ui/src/main/resources/edu/wpi/grip/ui/roboto/Roboto-BoldItalic.ttf -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/util/service/package-info.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Extensions for the {@link com.google.common.util.concurrent.Service} 3 | */ 4 | package edu.wpi.grip.core.util.service; 5 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/settings/Settings.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.settings; 2 | 3 | /** 4 | * Marker interface for a settings class. 
5 | */ 6 | public interface Settings { 7 | } 8 | -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/codegeneration/tools/GenType.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.codegeneration.tools; 2 | 3 | public enum GenType { 4 | NUMBER, BOOLEAN, POINT, SIZE, LINES, CONTOURS, BLOBS, LIST, IMAGE 5 | } 6 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/StopPipelineEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * This event is posted to tell the pipeline to stop executing. 5 | */ 6 | public class StopPipelineEvent { 7 | } 8 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/CV Step.vm: -------------------------------------------------------------------------------- 1 | #if ($step.name() == "Switch") 2 | #parse("$vmLoc/Switch.vm") 3 | #elseif ( $step.name() == "Valve") 4 | #parse("$vmLoc/Valve.vm") 5 | #else 6 | #parse("$vmLoc/BasicStep.vm") 7 | #end -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.3-all.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/cuda/CudaAccelerationMode.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.cuda; 2 | 3 | public class 
CudaAccelerationMode implements AccelerationMode { 4 | @Override 5 | public boolean isUsingCuda() { 6 | return true; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/cuda/NullAccelerationMode.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.cuda; 2 | 3 | public class NullAccelerationMode implements AccelerationMode { 4 | @Override 5 | public boolean isUsingCuda() { 6 | return false; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/cuda/NullCudaDetector.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.cuda; 2 | 3 | public class NullCudaDetector implements CudaDetector { 4 | @Override 5 | public boolean isCompatibleCudaInstalled() { 6 | return false; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/RunStartedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event fired when the pipeline starts running. This is guaranteed to be followed by a 5 | * corresponding {@link RunStoppedEvent}. 6 | */ 7 | public class RunStartedEvent { 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/Manager.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | 4 | /** 5 | * A network manager that handles all of the API overhead for dealing with a specific network 6 | * protocol. 
7 | */ 8 | public interface Manager { 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_transpose.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the transpose of a Mat. 3 | * @param src the source Mat. 4 | * @param dst the transpose of src. 5 | */ 6 | void $className::#func($step ["src", "dst"]) { 7 | cv::transpose(src, dst); 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/Publishable.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | /** 4 | * Interface for classes that can be published with a NetworkPublisher. 5 | * 6 | * @see PublishAnnotatedOperation 7 | */ 8 | public interface Publishable { 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_transpose.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the transpose of a Mat. 3 | * @param src the source Mat. 4 | * @param dst the transpose of src. 
5 | */ 6 | private void $tMeth.name($step.name())(Mat src, Mat dst) { 7 | Core.transpose(src, dst); 8 | } -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/codegeneration/GenerationTesting.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.codegeneration; 2 | 3 | /** 4 | * Used by JUnit to indicate tasks belong to the generation category This is intentionally a blank 5 | * interface 6 | */ 7 | public interface GenerationTesting { 8 | 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/util/MockGripPlatform.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.util; 2 | 3 | 4 | import com.google.common.eventbus.EventBus; 5 | 6 | public class MockGripPlatform extends GripPlatform { 7 | public MockGripPlatform(EventBus eventBus) { 8 | super(eventBus); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /.github/workflows/gradle-wrapper-validation.yml: -------------------------------------------------------------------------------- 1 | name: "Validate Gradle Wrapper" 2 | on: [push, pull_request] 3 | 4 | jobs: 5 | validation: 6 | name: "Validation" 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - uses: gradle/wrapper-validation-action@v1 11 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/settings/SettingsSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.settings; 2 | 3 | import com.google.common.testing.AbstractPackageSanityTests; 4 | 5 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 6 | public class SettingsSanityTest extends AbstractPackageSanityTests { 7 | 8 | } 9 | 
-------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/operations/network/MockNetworkReceiver.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | public class MockNetworkReceiver implements MapNetworkReceiverFactory { 4 | 5 | @Override 6 | public NetworkReceiver create(String path) { 7 | return null; 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/RenderEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event that occurs when the pipeline has completed a run. Anything that needs to render an 5 | * image from the pipeline should do so by subscribing to this event. 6 | */ 7 | public class RenderEvent { 8 | } 9 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_bitwise_not.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per element inverse of an image. 3 | * @param src the image to invert. 4 | * @param dst the inversion of the input image. 
5 | */ 6 | void $className::#func($step ["src", "dst"]) { 7 | cv::bitwise_not(src, dst); 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/GripFileModule.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | import com.google.inject.AbstractModule; 4 | 5 | public class GripFileModule extends AbstractModule { 6 | 7 | @Override 8 | protected void configure() { 9 | bind(FileManager.class).to(GripFileManager.class); 10 | } 11 | 12 | } 13 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/sources/CameraSourceUpdater.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sources; 2 | 3 | import org.bytedeco.javacpp.opencv_core.Mat; 4 | 5 | public interface CameraSourceUpdater { 6 | void setFrameRate(double value); 7 | 8 | void copyNewMat(Mat matToCopy); 9 | 10 | void updatesComplete(); 11 | } 12 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/cuda/AccelerationMode.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.cuda; 2 | 3 | /** 4 | * App-wide hardware acceleration mode. 5 | */ 6 | public interface AccelerationMode { 7 | 8 | /** 9 | * Flag marking that GRIP is using CUDA-accelerated OpenCV. 10 | */ 11 | boolean isUsingCuda(); 12 | 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_bitwise_not.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per element inverse of an image. 3 | * @param src the image to invert. 4 | * @param dst the inversion of the input image. 
5 | */ 6 | private void $tMeth.name($step.name())(Mat src, Mat dst) { 7 | Core.bitwise_not(src, dst); 8 | } -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | bot: WPIRoboticsProjects-Bot 3 | comment: 4 | layout: header, changes, diff, sunburst 5 | coverage: 6 | notify: 7 | gitter: 8 | default: 9 | branches: 10 | - master 11 | url: https://webhooks.gitter.im/e/71322c5dda6c78c5ee0d 12 | status: 13 | patch: 14 | default: {} 15 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sockets/MockOutputSocketFactory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | 5 | public class MockOutputSocketFactory extends OutputSocketImpl.FactoryImpl { 6 | public MockOutputSocketFactory(EventBus eventBus) { 7 | super(eventBus); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_transpose.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src): 3 | """Computes the transpose of a Mat. 4 | Args: 5 | src: A numpy.ndarray. 6 | Returns: 7 | A numpy.ndarray. 8 | """ 9 | return cv2.transpose(src) -------------------------------------------------------------------------------- /annotation/src/main/java/edu/wpi/grip/annotation/operation/OperationCategory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.annotation.operation; 2 | 3 | /** 4 | * The categories that entries can be in. 
5 | */ 6 | public enum OperationCategory { 7 | IMAGE_PROCESSING, 8 | FEATURE_DETECTION, 9 | NETWORK, 10 | LOGICAL, 11 | OPENCV, 12 | MISCELLANEOUS, 13 | } 14 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/MapNetworkReceiverFactory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | 4 | /** 5 | * A factory to create {@link NetworkReceiver NetworkRecievers}. 6 | */ 7 | @FunctionalInterface 8 | public interface MapNetworkReceiverFactory { 9 | NetworkReceiver create(String path); 10 | } 11 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/operations/network/ros/MockROSManager.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network.ros; 2 | 3 | public class MockROSManager implements ROSNetworkPublisherFactory { 4 | 5 | @Override 6 | public ROSMessagePublisher create(C converter) { 7 | return null; 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/cuda/CudaDetector.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.cuda; 2 | 3 | /** 4 | * Detects CUDA installs. 5 | */ 6 | public interface CudaDetector { 7 | 8 | /** 9 | * Checks if a CUDA runtime is installed that is compatible with what we need for OpenCV. 10 | */ 11 | boolean isCompatibleCudaInstalled(); 12 | 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_medianBlur.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a median blur on the image. 
3 | * @param src image to blur. 4 | * @param kSize size of blur. 5 | * @param dst output of blur. 6 | */ 7 | void $className::#func($step ["src", "kSize", "dst"]) { 8 | cv::medianBlur(src, dst, (int)kSize); 9 | } 10 | -------------------------------------------------------------------------------- /annotation/annotation.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | `java-library` 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | dependencies { 10 | compileOnly(group = "com.google.auto.service", name = "auto-service", version = "1.0-rc6") 11 | annotationProcessor(group = "com.google.auto.service", name = "auto-service", version = "1.0-rc6") 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_add.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Calculates the sum of two Mats. 3 | * @param src1 the first Mat. 4 | * @param src2 the second Mat. 5 | * @param out the Mat that is the sum of the two Mats. 6 | */ 7 | void $className::#func($step ["src1", "src2", "out"]) { 8 | cv::add(src1, src2, out); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_max.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a per element max. 3 | * @param src1 first image to use. 4 | * @param src2 second image to use. 5 | * @param dst the per element max of two images. 
6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::max(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_min.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a per element min. 3 | * @param src1 first image to use. 4 | * @param src2 second image to use. 5 | * @param dst the per element min of two images. 6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::min(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_add.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Calculates the sum of two Mats. 3 | * @param src1 the first Mat 4 | * @param src2 the second Mat 5 | * @param out the Mat that is the sum of the two Mats 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat out) { 8 | Core.add(src1, src2, out); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_max.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a per element max. 3 | * @param src1 first image to use. 4 | * @param src2 second image to use. 5 | * @param dst the per element max of two images. 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat dst) { 8 | Core.max(src1, src2, dst); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_min.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a per element min. 
3 | * @param src1 first image to use. 4 | * @param src2 second image to use. 5 | * @param dst the per element min of two images. 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat dst) { 8 | Core.min(src1, src2, dst); 9 | } -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/util/MockFileManager.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util; 2 | 3 | import edu.wpi.grip.core.FileManager; 4 | 5 | public class MockFileManager implements FileManager { 6 | 7 | @Override 8 | public void saveImage(byte[] image, String fileName) { 9 | // No body here because this is for testing only. 10 | } 11 | 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_cvtColor.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Converts an image from one color space to another. 3 | * @param src Image to convert. 4 | * @param code conversion code. 5 | * @param dst converted Image. 6 | */ 7 | void $className::#func($step ["src", "code", "dst"]) { 8 | cv::cvtColor(src, dst, code); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_medianBlur.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Performs a median blur on the image. 3 | * @param src image to blur. 4 | * @param kSize size of blur. 5 | * @param dst output of blur. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat src, double kSize, Mat dst) { 8 | Imgproc.medianBlur(src, dst, (int)kSize); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_cvtColor.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Converts an image from one color space to another. 3 | * @param src Image to convert. 4 | * @param code conversion code. 5 | * @param dst converted Image. 6 | */ 7 | private void $tMeth.name($step.name())(Mat src, int code, Mat dst) { 8 | Imgproc.cvtColor(src, dst, code); 9 | } -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sockets/MockInputSocket.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | 4 | import com.google.common.eventbus.EventBus; 5 | 6 | public class MockInputSocket extends InputSocketImpl { 7 | public MockInputSocket(String name) { 8 | super(new EventBus(), SocketHints.Outputs.createBooleanSocketHint(name, false)); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/util/GripMode.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util; 2 | 3 | /** 4 | * An enum that indicates if GRIP is running in GUI mode with JavaFX or as a headless command line 5 | * application. 
To the get the mode, this can be injected into a class (ie: @Inject private GRIPMode 6 | * mode;) 7 | */ 8 | public enum GripMode { 9 | GUI, HEADLESS 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_bitwise_not.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1): 3 | """Computes the per element inverse of an image. 4 | Args: 5 | src1: A numpy.ndarray. 6 | Returns: 7 | The inverse of the numpy.ndarray. 8 | """ 9 | return cv2.bitwise_not(src1) -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/StartSingleBenchmarkRunEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event representing the start of a single benchmarked pipeline run. 
5 | */ 6 | public class StartSingleBenchmarkRunEvent implements RunPipelineEvent { 7 | 8 | @Override 9 | public boolean pipelineShouldRun() { 10 | return true; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sockets/MockOutputSocket.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | 5 | public class MockOutputSocket extends OutputSocketImpl { 6 | public MockOutputSocket(String socketName) { 7 | super(new EventBus(), SocketHints.Outputs.createBooleanSocketHint(socketName, false)); 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_subtract.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Subtracts the second Mat from the first. 3 | * @param src1 the first Mat. 4 | * @param src2 the second Mat. 5 | * @param dst the Mat that is the subtraction of the two Mats. 6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::subtract(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/New_Point.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Fills a point with given x and y values. 3 | * @param x the x value to put in the point. 4 | * @param y the y value to put in the point. 5 | * @param point the point to fill. 
6 | */ 7 | void $className::#func($step ["x", "y", "point"]) { 8 | point.x = x; 9 | point.y = y; 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_subtract.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Subtracts the second Mat from the first. 3 | * @param src1 the first Mat 4 | * @param src2 the second Mat 5 | * @param out the Mat that is the subtraction of the two Mats 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat out) { 8 | Core.subtract(src1, src2, out); 9 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/ros/ROSNetworkPublisherFactory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network.ros; 2 | 3 | 4 | /** 5 | * A factory that can be used to publish ROS messages. 6 | */ 7 | @FunctionalInterface 8 | public interface ROSNetworkPublisherFactory { 9 | ROSMessagePublisher create(C converter); 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_flip.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Flips an image along X, Y or both axes. 3 | * @param src Image to flip. 4 | * @param flipcode FlipCode of which direction to flip. 5 | * @param dst flipped version of the Image. 
6 | */ 7 | void $className::#func($step ["src", "flipcode", "dst"]) { 8 | cv::flip(src, dst, flipcode); 9 | } 10 | -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/scripts/addition.py: -------------------------------------------------------------------------------- 1 | import edu.wpi.grip.core.sockets as grip 2 | 3 | inputs = [ 4 | grip.SocketHints.createNumberSocketHint("a", 0.0), 5 | grip.SocketHints.createNumberSocketHint("b", 0.0) 6 | ] 7 | 8 | outputs = [ 9 | grip.SocketHints.Outputs.createNumberSocketHint("sum", 0.0), 10 | ] 11 | 12 | 13 | def perform(a, b): 14 | return a + b 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/New_Size.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Fills a size with given width and height. 3 | * @param width the width of the size. 4 | * @param height the height of the size. 5 | * @param size the size to fill. 6 | */ 7 | void $className::#func($step, ["width", "height", "size"]) { 8 | size.height = height; 9 | size.width = width; 10 | } 11 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/PreviousNext.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | 4 | /** 5 | * An Object that can switch its value. 6 | */ 7 | public interface PreviousNext { 8 | 9 | /** 10 | * Perform the next action on this object. 11 | */ 12 | void next(); 13 | 14 | /** 15 | * Perform the previous action on this object. 
16 | */ 17 | void previous(); 18 | 19 | } 20 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/HammingEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum HammingEnum { 6 | 7 | normType(opencv_core.Hamming.normType); 8 | 9 | public final int value; 10 | 11 | HammingEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/New_Point.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Fills a point with given x and y values. 3 | * @param x the x value to put in the point 4 | * @param y the y value to put in the point 5 | * @param point the point to fill 6 | */ 7 | private void $tMeth.name($step.name())(double x, double y, Point point) { 8 | point.x = x; 9 | point.y = y; 10 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_bitwise_xor.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Computes the per channel exclusive or of two images. 4 | Args: 5 | src1: A numpy.ndarray. 6 | Returns: 7 | The inverse of the numpy.ndarray. 
8 | """ 9 | return cv2.bitwise_xor(src1, src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_max.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Performs a per element max. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | Returns: 8 | The max as a numpy.ndarray. 9 | """ 10 | return cv2.max(src1, src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_min.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Performs a per element min. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | Returns: 8 | The min as a numpy.ndarray. 9 | """ 10 | return cv2.min(src1, src2) -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/dragging/StepDragService.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.dragging; 2 | 3 | import edu.wpi.grip.core.Step; 4 | 5 | import com.google.inject.Singleton; 6 | 7 | /** 8 | * Service for dragging and dropping a step. 9 | */ 10 | @Singleton 11 | public class StepDragService extends DragService { 12 | 13 | public StepDragService() { 14 | super("step"); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_bitwise_or.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel or of two Images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 
5 | * @param dst the result image when the or is performed. 6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::bitwise_or(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_extractChannel.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Extracts given channel from an image. 3 | * @param src the image to extract. 4 | * @param channel zero indexed channel number to extract. 5 | * @param dst output image. 6 | */ 7 | void $className::#func($step ["src", "channel", "dst"]) { 8 | cv::extractChannel(src, dst, (int)channel); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_bitwise_or.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel or of two images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 5 | * @param dst the result image when the or is performed. 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat dst) { 8 | Core.bitwise_or(src1, src2, dst); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_extractChannel.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Extracts given channel from an image. 3 | * @param src the image to extract. 4 | * @param channel zero indexed channel number to extract. 5 | * @param dst output image. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat src, double channel, Mat dst) { 8 | Core.extractChannel(src, dst, (int)channel); 9 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/MapNetworkPublisherFactory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | 4 | import java.util.Set; 5 | 6 | /** 7 | * A factory to create {@link MapNetworkPublisher MapNetworkPublishers}. 8 | */ 9 | @FunctionalInterface 10 | public interface MapNetworkPublisherFactory { 11 | MapNetworkPublisher create(Set keys); 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_absdiff.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Calculates the absolute difference between two Mats. 3 | * @param src1 the first Mat. 4 | * @param src2 the second Mat. 5 | * @param out the Mat that is the absolute difference between the two Mats. 6 | */ 7 | void $className::#func($step, ["src1", "src2", "out"]) { 8 | cv::absdiff(src1, src2, out); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_bitwise_and.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel and of two Images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 5 | * @param dst the result image when the and is performed. 
6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::bitwise_and(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_bitwise_xor.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel xor of two Images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 5 | * @param dst the result image when the xor is performed. 6 | */ 7 | void $className::#func($step ["src1", "src2", "dst"]) { 8 | cv::bitwise_xor(src1, src2, dst); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_absdiff.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Calculates the absolute difference between two Mats. 3 | * @param src1 the first Mat 4 | * @param src2 the second Mat 5 | * @param out the Mat that is the absolute difference between the two Mats 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat out) { 8 | Core.absdiff(src1, src2, out); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_bitwise_and.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel and of two images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 5 | * @param dst the result image when the and is performed. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat dst) { 8 | Core.bitwise_and(src1, src2, dst); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_bitwise_xor.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the per channel xor of two images. 3 | * @param src1 The first image to use. 4 | * @param src2 The second image to use. 5 | * @param dst the result image when the xor is performed. 6 | */ 7 | private void $tMeth.name($step.name())(Mat src1, Mat src2, Mat dst) { 8 | Core.bitwise_xor(src1, src2, dst); 9 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/New_Size.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Fills a size with given width and height. 3 | * @param width the width of the size 4 | * @param height the height of the size 5 | * @param size the size to fill 6 | */ 7 | private void $tMeth.name($step.name())(double width, double height, Size size) { 8 | size.height = height; 9 | size.width = width; 10 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_add.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Calculates the sum of two Mats. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | Returns: 8 | A numpy.ndarray that is the sum. 
9 | """ 10 | return cv2.add(src1,src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/New_Point.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(x, y): 3 | """Fills a point with given x and y values. 4 | Args: 5 | x: A number for x. 6 | y: A number for y. 7 | Returns: 8 | A list of two numbers that represent the point. 9 | """ 10 | return (x,y) -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/FileManager.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | /** 4 | * A FileManager saves images to disk. 5 | */ 6 | public interface FileManager { 7 | 8 | /** 9 | * Saves an array of bytes to a file. 10 | * 11 | * @param image The image to save 12 | * @param fileName The file name to save 13 | */ 14 | void saveImage(byte[] image, String fileName); 15 | } 16 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/RNGEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum RNGEnum { 6 | 7 | UNIFORM(opencv_core.RNG.UNIFORM), NORMAL(opencv_core.RNG.NORMAL); 8 | 9 | public final int value; 10 | 11 | RNGEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/scripts/addition-wrong-output-count.py: -------------------------------------------------------------------------------- 1 | import edu.wpi.grip.core.sockets as grip 2 | 3 | inputs = [ 4 | 
grip.SocketHints.createNumberSocketHint("a", 0.0), 5 | grip.SocketHints.createNumberSocketHint("b", 0.0) 6 | ] 7 | 8 | outputs = [ 9 | grip.SocketHints.Outputs.createNumberSocketHint("sum", 0.0), 10 | ] 11 | 12 | 13 | def perform(a, b): 14 | return a + b, 3 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_applyColorMap.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Applies a color Map to given Image. 3 | * @param src Image to apply Color Map to. 4 | * @param colorMap the type of color map to apply. 5 | * @param dst Output after Color Map is applied. 6 | */ 7 | void $className::#func($step ["src", "colorMap", "dst"]) { 8 | cv::applyColorMap(src, dst, colorMap); 9 | } 10 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_divide.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Divides one image by another with given scale. 3 | * @param src1 Numerator image. 4 | * @param src2 Denominator image. 5 | * @param scale Scale for division. 6 | * @param dst Result image. 7 | */ 8 | void $className::#func($step ["src1", "src2", "scale", "dst"]) { 9 | cv::divide(src1, src2, dst, scale); 10 | } 11 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/RunStoppedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event fired when the pipeline stops running. This is guaranteed to follow a corresponding 5 | * {@link RunStartedEvent}. 6 | * 7 | *

This is different from {@link RenderEvent} in that it will always be fired when the 8 | * pipeline runs. 9 | */ 10 | public class RunStoppedEvent { 11 | } 12 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_multiply.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Mutiplies one image by another with given scale. 3 | * @param src1 first image. 4 | * @param src2 second image. 5 | * @param scale Scale for multiplication. 6 | * @param dst Result image. 7 | */ 8 | void $className::#func($step ["src1", "src2", "scale", "dst"]) { 9 | cv::multiply(src1, src2, dst, scale); 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_applyColorMap.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Applies a color Map to given Image. 3 | * @param src Image to apply Color Map to. 4 | * @param colorMap the type of color map to apply. 
5 | * @param dst Output after Color Map is applied 6 | */ 7 | private void $tMeth.name($step.name())(Mat src, int colorMap, Mat dst) { 8 | Imgproc.applyColorMap(src, dst, colorMap); 9 | } -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/scripts/addition-wrong-output-type.py: -------------------------------------------------------------------------------- 1 | import edu.wpi.grip.core.sockets as grip 2 | 3 | inputs = [ 4 | grip.SocketHints.createNumberSocketHint("a", 0.0), 5 | grip.SocketHints.createNumberSocketHint("b", 0.0) 6 | ] 7 | 8 | outputs = [ 9 | grip.SocketHints.Outputs.createNumberSocketHint("sum", 0.0), 10 | ] 11 | 12 | 13 | def perform(a, b): 14 | return "I am not an Integer" 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_scaleAdd.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Adds two images where one is multiplied by a number. 3 | * @param src1 first image. 4 | * @param scale scale for the first image. 5 | * @param src2 second image. 
6 | * @param dst output image 7 | */ 8 | void $className::#func($step ["src1", "scale", "src2", "dst"]) { 9 | cv::scaleAdd(src1, scale, src2, dst); 10 | } 11 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sockets/MockInputSocketFactory.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | 4 | import edu.wpi.grip.core.cuda.NullCudaDetector; 5 | 6 | import com.google.common.eventbus.EventBus; 7 | 8 | public class MockInputSocketFactory extends InputSocketImpl.FactoryImpl { 9 | 10 | public MockInputSocketFactory(EventBus eventBus) { 11 | super(eventBus, new NullCudaDetector()); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/events/SetStepsExpandedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.events; 2 | 3 | /** 4 | * Toggles every steps' visibility. 5 | */ 6 | public class SetStepsExpandedEvent { 7 | 8 | private final boolean expanded; 9 | 10 | public SetStepsExpandedEvent(boolean expanded) { 11 | this.expanded = expanded; 12 | } 13 | 14 | public boolean isExpanded() { 15 | return expanded; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_divide.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Divides one image by another with given scale. 3 | * @param src1 Numerator image. 4 | * @param src2 Denominator image. 5 | * @param scale Scale for division. 6 | * @param dst Result image. 
7 | */ 8 | private void $tMeth.name($step.name())(Mat src1, Mat src2, double scale, Mat dst) { 9 | Core.divide(src1, src2, dst, scale); 10 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_subtract.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Calculates the difference of two mats. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2 A numpy.ndarray. 7 | Returns: 8 | A numpy.ndarray that is the difference. 9 | """ 10 | return cv2.subtract(src1,src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_multiply.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Mutiplies one image by another with given scale. 3 | * @param src1 first image. 4 | * @param src2 second image. 5 | * @param scale Scale for multiplication. 6 | * @param dst Result image. 7 | */ 8 | private void $tMeth.name($step.name())(Mat src1, Mat src2, double scale, Mat dst) { 9 | Core.multiply(src1, src2, dst, scale); 10 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/templated/package-info.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Templates for various {@link edu.wpi.grip.core.Operation Operations}. Many of these classes 3 | * contain duplicated code because there is no way to create types at runtime. The best place to 4 | * start looking at this package is the 5 | * {@link edu.wpi.grip.core.operations.templated.TemplateFactory}. 
6 | */ 7 | package edu.wpi.grip.core.operations.templated; 8 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/codegeneration/data/TOutput.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.codegeneration.data; 2 | 3 | 4 | public class TOutput extends TSocket { 5 | 6 | /** 7 | * Constructor that creates a new template output socket. 8 | * @param type the type of the output. 9 | * @param name the name of the output. 10 | */ 11 | public TOutput(String type, String name) { 12 | super(type, name); 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_scaleAdd.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Adds two images where one is multiplied by a number. 3 | * @param src1 first image. 4 | * @param scale scale for the first image. 5 | * @param src2 second image. 6 | * @param dst output image 7 | */ 8 | private void $tMeth.name($step.name())(Mat src1, double scale, Mat src2, Mat dst) { 9 | Core.scaleAdd(src1, scale, src2, dst); 10 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_bitwise_or.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Computes the per channel or of two images. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | Returns: 8 | A numpy.ndarray the or of the two mats. 
9 | """ 10 | return cv2.bitwise_or(src1, src2) -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/Controller.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui; 2 | 3 | import javafx.scene.Node; 4 | 5 | /** 6 | * A controller should always provide a method to get the root node that it is controlling. This 7 | * allows the controllers root node to be added to other UI components. 8 | */ 9 | public interface Controller { 10 | 11 | /** 12 | * @return The root node of the controller. 13 | */ 14 | Node getRoot(); 15 | } 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_bitwise_and.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Computes the per channel and of two images. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | Returns: 8 | A numpy.ndarray the and of the two mats. 9 | """ 10 | return cv2.bitwise_and(src1, src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/New_Size.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(width, height): 3 | """Fills a size with given width and height. 4 | Args: 5 | width: A number for the width. 6 | height: A number for the height. 7 | Returns: 8 | A list of two numbers that represent a size. 
9 | """ 10 | return (width, height) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_compare.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Compares two Mats using designated method. 3 | * @param src1 first Mat to compare. 4 | * @param src2 second Mat to compare. 5 | * @param cmpop Type of comparison to use. 6 | * @param dst output Mat from the comparison. 7 | */ 8 | void $className::#func($step ["src1", "src2", "cmpop", "dst"]) { 9 | cv::compare(src1, src2, dst, cmpop); 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_absdiff.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2): 3 | """Calculates the absolute difference between two Mats. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2 A numpy.ndarray. 7 | Returns: 8 | The absolute difference as a numpy.ndarray. 9 | """ 10 | return cv2.absdiff(src1,src2) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_applyColorMap.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, color_map): 3 | """Applies a color Map to given Image. 4 | Args: 5 | src: A numpy.ndarray. 6 | color_map: An opencv enum. 7 | Returns: 8 | A numpy.ndarray in the new color space. 
9 | """ 10 | return cv2.applyColorMap(src, color_map) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_cvtColor.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, code): 3 | """Converts an image from one color space to another. 4 | Args: 5 | src: A numpy.ndarray. 6 | code: The conversion code. (opencv eum) 7 | Result: 8 | A numpy.ndarray in the new color space. 9 | """ 10 | return cv2.cvtColor(src, code) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_compare.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Compares two Mats using designated method. 3 | * @param src1 first Mat to compare. 4 | * @param src2 second Mat to compare. 5 | * @param cmpop Type of comparison to use. 6 | * @param dst output Mat from the comparison. 7 | */ 8 | private void $tMeth.name($step.name())(Mat src1, Mat src2, int cmpop, Mat dst) { 9 | Core.compare(src1,src2, dst, cmpop); 10 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_medianBlur.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, k_size): 3 | """Performs a median blur on the image. 4 | Args: 5 | src: A numpy.ndarray. 6 | k_size: the scaling factor for the blur as a number. 7 | Returns: 8 | The result as a numpy.ndarray. 
9 | """ 10 | return cv2.medianBlur(src, (int)(k_size)) -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/util/SupplierWithIO.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.util; 2 | 3 | import java.io.IOException; 4 | 5 | 6 | /** 7 | * A supplier that can throw an IO exception. Thus putting it on the caller to handle it instead of 8 | * on the creator of the lambda function. 9 | * 10 | * @param The type that the supplier returns. 11 | */ 12 | @FunctionalInterface 13 | public interface SupplierWithIO { 14 | T getWithIO() throws IOException; 15 | } 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_extractChannel.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, channel): 3 | """Extracts given channel from an image. 4 | Args: 5 | src: A numpy.ndarray. 6 | channel: Zero indexed channel number to extract. 7 | Returns: 8 | The result as a numpy.ndarray. 9 | """ 10 | return cv2.extractChannel(src, (int) (channel + 0.5)) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Mask.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, mask): 3 | """Filter out an area of an image using a binary mask. 4 | Args: 5 | input: A three channel numpy.ndarray. 6 | mask: A black and white numpy.ndarray. 7 | Returns: 8 | A three channel numpy.ndarray. 
9 | """ 10 | return cv2.bitwise_and(input, input, mask=mask) -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/scripts/addition-subtraction.py: -------------------------------------------------------------------------------- 1 | import edu.wpi.grip.core.sockets as grip 2 | 3 | inputs = [ 4 | grip.SocketHints.createNumberSocketHint("a", 0.0), 5 | grip.SocketHints.createNumberSocketHint("b", 0.0) 6 | ] 7 | 8 | outputs = [ 9 | grip.SocketHints.Outputs.createNumberSocketHint("sum", 0.0), 10 | grip.SocketHints.Outputs.createNumberSocketHint("difference", 0.0), 11 | ] 12 | 13 | 14 | def perform(a, b): 15 | return a + b, a - b 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_flip.vm: -------------------------------------------------------------------------------- 1 | #needs("FlipCode") 2 | @staticmethod 3 | def $tMeth.name($step.name())(src, flipcode): 4 | """Flips an image along X, Y or both axes. 5 | Args: 6 | src: A numpy.ndarray. 7 | flipcode: A flipcode enum to specify the diretion of the flip. 8 | Returns: 9 | The flipped numpy.ndarray. 10 | """ 11 | return cv2.flip(src, flipcode.value) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Valve.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Sets an output if a value is true. 3 | * @param sw The boolean that determines the output. 4 | * @param input The output if sw is true. 5 | * @param output The output which is equal to the input or null. 
6 | */ 7 | private static void pipelineValve(boolean sw, T input, #RefOf("T") output) { 8 | if (sw) { 9 | output.set(input); 10 | } 11 | else { 12 | output.set(null); 13 | } 14 | } -------------------------------------------------------------------------------- /core/src/test/resources/edu/wpi/grip/scripts/addition-with-name-and-description.py: -------------------------------------------------------------------------------- 1 | import edu.wpi.grip.core.sockets as grip 2 | 3 | name = "Add" 4 | 5 | summary = "Compute the sum of two integers" 6 | 7 | inputs = [ 8 | grip.SocketHints.createNumberSocketHint("a", 0.0), 9 | grip.SocketHints.createNumberSocketHint("b", 0.0) 10 | ] 11 | 12 | outputs = [ 13 | grip.SocketHints.Outputs.createNumberSocketHint("sum", 0.0), 14 | ] 15 | 16 | 17 | def perform(a, b): 18 | return a + b 19 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_multiply.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2, scale): 3 | """Mutiplies one image by another with given scale. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | scale: Scale for multiplication. 8 | Returns: 9 | The result as a numpy.ndarray. 
10 | """ 11 | return cv2.multiply(src1, src2, scale) -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/operations/network/NetworkPackageSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | 4 | import com.google.common.testing.AbstractPackageSanityTests; 5 | 6 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 7 | public class NetworkPackageSanityTest extends AbstractPackageSanityTests { 8 | public NetworkPackageSanityTest() { 9 | super(); 10 | publicApiOnly(); 11 | ignoreClasses(c -> c.getName().contains("Mock")); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Valve.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Sets an output if a value is true. 3 | * 4 | * @param sw The boolean that determines the output. 5 | * @param input The output if sw is true. 6 | * @param output The output which is equal to the input or null. 7 | */ 8 | template 9 | void $className::#func($step ["sw", "input", "output"]) { 10 | if (sw) { 11 | output = input; 12 | } 13 | else { 14 | output = NULL; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_divide.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2, scale): 3 | """Divides one image by another with given scale. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | scale: value of the scale. 8 | Returns 9 | The result of the operation as a numpy.ndarray. 
10 | """ 11 | return cv2.divide(src1, src2, scale) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_scaleAdd.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, scale, src2): 3 | """Adds two images where one is multiplied by a number. 4 | Args: 5 | src1: A numpy.ndarray. 6 | scale: Scale for the first image. 7 | src2: A numpy.ndarray. 8 | Returns: 9 | The result as a numpy.ndarray. 10 | """ 11 | return cv2.scaleAdd(src1, scale, src2) -------------------------------------------------------------------------------- /annotation/src/main/java/edu/wpi/grip/annotation/operation/PublishableObject.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.annotation.operation; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | /** 9 | * Marks a type as being publishable by a network operation. 
10 | */ 11 | @Target(ElementType.TYPE) 12 | @Retention(RetentionPolicy.SOURCE) 13 | public @interface PublishableObject { 14 | } 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/sources/GripSourcesHardwareModule.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sources; 2 | 3 | import com.google.inject.AbstractModule; 4 | 5 | /** 6 | * Adds bindings for hardware that is required by {@link edu.wpi.grip.core.Source Sources} 7 | */ 8 | public class GripSourcesHardwareModule extends AbstractModule { 9 | @Override 10 | protected void configure() { 11 | bind(CameraSource.FrameGrabberFactory.class).to(CameraSource.FrameGrabberFactoryImpl.class); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/UndistortTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum UndistortTypesEnum { 6 | 7 | PROJ_SPHERICAL_ORTHO(opencv_imgproc.PROJ_SPHERICAL_ORTHO), PROJ_SPHERICAL_EQRECT(opencv_imgproc.PROJ_SPHERICAL_EQRECT); 8 | 9 | public final int value; 10 | 11 | UndistortTypesEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ui/preloader/preloader.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | `application` 3 | } 4 | 5 | createNativeConfigurations() 6 | 7 | dependencies { 8 | javafx("base") 9 | javafx("controls") 10 | javafx("fxml") 11 | javafx("graphics") 12 | } 13 | 14 | application { 15 | mainClassName = "edu.wpi.grip.preloader.Launch" 16 | } 17 | 18 | tasks.named("run") { 19 | classpath = 
sourceSets["main"].runtimeClasspath 20 | main = application.mainClassName 21 | args = listOf("windowed") 22 | } 23 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/BasicStep.vm: -------------------------------------------------------------------------------- 1 | #foreach($input in $step.getInputs()) 2 | #newInput($input) 3 | #end 4 | #foreach($output in $step.getOutputs()) 5 | #if($output.mutable()) 6 | #RefOf($output.type()) ${tMeth.name($output.name())}Ref = new #RefOf($output.type())(); 7 | #end 8 | #end 9 | $tMeth.callOp($step); 10 | #foreach($output in $step.getOutputs()) 11 | #if($output.mutable()) 12 | ${tMeth.name($output.name())} = ${tMeth.name($output.name())}Ref.get(); 13 | #end 14 | #end -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_compare.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src1, src2, compare): 3 | """Compares two Mats using designated method. 4 | Args: 5 | src1: A numpy.ndarray. 6 | src2: A numpy.ndarray. 7 | compare: Type of comparison to use. (opencv enum) 8 | Returns: 9 | A numpy.ndarray that is the result. 
10 | """ 11 | return cv2.compare(src1, src2, compare) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/step.vm: -------------------------------------------------------------------------------- 1 | # Step $step.name()$step.num(): 2 | #foreach($inp in $step.getInputs()) 3 | #if(!$inp.hasValue()||$inp.value().contains("source")) 4 | #set($c = "s"+$step.num()) 5 | #newInput($inp $c) 6 | 7 | #end#end 8 | (#if($step.name().equals("Threshold_Moving")) 9 | self.__lastImage$step.num(), #end#foreach($out in$step.getOutputs()) 10 | #output($out)#if($velocityCount < $step.getOutputs().size()),#end#end 11 | ) = $tMeth.callOp($step) -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/codegeneration/tools/PipelineInterfacer.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.codegeneration.tools; 2 | 3 | import java.io.File; 4 | 5 | public interface PipelineInterfacer { 6 | 7 | void setMatSource(int num, File img); 8 | 9 | void setNumSource(int num, Number val); 10 | 11 | void process(); 12 | 13 | Object getOutput(String name, GenType type); 14 | 15 | void setSwitch(String name, boolean value); 16 | 17 | void setValve(String name, boolean value); 18 | 19 | } 20 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/exception/GripServerException.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.exception; 2 | 3 | /** 4 | * An exception thrown when something goes wrong in the 5 | * {@link edu.wpi.grip.core.http.GripServer GripServer}. 
6 | */ 7 | public class GripServerException extends GripException { 8 | 9 | public GripServerException(String message) { 10 | super(message); 11 | } 12 | 13 | public GripServerException(String message, Throwable cause) { 14 | super(message, cause); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/util/service/ServiceSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util.service; 2 | 3 | 4 | import com.google.common.testing.AbstractPackageSanityTests; 5 | import com.google.common.util.concurrent.Service; 6 | 7 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 8 | public class ServiceSanityTest extends AbstractPackageSanityTests { 9 | 10 | public ServiceSanityTest() { 11 | setDefault(Service.Listener.class, new SingleActionListener(() -> { 12 | })); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/Enum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum Enum { 6 | 7 | ACCESS_READ(opencv_core.ACCESS_READ), ACCESS_WRITE(opencv_core.ACCESS_WRITE), ACCESS_RW(opencv_core.ACCESS_RW), ACCESS_MASK(opencv_core.ACCESS_MASK), ACCESS_FAST(opencv_core.ACCESS_FAST); 8 | 9 | public final int value; 10 | 11 | Enum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sockets/SocketsSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | import com.google.common.testing.AbstractPackageSanityTests; 4 | 5 | 
@SuppressWarnings("PMD.TestClassWithoutTestCases") 6 | public class SocketsSanityTest extends AbstractPackageSanityTests { 7 | public SocketsSanityTest() { 8 | super(); 9 | setDefault(Enum.class, TestEnum.A); 10 | ignoreClasses(c -> c.getName().contains("Mock")); 11 | } 12 | 13 | enum TestEnum { 14 | A, B, C 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Valve.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def __valve(boolsw, on_true): 3 | """return a value if a value is true or None if it is false. 4 | Args: 5 | boolsw: A boolean that determines what option to choose. 6 | on_true: The output if true. 7 | Returns: 8 | Either the value of on_true or None. 9 | """ 10 | if(boolsw): 11 | return on_true 12 | else: 13 | return None -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Mask.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Filter out an area of an image using a binary mask. 3 | * @param input The image on which the mask filters. 4 | * @param mask The binary image that is used to filter. 5 | * @param output The image in which to store the output. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat input, Mat mask, Mat output) { 8 | mask.convertTo(mask, CvType.CV_8UC1); 9 | Core.bitwise_xor(output, output, output); 10 | input.copyTo(output, mask); 11 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/SparseMatEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum SparseMatEnum { 6 | 7 | MAGIC_VAL(opencv_core.SparseMat.MAGIC_VAL), MAX_DIM(opencv_core.SparseMat.MAX_DIM), HASH_SCALE(opencv_core.SparseMat.HASH_SCALE), HASH_BIT(opencv_core.SparseMat.HASH_BIT); 8 | 9 | public final int value; 10 | 11 | SparseMatEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_resize.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Resizes an Image. 3 | * @param src The image to resize. 4 | * @param dSize size to set the image. 5 | * @param fx scale factor along X axis. 6 | * @param fy scale factor along Y axis. 7 | * @param interpolation type of interpolation to use. 8 | * @param dst output image. 9 | */ 10 | void $className::#func($step ["src", "dSize", "fx", "fy", "interpolation", "dst"]) { 11 | cv::resize(src, dst, dSize, fx, fy, interpolation); 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Mask.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Filter out an area of an image using a binary mask. 3 | * 4 | * @param input The image on which the mask filters. 
5 | * @param mask The binary image that is used to filter. 6 | * @param output The image in which to store the output. 7 | */ 8 | void $className::#func($step ["input", "mask", "output"]) { 9 | mask.convertTo(mask, CV_8UC1); 10 | cv::bitwise_xor(output, output, output); 11 | input.copyTo(output, mask); 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_Threshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Apply a fixed-level threshold to each array element in an image. 3 | * @param src Image to threshold. 4 | * @param thresh threshold value. 5 | * @param maxVal Maximum value for THRES_BINARY and THRES_BINARY_INV. 6 | * @param type Type of threshold to apply. 7 | * @param dst output Image. 8 | */ 9 | void $className::#func($step ["src", "thresh", "maxVal", "type", "dst"]) { 10 | cv::threshold(src, dst, thresh, maxVal, type); 11 | } 12 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Normalize.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, type, a, b): 3 | """Normalizes or remaps the values of pixels in an image. 4 | Args: 5 | input: A numpy.ndarray. 6 | type: Opencv enum. 7 | a: The minimum value. 8 | b: The maximum value. 9 | Returns: 10 | A numpy.ndarray of the same type as the input. 
11 | """ 12 | return cv2.normalize(input, None, a, b, type) -------------------------------------------------------------------------------- /ui/linuxLauncher/linuxLauncher.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'cpp' 3 | } 4 | 5 | model { 6 | binaries { 7 | all { 8 | cppCompiler.args '-pthread', '-ldl' 9 | linker.args '-pthread', '-ldl' 10 | } 11 | } 12 | components { 13 | linuxLauncher(NativeExecutableSpec) { 14 | sources { 15 | cpp { 16 | source { 17 | srcDir "src/cpp" 18 | } 19 | } 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Normalize.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Normalizes or remaps the values of pixels in an image. 3 | * @param input The image on which to perform the Normalize. 4 | * @param type The type of normalization. 5 | * @param a The minimum value. 6 | * @param b The maximum value. 7 | * @param output The image in which to store the output. 
8 | */ 9 | private void $tMeth.name($step.name())(Mat input, int type, double a, double b, Mat output) { 10 | Core.normalize(input, output, a, b, type); 11 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/GemmFlagsEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum GemmFlagsEnum { 6 | 7 | /** transposes src1 */ 8 | GEMM_1_T(opencv_core.GEMM_1_T), /** transposes src2 */ 9 | GEMM_2_T(opencv_core.GEMM_2_T), /** transposes src3 */ 10 | GEMM_3_T(opencv_core.GEMM_3_T); 11 | 12 | public final int value; 13 | 14 | GemmFlagsEnum(int value) { 15 | this.value = value; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Normalize.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Normalizes or remaps the values of pixels in an image. 3 | * 4 | * @param input The image on which to perform the Normalize. 5 | * @param type The type of normalization. 6 | * @param min The minimum value. 7 | * @param max The maximum value. 8 | * @param output The image in which to store the output. 9 | */ 10 | void $className::#func($step, ["input", "type", "min", "max", "output"]) { 11 | cv::normalize(input, output, min, max, type); 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_addWeighted.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the weighted addition of two Mats. 3 | * @param src1 the first Mat. 4 | * @param alpha the weight for the first Mat. 5 | * @param src2 the second Mat. 
6 | * @param beta the weight for the second Mat. 7 | * @param gamma Constant to add to each sum. 8 | * @param dst the output Mat. 9 | */ 10 | void $className::#func($step ["src1", "alpha", "src2", "beta", "gamma", "dst"]) { 11 | cv::addWeighted(src1, alpha, src2, beta, gamma, dst); 12 | } 13 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Lines.vm: -------------------------------------------------------------------------------- 1 | #if($lines == false) 2 | #set($lines = true) 3 | class Line: 4 | 5 | def __init__(self, x1, y1, x2, y2): 6 | self.x1 = x1 7 | self.y1 = y1 8 | self.x2 = x2 9 | self.y2 = y2 10 | 11 | def length(self): 12 | return numpy.sqrt(pow(self.x2 - self.x1, 2) + pow(self.y2 - self.y1, 2)) 13 | 14 | def angle(self): 15 | return math.degrees(math.atan2(self.y2 - self.y1, self.x2 - self.x1)) 16 | #end -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Convex_Hulls.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input_contours): 3 | """Computes the convex hulls of contours. 4 | Args: 5 | input_contours: A list of numpy.ndarray that each represent a contour. 6 | Returns: 7 | A list of numpy.ndarray that each represent a contour. 
8 | """ 9 | output = [] 10 | for contour in input_contours: 11 | output.append(cv2.convexHull(contour)) 12 | return output -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/opencv/enumeration/FlipCode.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.opencv.enumeration; 2 | 3 | /** 4 | * FlipCode Codes from documentation in {@link org.bytedeco.javacpp.opencv_core#flip} 5 | */ 6 | public enum FlipCode { 7 | // IMPORTANT! If you change the name of these values then you must also change the FileParser 8 | // in the generator. 9 | X_AXIS(0), 10 | Y_AXIS(1), 11 | BOTH_AXES(-1); 12 | 13 | public final int value; 14 | 15 | FlipCode(int value) { 16 | this.value = value; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/observables/Observer.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.observables; 2 | 3 | /** 4 | * Observes changes to the value of an {@link Observable}. 5 | * 6 | * @param the type of the value to observe 7 | */ 8 | @FunctionalInterface 9 | public interface Observer { 10 | 11 | /** 12 | * Called when the value of the observable changes. 
13 | * 14 | * @param previous the previous value of the observable 15 | * @param current the current value of the observable 16 | */ 17 | void onChange(T previous, T current); 18 | 19 | } 20 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/FileStorageEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum FileStorageEnum { 6 | 7 | UNDEFINED(opencv_core.FileStorage.UNDEFINED), VALUE_EXPECTED(opencv_core.FileStorage.VALUE_EXPECTED), NAME_EXPECTED(opencv_core.FileStorage.NAME_EXPECTED), INSIDE_MAP(opencv_core.FileStorage.INSIDE_MAP); 8 | 9 | public final int value; 10 | 11 | FileStorageEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/LineTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum LineTypesEnum { 6 | 7 | FILLED(opencv_core.FILLED), /** 4-connected line */ 8 | LINE_4(opencv_core.LINE_4), /** 8-connected line */ 9 | LINE_8(opencv_core.LINE_8), /** antialiased line */ 10 | LINE_AA(opencv_core.LINE_AA); 11 | 12 | public final int value; 13 | 14 | LineTypesEnum(int value) { 15 | this.value = value; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/InterpolationMasksEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import 
org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum InterpolationMasksEnum { 6 | 7 | INTER_BITS(opencv_imgproc.INTER_BITS), INTER_BITS2(opencv_imgproc.INTER_BITS2), INTER_TAB_SIZE(opencv_imgproc.INTER_TAB_SIZE), INTER_TAB_SIZE2(opencv_imgproc.INTER_TAB_SIZE2); 8 | 9 | public final int value; 10 | 11 | InterpolationMasksEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/events/EventsSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | import edu.wpi.grip.core.metrics.MockTimer; 4 | import edu.wpi.grip.core.metrics.Timer; 5 | 6 | import com.google.common.testing.AbstractPackageSanityTests; 7 | 8 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 9 | public class EventsSanityTest extends AbstractPackageSanityTests { 10 | // The tests are all in the superclass. 11 | public EventsSanityTest() { 12 | super(); 13 | setDefault(Timer.class, MockTimer.MOCK_FACTORY.create(this)); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_Threshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Apply a fixed-level threshold to each array element in an image. 3 | * @param src Image to threshold. 4 | * @param threshold threshold value. 5 | * @param maxVal Maximum value for THRES_BINARY and THRES_BINARY_INV 6 | * @param type Type of threshold to appy. 7 | * @param dst output Image. 
8 | */ 9 | private void $tMeth.name($step.name())(Mat src, double threshold, double maxVal, int type, 10 | Mat dst) { 11 | Imgproc.threshold(src, dst, threshold, maxVal, type); 12 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/exception/GripException.java: -------------------------------------------------------------------------------- 1 | 2 | package edu.wpi.grip.core.exception; 3 | 4 | /** 5 | * An exception thrown when something goes wrong with an internal GRIP 6 | * operation. This class is {@code abstract} to encourage making subclasses 7 | * for specific cases. 8 | */ 9 | public abstract class GripException extends RuntimeException { 10 | 11 | public GripException(String message) { 12 | super(message); 13 | } 14 | 15 | public GripException(String message, Throwable cause) { 16 | super(message, cause); 17 | } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/operations/network/ros/ROSPackageSanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network.ros; 2 | 3 | 4 | import com.google.common.testing.AbstractPackageSanityTests; 5 | 6 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 7 | public class ROSPackageSanityTest extends AbstractPackageSanityTests { 8 | public ROSPackageSanityTest() { 9 | super(); 10 | ignoreClasses(c -> c.equals(ROSLoader.class) || c.equals(MockROSManager.class)); 11 | setDefault(JavaToMessageConverter.class, JavaToMessageConverter.BLOBS); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Switch.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Selects an output from two inputs based on a boolean. 
3 | * @param sw The boolean that determines the output. 4 | * @param onTrue The output if sw is true. 5 | * @param onFalse The output if sw is false. 6 | * @param output The output which is equal to either onTrue or onFalse. 7 | */ 8 | private void pipelineSwitch(boolean sw, T onTrue, T onFalse, #RefOf("T") output) { 9 | if (sw) { 10 | output.set(onTrue); 11 | } 12 | else { 13 | output.set(onFalse); 14 | } 15 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_addWeighted.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Computes the weighted addition of two Mats. 3 | * @param src1 the first Mat 4 | * @param alpha the weight for the first Mat 5 | * @param src2 the second Mat 6 | * @param beta the weight for the second Mat 7 | * @param gamma Constant to add to each sum 8 | * @param dst the output Mat 9 | */ 10 | private void $tMeth.name($step.name())(Mat src1, double alpha, Mat src2, double beta, 11 | double gamma, Mat dst) { 12 | Core.addWeighted(src1, alpha, src2, beta, gamma, dst); 13 | } -------------------------------------------------------------------------------- /ui/src/test/resources/edu/wpi/grip/ui/codegeneration/tools/pipe/AbsPipeline.cpp: -------------------------------------------------------------------------------- 1 | #include "AbsPipeline.h" 2 | using namespace std; 3 | map AbsPipeline::getMatSources(){ 4 | return this->matSources; 5 | } 6 | map AbsPipeline::getOutputs(){ 7 | return this->outputs; 8 | } 9 | map AbsPipeline::getConditions(){ 10 | return this->conditions; 11 | } 12 | AbsPipeline::~AbsPipeline(){} 13 | 14 | map AbsPipeline::getNumSources(){ 15 | return this->numSources; 16 | } 17 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/DistanceTransformMasksEnum.java: 
-------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum DistanceTransformMasksEnum { 6 | 7 | /** mask=3 */ 8 | DIST_MASK_3(opencv_imgproc.DIST_MASK_3), /** mask=5 */ 9 | DIST_MASK_5(opencv_imgproc.DIST_MASK_5), DIST_MASK_PRECISE(opencv_imgproc.DIST_MASK_PRECISE); 10 | 11 | public final int value; 12 | 13 | DistanceTransformMasksEnum(int value) { 14 | this.value = value; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Switch.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def __switch(boolsw, on_true, on_false): 3 | """Selects an output from two inputs based on a boolean. 4 | Args: 5 | boolsw: A boolean that determines what option to choose. 6 | on_true: The output if true. 7 | on_false: The output if false. 8 | Returns: 9 | Either the value of on_true or on_false. 10 | """ 11 | if(boolsw): 12 | return on_true 13 | else: 14 | return on_false -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Switch.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Selects an output from two inputs based on a boolean. 3 | * 4 | * @param sw The boolean that determines the output. 5 | * @param onTrue The output if sw is true. 6 | * @param onFalse The output if sw is false. 7 | * @param output The output which is equal to either onTrue or onFalse. 
@staticmethod
def $tMeth.name($step.name())(src, thresh, max_val, type):
    """Apply a fixed-level threshold to each array element in an image.
    Args:
        src: A numpy.ndarray.
        thresh: Threshold value.
        max_val: Maximum value for THRESH_BINARY and THRESH_BINARY_INV.
        type: Opencv enum (one of the cv2.THRESH_* thresholding types).
    Returns:
        A black and white numpy.ndarray.
    """
    # cv2.threshold returns (retval, dst); only the thresholded image is kept.
    return cv2.threshold(src, thresh, max_val, type)[1]
/**
 * Sets the values of pixels in a binary image to their distance to the nearest black pixel.
 * @param input The image on which to perform the Distance Transform.
 * @param type The distance type (an OpenCV DIST_* constant passed to distanceTransform).
 * @param maskSize the size of the mask.
 * @param output The image in which to store the output.
 */
private void $tMeth.name($step.name())(Mat input,int type, int maskSize,
    Mat output) {
    Imgproc.distanceTransform(input, output, type, maskSize);
    // An rtype of -1 keeps the output's current depth unchanged.
    output.convertTo(output, -1);
}
/**
 * Scales an image to an exact size.
 *
 * @param input The image on which to perform the Resize.
 * @param width The width of the output in pixels.
 * @param height The height of the output in pixels.
 * @param interpolation The type of interpolation.
 * @param output The image in which to store the output.
 */
void $className::#func($step, ["input", "width", "height", "interpolation", "output"]) {
	cv::resize(input, output, cv::Size(width, height), 0.0, 0.0, interpolation);
}
14 | */ 15 | default boolean pipelineShouldRun() { 16 | return true; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/PCAFlagsEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum PCAFlagsEnum { 6 | 7 | /** indicates that the input samples are stored as matrix rows */ 8 | DATA_AS_ROW(opencv_core.PCA.DATA_AS_ROW), /** indicates that the input samples are stored as matrix columns */ 9 | DATA_AS_COL(opencv_core.PCA.DATA_AS_COL), USE_AVG(opencv_core.PCA.USE_AVG); 10 | 11 | public final int value; 12 | 13 | PCAFlagsEnum(int value) { 14 | this.value = value; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/dragging/OperationDragService.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.dragging; 2 | 3 | 4 | import edu.wpi.grip.core.Operation; 5 | import edu.wpi.grip.core.OperationMetaData; 6 | 7 | import com.google.inject.Singleton; 8 | 9 | /** 10 | * Service for dragging an {@link Operation} from the 11 | * {@link edu.wpi.grip.ui.pipeline.PipelineController} 12 | * to the {@link edu.wpi.grip.ui.pipeline.PipelineController}. 
/**
 * Resizes an image.
 * @param src The image to resize.
 * @param dSize size to set the image; may be null, in which case the size is
 *     computed from fx and fy.
 * @param fx scale factor along X axis.
 * @param fy scale factor along Y axis.
 * @param interpolation type of interpolation to use.
 * @param dst output image.
 */
private void $tMeth.name($step.name())(Mat src, Size dSize, double fx, double fy, int interpolation,
    Mat dst) {
    if (dSize==null) {
        // A zero Size tells OpenCV to derive the output size from fx and fy.
        dSize = new Size(0,0);
    }
    Imgproc.resize(src, dst, dSize, fx, fy, interpolation);
}
11 | The point where the maximum value is located/ 12 | """ 13 | return cv2.minMaxLoc(src, mask) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Resize_Image.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, width, height, interpolation): 3 | """Scales and image to an exact size. 4 | Args: 5 | input: A numpy.ndarray. 6 | Width: The desired width in pixels. 7 | Height: The desired height in pixels. 8 | interpolation: Opencv enum for the type fo interpolation. 9 | Returns: 10 | A numpy.ndarray of the new size. 11 | """ 12 | return cv2.resize(input, ((int)(width), (int)(height)), 0, 0, interpolation) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/OperationList.fxml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 10 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/UMatEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum UMatEnum { 6 | 7 | MAGIC_VAL(opencv_core.UMat.MAGIC_VAL), AUTO_STEP(opencv_core.UMat.AUTO_STEP), CONTINUOUS_FLAG(opencv_core.UMat.CONTINUOUS_FLAG), SUBMATRIX_FLAG(opencv_core.UMat.SUBMATRIX_FLAG), MAGIC_MASK(opencv_core.UMat.MAGIC_MASK), TYPE_MASK(opencv_core.UMat.TYPE_MASK), DEPTH_MASK(opencv_core.UMat.DEPTH_MASK); 8 | 9 | public final int value; 10 | 11 | UMatEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- 
/**
 * Scales an image to an exact size.
 * @param input The image on which to perform the Resize.
 * @param width The width of the output in pixels.
 * @param height The height of the output in pixels.
 * @param interpolation The type of interpolation.
 * @param output The image in which to store the output.
 */
private void $tMeth.name($step.name())(Mat input, double width, double height,
    int interpolation, Mat output) {
    Imgproc.resize(input, output, new Size(width, height), 0.0, 0.0, interpolation);
}
/**
 * Performs a Laplacian on the image.
 * @param src Image to use Laplacian on.
 * @param size Odd number that is size of the kernel (truncated to int for ksize).
 * @param scale Scaling factor for Laplacian.
 * @param delta offset for values in Laplacian.
 * @param borderType pixel extrapolation method.
 * @param dst Output of Laplacian.
 */
private static void $tMeth.name($step.name())(Mat src, double size, double scale, double delta,
    int borderType, Mat dst) {
    // The third argument (0) is ddepth; 0 keeps an 8-bit output depth.
    Imgproc.Laplacian(src, dst, 0, (int)size, scale, delta, borderType);
}
/**
 * Segment an image based on hue, saturation, and value ranges.
 *
 * @param input The image on which to perform the HSV threshold.
 * @param hue The min and max hue.
 * @param sat The min and max saturation.
 * @param val The min and max value.
 * @param out The image in which to store the output.
 */
void $className::#func($step ["input", "hue", "sat", "val", "out"]) {
	cv::cvtColor(input, out, cv::COLOR_BGR2HSV);
	cv::inRange(out,cv::Scalar(hue[0], sat[0], val[0]), cv::Scalar(hue[1], sat[1], val[1]), out);
}
6 | * @param green The min and max green. 7 | * @param blue The min and max blue. 8 | * @param output The image in which to store the output. 9 | */ 10 | void $className::#func($step ["input", "red", "green", "blue", "output"]) { 11 | cv::cvtColor(input, output, cv::COLOR_BGR2RGB); 12 | cv::inRange(output, cv::Scalar(red[0], green[0], blue[0]), cv::Scalar(red[1], green[1], blue[1]), output); 13 | } 14 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/DistanceTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum DistanceTypesEnum { 6 | 7 | DIST_USER(opencv_imgproc.DIST_USER), DIST_L1(opencv_imgproc.DIST_L1), DIST_L2(opencv_imgproc.DIST_L2), DIST_C(opencv_imgproc.DIST_C), DIST_L12(opencv_imgproc.DIST_L12), DIST_FAIR(opencv_imgproc.DIST_FAIR), DIST_WELSCH(opencv_imgproc.DIST_WELSCH), DIST_HUBER(opencv_imgproc.DIST_HUBER); 8 | 9 | public final int value; 10 | 11 | DistanceTypesEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/BenchmarkEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event posted before and after a pipeline is benchmarked. 
5 | */ 6 | public final class BenchmarkEvent implements LoggableEvent { 7 | 8 | private final boolean isStart; 9 | 10 | private BenchmarkEvent(boolean isStart) { 11 | this.isStart = isStart; 12 | } 13 | 14 | public static BenchmarkEvent started() { 15 | return new BenchmarkEvent(true); 16 | } 17 | 18 | public static BenchmarkEvent finished() { 19 | return new BenchmarkEvent(false); 20 | } 21 | 22 | public boolean isStart() { 23 | return isStart; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/DirtiesSaveEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | /** 4 | * An event that can potentially dirty the save file. 5 | * 6 | *

/**
 * An event that can potentially dirty the save file.
 *
 * <p>These events ensure that anything that changes causes the save file to be flagged as dirty and
 * in need of being saved for the project to be deemed "clean" again.
 */
public interface DirtiesSaveEvent {

  /**
   * Some events may have more logic regarding whether they make the save dirty or not.
   *
   * @return True if this event should dirty the project save.
   */
  default boolean doesDirtySave() {
    return true;
  }
}
/**
 * Frame Grabber Factory that mocks out the frame grabber that it returns.
 */
public class MockFrameGrabberFactory implements CameraSource.FrameGrabberFactory {

  /** Returns a mock grabber regardless of the requested device number. */
  @Override
  public FrameGrabber create(int deviceNumber) {
    return new SimpleMockFrameGrabber();
  }

  /** Returns a mock grabber regardless of the requested address. */
  @Override
  public FrameGrabber create(String addressProperty) throws MalformedURLException {
    return new SimpleMockFrameGrabber();
  }
}
/**
 * Applies a canny edge detection to the image.
 * @param image image to use.
 * @param thres1 first threshold for the canny algorithm.
 * @param thres2 second threshold for the canny algorithm.
 * @param apertureSize aperture size for the Sobel operation.
 * @param gradient if the L2 norm should be used.
 * @param edges output of the canny.
 */
private void $tMeth.name($step.name())(Mat image, double thres1, double thres2,
    double apertureSize, boolean gradient, Mat edges) {
    Imgproc.Canny(image, edges, thres1, thres2, (int)apertureSize, gradient);
}
/**
 * Finds the minimum and maximum values of the Mat as well as the associated Points.
 * The last four parameters are output parameters, written through by reference.
 * @param src the Mat to find min and max for.
 * @param mask the Mat to use as a mask for the operation.
 * @param minVal the minimum value found in the Mat.
 * @param maxVal the maximum value found in the Mat.
 * @param minLoc the location of the minimum value.
 * @param maxLoc the location of the maximum value.
 */
void $className::#func($step ["src", "mask", "minVal", "maxVal", "minLoc", "maxLoc"]) {
	cv::minMaxLoc(src, &minVal, &maxVal, &minLoc, &maxLoc, mask);
}
package edu.wpi.grip.generated.opencv_imgproc.enumeration;

import org.bytedeco.javacpp.opencv_imgproc;

public enum GrabCutClassesEnum {

    /** an obvious background pixel */
    GC_BGD(opencv_imgproc.GC_BGD),
    /** an obvious foreground (object) pixel */
    GC_FGD(opencv_imgproc.GC_FGD),
    /** a possible background pixel */
    GC_PR_BGD(opencv_imgproc.GC_PR_BGD),
    /** a possible foreground pixel */
    GC_PR_FGD(opencv_imgproc.GC_PR_FGD);

    /** The underlying OpenCV integer constant for this enum value. */
    public final int value;

    GrabCutClassesEnum(int value) {
        this.value = value;
    }
}
/**
 * Expands area of higher value in an image (morphological dilation).
 * @param src the Image to dilate.
 * @param kernel the kernel for dilation.
 * @param anchor the center of the kernel.
 * @param iterations the number of times to perform the dilation (truncated to int).
 * @param borderType pixel extrapolation method.
 * @param borderValue value to be used for a constant border.
 * @param dst Output Image.
 */
void $className::#func($step ["src", "kernel", "anchor", "iterations", "borderType", "borderValue", "dst"]) {
	cv::dilate(src, dst, kernel, anchor, (int)iterations, borderType, borderValue);
}
/**
 * Segment an image based on hue, saturation, and value ranges.
 *
 * @param input The image on which to perform the HSV threshold.
 * @param hue The min and max hue
 * @param sat The min and max saturation
 * @param val The min and max value
 * @param out The image in which to store the output.
 */
private void $tMeth.name($step.name())(Mat input, double[] hue, double[] sat, double[] val,
    Mat out) {
    Imgproc.cvtColor(input, out, Imgproc.COLOR_BGR2HSV);
    Core.inRange(out, new Scalar(hue[0], sat[0], val[0]),
        new Scalar(hue[1], sat[1], val[1]), out);
}
/ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/HSL_Threshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Segment an image based on hue, saturation, and luminance ranges. 3 | * 4 | * @param input The image on which to perform the HSL threshold. 5 | * @param hue The min and max hue 6 | * @param sat The min and max saturation 7 | * @param lum The min and max luminance 8 | * @param out The image in which to store the output. 9 | */ 10 | private void $tMeth.name($step.name())(Mat input, double[] hue, double[] sat, double[] lum, 11 | Mat out) { 12 | Imgproc.cvtColor(input, out, Imgproc.COLOR_BGR2HLS); 13 | Core.inRange(out, new Scalar(hue[0], lum[0], sat[0]), 14 | new Scalar(hue[1], lum[1], sat[1]), out); 15 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/PublishableRosProxy.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | /** 9 | * Marks a 10 | * {@link edu.wpi.grip.core.operations.network.ros.JavaToMessageConverter JavaToMessageConverter} 11 | * field as a proxy for a non-ROS-publishable type. 12 | */ 13 | @Target(ElementType.FIELD) 14 | @Retention(RetentionPolicy.RUNTIME) 15 | public @interface PublishableRosProxy { 16 | 17 | /** 18 | * The type the marked converter is a proxy for. 
19 | */ 20 | Class value(); 21 | 22 | } 23 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/ParamEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum ParamEnum { 6 | 7 | INT(opencv_core.Param.INT), BOOLEAN(opencv_core.Param.BOOLEAN), REAL(opencv_core.Param.REAL), STRING(opencv_core.Param.STRING), MAT(opencv_core.Param.MAT), MAT_VECTOR(opencv_core.Param.MAT_VECTOR), ALGORITHM(opencv_core.Param.ALGORITHM), FLOAT(opencv_core.Param.FLOAT), UNSIGNED_INT(opencv_core.Param.UNSIGNED_INT), UINT64(opencv_core.Param.UINT64), UCHAR(opencv_core.Param.UCHAR); 8 | 9 | public final int value; 10 | 11 | ParamEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/ui/UICommandLineHelper.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui; 2 | 3 | import edu.wpi.grip.core.CoreCommandLineHelper; 4 | 5 | import org.apache.commons.cli.Option; 6 | 7 | /** 8 | * Command line helper for the UI. This has all the options of the {@link CoreCommandLineHelper}. 
9 | */ 10 | public class UICommandLineHelper extends CoreCommandLineHelper { 11 | 12 | public static final String HEADLESS_OPTION = "headless"; 13 | 14 | private static final Option headlessOption = 15 | Option.builder() 16 | .longOpt(HEADLESS_OPTION) 17 | .desc("Run in headless mode") 18 | .build(); 19 | 20 | public UICommandLineHelper() { 21 | super(headlessOption); 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/RGB_Threshold.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, red, green, blue): 3 | """Segment an image based on color ranges. 4 | Args: 5 | input: A BGR numpy.ndarray. 6 | red: A list of two numbers the are the min and max red. 7 | green: A list of two numbers the are the min and max green. 8 | blue: A list of two numbers the are the min and max blue. 9 | Returns: 10 | A black and white numpy.ndarray. 11 | """ 12 | out = cv2.cvtColor(input, cv2.COLOR_BGR2RGB) 13 | return cv2.inRange(out, (red[0], green[0], blue[0]), (red[1], green[1], blue[1])) -------------------------------------------------------------------------------- /annotation/README.md: -------------------------------------------------------------------------------- 1 | # GRIP Annotation Processor 2 | 3 | This subproject contains an annotation processor used to generate manifest files in the core project, 4 | used by the GRIP runtime to discover operations, publishable data types, and aliases for XStream 5 | serialization for save files. 
6 | 7 | The annotation processor generates these files: 8 | 9 | | Annotation | File | 10 | |---|---| 11 | | `@Description` | `/META-INF/operations` | 12 | | `@PublishableObject` | `/META-INF/publishables` | 13 | | `@XStreamAlias` | `/META-INF/xstream-aliases` | 14 | 15 | Each file contains a list of the names of the classes annotated with the corresponding annotation, 16 | which is then read by the `MetaInfReader` class in the GRIP core module. 17 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/NormTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum NormTypesEnum { 6 | 7 | NORM_INF(opencv_core.NORM_INF), NORM_L1(opencv_core.NORM_L1), NORM_L2(opencv_core.NORM_L2), NORM_L2SQR(opencv_core.NORM_L2SQR), NORM_HAMMING(opencv_core.NORM_HAMMING), NORM_HAMMING2(opencv_core.NORM_HAMMING2), NORM_TYPE_MASK(opencv_core.NORM_TYPE_MASK), /** flag */ 8 | NORM_RELATIVE(opencv_core.NORM_RELATIVE), /** flag */ 9 | NORM_MINMAX(opencv_core.NORM_MINMAX); 10 | 11 | public final int value; 12 | 13 | NormTypesEnum(int value) { 14 | this.value = value; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Threshold_Moving.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, last_image): 3 | """Thresholds off parts of the image that have moved or changed between 4 | the previous and next image. 5 | Args: 6 | input: A numpy.ndarray. 7 | last_image: The previous value of the numpy.ndarray. 8 | Returns: 9 | A numpy.ndarray with the parts that are the same in black. 
10 | """ 11 | if (last_image.shape == input.shape): 12 | output = cv2.absdiff(input, last_image) 13 | else: 14 | output = numpy.ndarray(shape=input.shape) 15 | return input, output -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/ExceptionClearedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | import edu.wpi.grip.core.util.ExceptionWitness; 4 | 5 | import static com.google.common.base.Preconditions.checkNotNull; 6 | 7 | /** 8 | * Indicates that an flaggedException, originally indicated by a {@link ExceptionEvent}, has been 9 | * resolved. This class should not be constructed. Instead, use {@link ExceptionWitness} 10 | */ 11 | public class ExceptionClearedEvent { 12 | private final Object origin; 13 | 14 | public ExceptionClearedEvent(Object origin) { 15 | this.origin = checkNotNull(origin, "The origin can not be null"); 16 | } 17 | 18 | public Object getOrigin() { 19 | return origin; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/UMatDataEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum UMatDataEnum { 6 | 7 | COPY_ON_MAP(opencv_core.UMatData.COPY_ON_MAP), HOST_COPY_OBSOLETE(opencv_core.UMatData.HOST_COPY_OBSOLETE), DEVICE_COPY_OBSOLETE(opencv_core.UMatData.DEVICE_COPY_OBSOLETE), TEMP_UMAT(opencv_core.UMatData.TEMP_UMAT), TEMP_COPIED_UMAT(opencv_core.UMatData.TEMP_COPIED_UMAT), USER_ALLOCATED(opencv_core.UMatData.USER_ALLOCATED), DEVICE_MEM_MAPPED(opencv_core.UMatData.DEVICE_MEM_MAPPED); 8 | 9 | public final int value; 10 | 11 | UMatDataEnum(int value) { 12 | this.value = value; 13 | } 14 | } 15 | 
-------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Desaturate.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src): 3 | """Converts a color image into shades of gray. 4 | Args: 5 | src: A color numpy.ndarray. 6 | Returns: 7 | A gray scale numpy.ndarray. 8 | """ 9 | (a, b, channels) = src.shape 10 | if(channels == 1): 11 | return numpy.copy(src) 12 | elif(channels == 3): 13 | return cv2.cvtColor(src, cv2.COLOR_BGR2GRAY) 14 | elif(channels == 4): 15 | return cv2.cvtColor(src, cv2.COLOR_BGRA2GRAY) 16 | else: 17 | raise Exception("Input to desaturate must have 1, 3 or 4 channels") -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/HSV_Threshold.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, hue, sat, val): 3 | """Segment an image based on hue, saturation, and value ranges. 4 | Args: 5 | input: A BGR numpy.ndarray. 6 | hue: A list of two numbers that are the min and max hue. 7 | sat: A list of two numbers that are the min and max saturation. 8 | val: A list of two numbers that are the min and max value. 9 | Returns: 10 | A black and white numpy.ndarray. 11 | """ 12 | out = cv2.cvtColor(input, cv2.COLOR_BGR2HSV) 13 | return cv2.inRange(out, (hue[0], sat[0], val[0]), (hue[1], sat[1], val[1])) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_Sobel.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Find edges by calculating the requested derivative order for the given image. 3 | * @param src the Image to perform Sobel on. 
4 | * @param dx order of x derivative. 5 | * @param dy order of y derivative. 6 | * @param kSize size of kernel. 7 | * @param scale scale factor for derivatives. 8 | * @param delta delta that is added to derivatives. 9 | * @param borderType pixel extrapolation method. 10 | * @param dst Image that is output of Sobel. 11 | */ 12 | void $className::#func($step ["src", "dx", "dy", "kSize", "scale", "delta", "borderType", "dst"]) { 13 | cv::Sobel(src, dst, 0, (int)dx, (int)dy, (int)kSize, scale, delta, borderType); 14 | } 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/HSL_Threshold.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, hue, sat, lum): 3 | """Segment an image based on hue, saturation, and luminance ranges. 4 | Args: 5 | input: A BGR numpy.ndarray. 6 | hue: A list of two numbers the are the min and max hue. 7 | sat: A list of two numbers the are the min and max saturation. 8 | lum: A list of two numbers the are the min and max luminance. 9 | Returns: 10 | A black and white numpy.ndarray. 11 | """ 12 | out = cv2.cvtColor(input, cv2.COLOR_BGR2HLS) 13 | return cv2.inRange(out, (hue[0], lum[0], sat[0]), (hue[1], lum[1], sat[1])) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Threshold_Moving.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Thresholds off parts of the image that have moved or changed between the previous and next image. 3 | * @param input The image on which to perform the Threshold Moving. 4 | * @param lastImage The mat where the previous image is stored. 5 | * @param output The image in which to store the output. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat input, Mat lastImage, Mat output) { 8 | Size lastSize = lastImage.size(); 9 | Size inputSize = input.size(); 10 | if (!lastImage.empty() && lastSize.height == inputSize.height && lastSize.width == inputSize.width) { 11 | Core.absdiff(input, lastImage, output); 12 | } 13 | input.copyTo(lastImage); 14 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Threshold_Moving.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Thresholds off parts of the image that have moved or changed between the previous and next image. 3 | * 4 | * @param input The image on which to perform the Threshold Moving. 5 | * @param lastImage The mat where the previous image is stored. 6 | * @param output The image in which to store the output. 7 | */ 8 | void $className::#func($step ["input", "output"]) { 9 | cv::Size lastSize = lastImage.size(); 10 | cv::Size inputSize = input.size(); 11 | if (!lastImage.empty() && lastSize.height == inputSize.height && 12 | lastSize.width == inputSize.width) { 13 | cv::absdiff(input, lastImage, output); 14 | } 15 | input.copyTo(lastImage); 16 | } 17 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_flip.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Code used for CV_flip. 3 | * Per OpenCV spec 0 -> flip on X axis. 4 | * >0 -> flip on Y axis. 5 | * <0 -> flip on both axes. 6 | */ 7 | public enum FlipCode { 8 | X_AXIS(0), 9 | Y_AXIS(1), 10 | BOTH_AXES(-1); 11 | public final int value; 12 | FlipCode(int value) { 13 | this.value = value; 14 | } 15 | } 16 | 17 | /** 18 | * Flips an image along X, Y or both axes. 19 | * @param src Image to flip. 20 | * @param flipcode FlipCode of which direction to flip. 
21 | * @param dst flipped version of the Image. 22 | */ 23 | private void $tMeth.name($step.name())(Mat src, FlipCode flipcode, Mat dst) { 24 | Core.flip(src, dst, flipcode.value); 25 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/CodeGenerationSettingsChangedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | import edu.wpi.grip.core.settings.CodeGenerationSettings; 4 | 5 | import static com.google.common.base.Preconditions.checkNotNull; 6 | 7 | /** 8 | * An event fired when code generation settings are changed. 9 | */ 10 | public class CodeGenerationSettingsChangedEvent implements DirtiesSaveEvent, LoggableEvent { 11 | 12 | private final CodeGenerationSettings settings; 13 | 14 | public CodeGenerationSettingsChangedEvent(CodeGenerationSettings settings) { 15 | this.settings = checkNotNull(settings, "settings"); 16 | } 17 | 18 | public CodeGenerationSettings getCodeGenerationSettings() { 19 | return settings; 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/Step.vm: -------------------------------------------------------------------------------- 1 | //input 2 | #if($step.name().toLowerCase().contains("switch") || $step.name().toLowerCase().contains("valve")) 3 | #set($boolInp = $step.getInputs().get(0)) 4 | #if($step.name().toLowerCase().contains("switch")) 5 | bool $tMeth.name($boolInp.name()) = this->$tMeth.name($boolInp.name()); 6 | #set($onTrue = $step.getInputs().get(1)) 7 | #set($onFalse = $step.getInputs().get(2)) 8 | #newInput($onTrue) 9 | #newInput($onFalse) 10 | #else 11 | bool $tMeth.name($boolInp.name()) = this->$tMeth.name($boolInp.name()); 12 | #set($onTrue = $step.getInputs().get(1)) 13 | #newInput($onTrue) 14 | #end 15 | #else 16 | #foreach($input in $step.getInputs()) 
17 | #newInput($input) 18 | #end 19 | #end 20 | $tMeth.callOp($step); -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_adaptiveThreshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Applies an adaptive threshold to an array. 3 | * @param src Input Image. 4 | * @param maxValue Value to assign to pixels that match the condition. 5 | * @param adaptiveMethod adaptive threshold method to use. 6 | * @param thresholdType Type of threshold to use. 7 | * @param blockSize Size of a pixel area that is used to calculate a threshold. 8 | * @param c Constant to subtract from the mean. 9 | * @param dst Output of threshold. 10 | */ 11 | void $className::#func($step, ["src", "maxValue", "adaptiveMethod", "thresholdType", "blockSize", "c", "dst"]) { 12 | cv::adaptiveThreshold(src, dst, maxValue, adaptiveMethod, thresholdType, (int)blockSize, c); 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/HSL_Threshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Segment an image based on hue, saturation, and luminance ranges. 3 | * 4 | * @param input The image on which to perform the HSL threshold. 5 | * @param hue The min and max hue. 6 | * @param sat The min and max saturation. 7 | * @param lum The min and max luminance. 8 | * @param output The image in which to store the output. 
9 | */ 10 | //void $tMeth.name($step.name())(Mat *input, double hue[], double sat[], double lum[], Mat *out) { 11 | void $className::#func($step ["input", "hue", "sat", "lum", "out"]) { 12 | cv::cvtColor(input, out, cv::COLOR_BGR2HLS); 13 | cv::inRange(out, cv::Scalar(hue[0], lum[0], sat[0]), cv::Scalar(hue[1], lum[1], sat[1]), out); 14 | } 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/CV_rectangle.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Draws a rectangle on an Image. 3 | * @param src Image to draw rectangle on. 4 | * @param pt1 one corner of the rectangle. 5 | * @param pt2 opposite corner of the rectangle. 6 | * @param color Scalar indicating color to make the rectangle. 7 | * @param thickness Thickness of the lines of the rectangle. 8 | * @param lineType Type of line for the rectangle. 9 | * @param shift Number of decimal places in the points. 10 | * @param dst output image. 11 | */ 12 | void $className::#func($step ["src", "pt1", "pt2", "color", "thickness", "lineType", "shift", "dst"]) { 13 | src.copyTo(dst); 14 | cv::rectangle(dst, pt1, pt2, color, (int)thickness, lineType, (int)shift); 15 | } 16 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Desaturate.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Converts a color image into shades of grey. 3 | * 4 | * @param input The image on which to perform the desaturate. 5 | * @param output The image in which to store the output. 
6 | */ 7 | void $className::#func($step ["input", "output"]) { 8 | switch (input.channels()) { 9 | case 1: 10 | // If the input is already one channel, it's already desaturated 11 | input.copyTo(output); 12 | break; 13 | case 3: 14 | cv::cvtColor(input, output, cv::COLOR_BGR2GRAY); 15 | break; 16 | case 4: 17 | cv::cvtColor(input, output, cv::COLOR_BGRA2GRAY); 18 | break; 19 | default: 20 | throw "Input to desaturate must have 1, 3, or 4 channels"; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Distance_Transform.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Sets the values of pixels in a binary image to their distance to the nearest black pixel. 3 | * 4 | * @param input The image on which to perform the Distance Transform. 5 | * @param type The Transform. 6 | * @param maskSize the size of the mask. 7 | * @param output The image in which to store the output. 
8 | */ 9 | void $className::#func($step ["input", "type", "maskSize", "output"]) { 10 | int mask; 11 | if (maskSize.compare("0x0") == 0) { 12 | mask = 0; 13 | } else if (maskSize.compare("3x3") == 0) { 14 | mask = 3; 15 | } else { 16 | mask = 5; 17 | } 18 | cv::distanceTransform(input, output, type, mask); 19 | output.convertTo(output, -1); 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/deploy-javadoc.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Javadoc 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | publish: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | with: 14 | fetch-depth: 0 15 | - uses: actions/setup-java@v1 16 | with: 17 | java-version: 11 18 | java-package: jdk+fx 19 | - name: Generate Javadoc 20 | run: ./gradlew aggregateJavadocs 21 | - name: Deploy 🚀 22 | uses: JamesIves/github-pages-deploy-action@3.5.9 23 | with: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | BRANCH: gh-pages 26 | CLEAN: true 27 | FOLDER: build/docs/javadoc 28 | TARGET_FOLDER: javadoc 29 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/DistanceTransformLabelTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum DistanceTransformLabelTypesEnum { 6 | 7 | /** each connected component of zeros in src (as well as all the non-zero pixels closest to the 8 | connected component) will be assigned the same label */ 9 | DIST_LABEL_CCOMP(opencv_imgproc.DIST_LABEL_CCOMP), /** each zero pixel (and all the non-zero pixels closest to it) gets its own label. 
*/ 10 | DIST_LABEL_PIXEL(opencv_imgproc.DIST_LABEL_PIXEL); 11 | 12 | public final int value; 13 | 14 | DistanceTransformLabelTypesEnum(int value) { 15 | this.value = value; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/CmpTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum CmpTypesEnum { 6 | 7 | /** src1 is equal to src2. */ 8 | CMP_EQ(opencv_core.CMP_EQ), /** src1 is greater than src2. */ 9 | CMP_GT(opencv_core.CMP_GT), /** src1 is greater than or equal to src2. */ 10 | CMP_GE(opencv_core.CMP_GE), /** src1 is less than src2. */ 11 | CMP_LT(opencv_core.CMP_LT), /** src1 is less than or equal to src2. */ 12 | CMP_LE(opencv_core.CMP_LE), /** src1 is unequal to src2. */ 13 | CMP_NE(opencv_core.CMP_NE); 14 | 15 | public final int value; 16 | 17 | CmpTypesEnum(int value) { 18 | this.value = value; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_Sobel.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Find edges by calculating the requested derivative order for the given image. 3 | * @param src the Image to perform Sobel on. 4 | * @param dx order of x derivative. 5 | * @param dy order of y derivative. 6 | * @param kSize size of kernel. 7 | * @param scale scale factor for derivatives. 8 | * @param delta delta that is added to derivatives. 9 | * @param borderType pixel extrapolation method. 10 | * @param dst Image that is output of Sobel. 
11 | */ 12 | private void $tMeth.name($step.name())(Mat src, double dx, double dy, double kSize, double scale, 13 | double delta, int borderType, Mat dst) { 14 | Imgproc.Sobel(src, dst, 0, (int)dx, (int)dy, (int)kSize, scale, delta, borderType); 15 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/preview/Previews.fxml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 9 | 15 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/BooleanPublishable.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | import edu.wpi.grip.annotation.operation.PublishableObject; 4 | 5 | import javax.annotation.concurrent.Immutable; 6 | 7 | /** 8 | * An adapter to allow booleans to be published from GRIP sockets into a {@link NetworkPublisher}. 
9 | */ 10 | @Immutable 11 | @PublishableProxy(Boolean.class) 12 | @PublishableObject 13 | public final class BooleanPublishable implements Publishable { 14 | private final boolean bool; 15 | 16 | public BooleanPublishable(Boolean bool) { 17 | this.bool = bool; 18 | } 19 | 20 | @SuppressWarnings("PMD.BooleanGetMethodName") 21 | @PublishValue(weight = 1) 22 | public boolean getValue() { 23 | return bool; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/Switch.vm: -------------------------------------------------------------------------------- 1 | #set($needsRefClass = true) 2 | //Inputs 3 | #set($boolInp = $step.getInput(0)) 4 | #if(!$boolInp.hasValue()) 5 | $tMeth.name($boolInp.name()) = ${boolInp.value()}.$boolInp.baseType()Value(); 6 | #end 7 | #set($inputTrue = $step.getInput(1)) 8 | #newInput($inputTrue) 9 | #set($inputFalse = $step.getInput(2)) 10 | #newInput($inputFalse) 11 | #set($out = $step.getOutput(0)) 12 | #set($outMutName = $tMeth.name($out.name()) + "Ref") 13 | //Output 14 | #RefOf($out.baseType()) $outMutName = new #RefOf($out.baseType())(); 15 | pipelineSwitch($tMeth.name($boolInp.name()), $tMeth.name($inputTrue.name()), 16 | $tMeth.name($inputFalse.name()), $outMutName); 17 | //output assignment 18 | $tMeth.name($out.name()) = ${outMutName}.get(); -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_Canny.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(image, thres1, thres2, aperture_size, gradient): 3 | """Applies a canny edge detection to the image. 4 | Args: 5 | image: A numpy.ndarray as the input. 6 | thres1: First threshold for the canny algorithm. (number) 7 | thres2: Second threshold for the canny algorithm. 
(number) 8 | aperture_size: Aperture size for the canny operation. (number) 9 | gradient: If the L2 norm should be used. (boolean) 10 | Returns: 11 | The edges as a numpy.ndarray. 12 | """ 13 | return cv2.Canny(image, thres1, thres2, apertureSize=(int)(aperture_size), 14 | L2gradient=gradient) -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/settings/Setting.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.settings; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | /** 9 | * An annotation for specifying JavaBeans display names and short descriptions. This is used to 10 | * provide user-presentable strings for the project settings editor, which is autogenerated by 11 | * ControlsFX using the JavaBeans API. This is similar to the more general-purpose annotations in 12 | * JEP 256: http://openjdk.java.net/jeps/256 13 | */ 14 | @Retention(RetentionPolicy.RUNTIME) 15 | @Target(ElementType.FIELD) 16 | @interface Setting { 17 | String label(); 18 | 19 | String description(); 20 | } 21 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Filter_Lines.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Filters out lines that do not meet certain criteria. 3 | * 4 | * @param inputs The lines that will be filtered. 5 | * @param minLength The minimum length of a line to be kept. 6 | * @param angle The minimum and maximum angle of a line to be kept. 7 | * @param outputs The output lines after the filter. 
8 | */ 9 | void $className::#func($step ["inputs", "minLength", "angle", "outputs"]) { 10 | outputs.clear(); 11 | for (Line line: inputs) { 12 | if (line.length()>abs(minLength)) { 13 | if ((line.angle() >= angle[0] && line.angle() <= angle[1]) || 14 | (line.angle() + 180.0 >= angle[0] && line.angle() + 180.0 <=angle[1])) { 15 | outputs.push_back(line); 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_Sobel.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, dx, dy, k_size, scale, delta, border_type): 3 | """Find edges by calculating the requested derivative order for the given image. 4 | Args: 5 | src: A numpy.ndarray. 6 | k_size: Odd number that is size of the kernel. 7 | scale: Scaling factor for Sobel. 8 | delta: Offset for values in Sobel. 9 | border_type: Opencv enum. 10 | Returns: 11 | The result as a numpy.ndarray. 12 | """ 13 | return cv2.Sobel(src, 0, (int)(dx + 0.5), (int)(dy + 0.5), ksize = (int)(k_size + 0.5), 14 | scale = scale, delta = delta, borderType = border_type) 15 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Find_Contours.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, external_only): 3 | """Finds contours in a binary image. 4 | Args: 5 | input: A numpy.ndarray. 6 | external_only: A boolean. If true only external contours are found. 7 | Returns: 8 | A list of numpy.ndarray where each one represents a contour. 
9 | """ 10 | if(external_only): 11 | mode = cv2.RETR_EXTERNAL 12 | else: 13 | mode = cv2.RETR_LIST 14 | method = cv2.CHAIN_APPROX_SIMPLE 15 | im2, contours, hierarchy =cv2.findContours(input, mode=mode, method=method) 16 | return contours -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/util/MockExceptionWitness.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util; 2 | 3 | import com.google.common.eventbus.EventBus; 4 | 5 | import java.util.function.Supplier; 6 | 7 | 8 | public class MockExceptionWitness extends ExceptionWitness { 9 | public static final ExceptionWitness.Factory MOCK_FACTORY = simpleFactory(EventBus::new); 10 | 11 | public MockExceptionWitness(EventBus eventBus, Object origin) { 12 | super(eventBus, origin); 13 | } 14 | 15 | public static ExceptionWitness.Factory simpleFactory(Supplier eventBus) { 16 | return origin -> new MockExceptionWitness(eventBus.get(), origin); 17 | } 18 | 19 | public static ExceptionWitness.Factory simpleFactory(EventBus eventBus) { 20 | return simpleFactory(() -> eventBus); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_erode.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, kernel, anchor, iterations, border_type, border_value): 3 | """Expands area of lower value in an image. 4 | Args: 5 | src: A numpy.ndarray. 6 | kernel: The kernel for erosion. A numpy.ndarray. 7 | iterations: the number of times to erode. 8 | border_type: Opencv enum that represents a border type. 9 | border_value: value to be used for a constant border. 10 | Returns: 11 | A numpy.ndarray after erosion. 
12 | """ 13 | return cv2.erode(src, kernel, anchor, iterations = (int) (iterations +0.5), 14 | borderType = border_type, borderValue = border_value) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_adaptiveThreshold.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Applies an adaptive threshold to an array. 3 | * @param src Input Image. 4 | * @param maxValue Value to assign to pixels that match the condition. 5 | * @param adaptiveMethod Adaptive threshold method to use. 6 | * @param thresholdType Type of threshold to use. 7 | * @param blockSize Size of a pixel area that is used to calculate a threshold. 8 | * @param c Constant to subtract from the mean. 9 | * @param dst Output of threshold. 10 | */ 11 | private void $tMeth.name($step.name())(Mat src, double maxValue, int adaptiveMethod, 12 | int thresholdType, double blockSize, double c, Mat dst) { 13 | Imgproc.adaptiveThreshold(src, dst, maxValue, adaptiveMethod, thresholdType, 14 | (int)blockSize, c); 15 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_dilate.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, kernel, anchor, iterations, border_type, border_value): 3 | """Expands area of higher value in an image. 4 | Args: 5 | src: A numpy.ndarray. 6 | kernel: The kernel for dilation. A numpy.ndarray. 7 | iterations: the number of times to dilate. 8 | border_type: Opencv enum that represents a border type. 9 | border_value: value to be used for a constant border. 10 | Returns: 11 | A numpy.ndarray after dilation. 
12 | """ 13 | return cv2.dilate(src, kernel, anchor, iterations = (int) (iterations +0.5), 14 | borderType = border_type, borderValue = border_value) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Desaturate.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Converts a color image into shades of grey. 3 | * @param input The image on which to perform the desaturate. 4 | * @param output The image in which to store the output. 5 | */ 6 | private void $tMeth.name($step.name())(Mat input, Mat output) { 7 | switch (input.channels()) { 8 | case 1: 9 | // If the input is already one channel, it's already desaturated 10 | input.copyTo(output); 11 | break; 12 | case 3: 13 | Imgproc.cvtColor(input, output, Imgproc.COLOR_BGR2GRAY); 14 | break; 15 | case 4: 16 | Imgproc.cvtColor(input, output, Imgproc.COLOR_BGRA2GRAY); 17 | break; 18 | default: 19 | throw new IllegalArgumentException("Input to desaturate must have 1, 3, or 4 channels"); 20 | } 21 | } -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/util/TestAnnotationFXMLLoader.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.util; 2 | 3 | import edu.wpi.grip.ui.annotations.ParametrizedController; 4 | 5 | import java.io.IOException; 6 | 7 | import javafx.fxml.FXMLLoader; 8 | 9 | public final class TestAnnotationFXMLLoader { 10 | 11 | private TestAnnotationFXMLLoader() { /* no-op */ } 12 | 13 | public static T load(Object annotatedController) { 14 | try { 15 | return FXMLLoader.load(annotatedController.getClass().getResource( 16 | annotatedController.getClass().getAnnotation(ParametrizedController.class).url()), 17 | null, null, 18 | c -> annotatedController 19 | ); 20 | } catch (IOException e) { 21 | throw new IllegalStateException("Failed to load 
FXML", e); 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /ui/preloader/src/main/java/edu/wpi/grip/preloader/Launch.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.preloader; 2 | 3 | import javafx.application.Application; 4 | 5 | public final class Launch { 6 | 7 | private Launch() { 8 | } 9 | 10 | /** 11 | * Main entry point for launching GRIP. We use an explicit main method in a separate class to 12 | * allow the JavaFX application to be launched without needing to go through the JVM's 13 | * module reflection (which fails when JavaFX is not on the module path - i.e. ALWAYS). 14 | */ 15 | public static void main(String[] args) { 16 | // JavaFX 11+ uses GTK3 by default, and has problems on some display servers 17 | // This flag forces JavaFX to use GTK2 18 | System.setProperty("jdk.gtk.version", "2"); 19 | Application.launch(GripPreloader.class, args); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/EventLogger.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | import com.google.common.eventbus.AllowConcurrentEvents; 4 | import com.google.common.eventbus.Subscribe; 5 | import com.google.inject.Singleton; 6 | 7 | import java.util.logging.Logger; 8 | 9 | /** 10 | * Class for logging events as they're posted to the event bus. 
11 | */ 12 | @Singleton 13 | public class EventLogger { 14 | 15 | private static final Logger logger = Logger.getLogger(EventLogger.class.getName()); 16 | 17 | @Subscribe 18 | @AllowConcurrentEvents 19 | public void eventPosted(LoggableEvent event) { 20 | final String threadName = Thread.currentThread().getName(); 21 | logger.log(event.logLevel(), 22 | "Event on thread '" + threadName + "': " + event.asLoggableString()); 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Watershed.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(input, contours): 3 | """Isolates overlapping objects from the background and each other. 4 | This is getting redone in grip. 5 | """ 6 | h, w = input.shape[:2] 7 | markers = numpy.zeros((h, w), numpy.int32) 8 | consize = len(contours[1]) 9 | for i in range(0,consize): 10 | cv2.drawContours(markers, contours[1],i, ((i + 1) * (255 / consize),0,0)) 11 | cv2.circle(markers, (5, 5), 3, (255,255,255), -1, cv2.LINE_8, 0) 12 | print (markers.dtype) 13 | cv2.watershed(image = input, markers = markers) 14 | markers = numpy.uint8(markers) 15 | markers = cv2.bitwise_not(markers) 16 | return markers -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/NumberPublishable.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network; 2 | 3 | import edu.wpi.grip.annotation.operation.PublishableObject; 4 | 5 | import javax.annotation.concurrent.Immutable; 6 | 7 | /** 8 | * An adapter to allow numbers to be published from GRIP sockets into a {@link NetworkPublisher}. 
9 | * 10 | * @see PublishAnnotatedOperation#PublishAnnotatedOperation 11 | */ 12 | @Immutable 13 | @PublishableProxy(Number.class) 14 | @PublishableObject 15 | public final class NumberPublishable implements Publishable { 16 | 17 | private final double number; 18 | 19 | public NumberPublishable(Number number) { 20 | this.number = number.doubleValue(); 21 | } 22 | 23 | @PublishValue(weight = 0) 24 | public double getValue() { 25 | return number; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Find_Contours.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Sets the values of pixels in a binary image to their distance to the nearest black pixel. 3 | * @param input The image on which to perform the Distance Transform. 4 | * @param type The Transform. 5 | * @param maskSize the size of the mask. 6 | * @param output The image in which to store the output. 7 | */ 8 | private void $tMeth.name($step.name())(Mat input, boolean externalOnly, 9 | List contours) { 10 | Mat hierarchy = new Mat(); 11 | contours.clear(); 12 | int mode; 13 | if (externalOnly) { 14 | mode = Imgproc.RETR_EXTERNAL; 15 | } 16 | else { 17 | mode = Imgproc.RETR_LIST; 18 | } 19 | int method = Imgproc.CHAIN_APPROX_SIMPLE; 20 | Imgproc.findContours(input, contours, hierarchy, mode, method); 21 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/util/service/ServiceRestartPolicy.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util.service; 2 | 3 | /** 4 | * @see Original version 5 | */ 6 | @FunctionalInterface 7 | public interface ServiceRestartPolicy { 8 | 9 | /** 10 | * A restart policy that has a service restart immediately. 
11 | */ 12 | ServiceRestartPolicy IMMEDIATE = () -> 0L; 13 | 14 | /** 15 | * Policy might want to keep track of when the latest restarts have happened. 16 | */ 17 | default void notifyRestart() { 18 | /* no-op */ 19 | } 20 | 21 | /** 22 | * When the service should restart, in nanoseconds from the point when the service failed. A value 23 | * less than or equal to zero means an immediate restart. 24 | */ 25 | long restartDelay(); 26 | 27 | } 28 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/ReduceTypesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum ReduceTypesEnum { 6 | 7 | /** the output is the sum of all rows/columns of the matrix. */ 8 | REDUCE_SUM(opencv_core.REDUCE_SUM), /** the output is the mean vector of all rows/columns of the matrix. */ 9 | REDUCE_AVG(opencv_core.REDUCE_AVG), /** the output is the maximum (column/row-wise) of all rows/columns of the matrix. */ 10 | REDUCE_MAX(opencv_core.REDUCE_MAX), /** the output is the minimum (column/row-wise) of all rows/columns of the matrix. 
*/ 11 | REDUCE_MIN(opencv_core.REDUCE_MIN); 12 | 13 | public final int value; 14 | 15 | ReduceTypesEnum(int value) { 16 | this.value = value; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/util/UtilitySanityTest.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.util; 2 | 3 | 4 | import com.google.common.testing.AbstractPackageSanityTests; 5 | 6 | @SuppressWarnings("PMD.TestClassWithoutTestCases") 7 | public class UtilitySanityTest extends AbstractPackageSanityTests { 8 | public UtilitySanityTest() { 9 | super(); 10 | publicApiOnly(); 11 | ignoreClasses(c -> c.getName().contains("Mock")); 12 | ignoreClasses(DaemonThread.class::equals); 13 | } 14 | 15 | @SuppressWarnings("PMD.JUnit4TestShouldUseBeforeAnnotation") 16 | @Override 17 | public void setUp() { 18 | SafeShutdownTest.setUpSecurityManager(); 19 | } 20 | 21 | @SuppressWarnings("PMD.JUnit4TestShouldUseAfterAnnotation") 22 | @Override 23 | public void tearDown() { 24 | SafeShutdownTest.tearDownSecurityManager(); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/StepIndexer.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | import java.util.Comparator; 4 | 5 | /** 6 | * An interface for getting the indices of steps. 7 | */ 8 | public interface StepIndexer extends Comparator { 9 | 10 | /** 11 | * Gets the index of the given step. 12 | * 13 | * @param step the step to get the index of 14 | * @return the index of the given step, or -1 if this object does not contain that step 15 | */ 16 | int indexOf(Step step); 17 | 18 | /** 19 | * Compares two steps based on their indexes. This is not consistent with {@code equals()}. 
20 | * 21 | * @param o1 the first step to compare 22 | * @param o2 the second step to compare 23 | */ 24 | @Override 25 | default int compare(Step o1, Step o2) { 26 | return indexOf(o1) - indexOf(o2); 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/LineSegmentDetectorModesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum LineSegmentDetectorModesEnum { 6 | 7 | /** No refinement applied */ 8 | LSD_REFINE_NONE(opencv_imgproc.LSD_REFINE_NONE), /** Standard refinement is applied. E.g. breaking arches into smaller straighter line approximations. */ 9 | LSD_REFINE_STD(opencv_imgproc.LSD_REFINE_STD), /** Advanced refinement. Number of false alarms is calculated, lines are 10 | * refined through increase of precision, decrement in size, etc. 
*/ 11 | LSD_REFINE_ADV(opencv_imgproc.LSD_REFINE_ADV); 12 | 13 | public final int value; 14 | 15 | LineSegmentDetectorModesEnum(int value) { 16 | this.value = value; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/MorphShapesEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum MorphShapesEnum { 6 | 7 | /** a rectangular structuring element: \f[E_{ij}=1\f] */ 8 | MORPH_RECT(opencv_imgproc.MORPH_RECT), /** a cross-shaped structuring element: 9 | * \f[E_{ij} = \fork{1}{if i=\texttt{anchor.y} or j=\texttt{anchor.x}}{0}{otherwise}\f] */ 10 | MORPH_CROSS(opencv_imgproc.MORPH_CROSS), /** an elliptic structuring element, that is, a filled ellipse inscribed 11 | * into the rectangle Rect(0, 0, esize.width, 0.esize.height) */ 12 | MORPH_ELLIPSE(opencv_imgproc.MORPH_ELLIPSE); 13 | 14 | public final int value; 15 | 16 | MorphShapesEnum(int value) { 17 | this.value = value; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/sources/SimpleMockFrameGrabber.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sources; 2 | 3 | import org.bytedeco.javacv.Frame; 4 | import org.bytedeco.javacv.FrameGrabber; 5 | 6 | @SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.SignatureDeclareThrowsException"}) 7 | class SimpleMockFrameGrabber extends FrameGrabber { 8 | 9 | @Override 10 | public void start() throws Exception { 11 | /* no-op */ 12 | } 13 | 14 | @Override 15 | public void stop() throws Exception { 16 | /* no-op */ 17 | } 18 | 19 | @Override 20 | public void trigger() throws Exception { 21 | /* no-op */ 22 | } 23 | 24 | @Override 25 
| public Frame grab() throws Exception { 26 | return null; 27 | } 28 | 29 | @Override 30 | public void release() throws Exception { 31 | /* no-op */ 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/events/ProjectSettingsChangedEvent.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.events; 2 | 3 | import edu.wpi.grip.core.settings.ProjectSettings; 4 | 5 | import static com.google.common.base.Preconditions.checkNotNull; 6 | 7 | /** 8 | * This event is posted after the {@link ProjectSettings} are changed so anything that relies on 9 | * them can immediately update without restarting the application. 10 | */ 11 | public class ProjectSettingsChangedEvent implements DirtiesSaveEvent, LoggableEvent { 12 | private final ProjectSettings projectSettings; 13 | 14 | public ProjectSettingsChangedEvent(ProjectSettings projectSettings) { 15 | this.projectSettings = checkNotNull(projectSettings, "Project settings cannot be null"); 16 | } 17 | 18 | public ProjectSettings getProjectSettings() { 19 | return projectSettings; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Watershed.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Isolates overlapping objects from the background and each other. 3 | * 4 | * @param input The image used to create the watershed. 5 | * @param contours The contours used to create the watershed. 6 | * @param output The image where the output is stored. 
7 | */ 8 | void $className::#func($step ["input", "contours", "output"]) { 9 | cv::Mat markers(input->size(), cv::CV_32SC1, cv::Scalar(0.0)); 10 | for (int i = 0; i < contours->size(); i++) { 11 | cv::drawContours(markers, contours, i, cv::Scalar::all((i + 1) * (255/contours->size()))); 12 | } 13 | cv::circle(markers, cv::Point(5, 5), 3, cv::Scalar::all(255), -1, LINE_8, 0); 14 | cv::watershed(input, markers); 15 | markers->convertTo(output, cv::CV_8UC1); 16 | cv::bitwise_not(output, output); 17 | } 18 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/operations/network/ros/ROSMessagePublisher.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.operations.network.ros; 2 | 3 | 4 | import edu.wpi.grip.core.operations.network.NetworkPublisher; 5 | 6 | import org.ros.internal.message.Message; 7 | import org.ros.message.MessageFactory; 8 | 9 | /** 10 | * A publisher that uses {@link ROSMessagePublisher.Converter} to resolve how the node should 11 | * publish messages. 12 | */ 13 | public abstract class ROSMessagePublisher extends NetworkPublisher { 14 | 15 | @FunctionalInterface 16 | public interface Converter { 17 | /** 18 | * @param m The message to store the data in. 19 | * @param messageFactory A factory to create anny additional messages needed. 20 | */ 21 | void convert(Message m, MessageFactory messageFactory); 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Get_Mat_Info.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Gets information about given Mat. 3 | * @param src Mat to get info about. 4 | * @param size the size of the Mat. 5 | * @param empty if the Mat is empty. 6 | * @param channels number of channels in the Mat. 
7 | * @param cols number of columns in the Mat. 8 | * @param rows number of rows in the Mat.
5 | * @param pt2 opposite corner of the rectangle. 6 | * @param color Scalar indicating color to make the rectangle. 7 | * @param thickness Thickness of the lines of the rectangle. 8 | * @param lineType Type of line for the rectangle. 9 | * @param shift Number of decimal places in the points. 10 | * @param dst output image. 11 | */ 12 | private void $tMeth.name($step.name())(Mat src, Point pt1, Point pt2, Scalar color, 13 | double thickness, int lineType, double shift, Mat dst) { 14 | src.copyTo(dst); 15 | if (color == null) { 16 | color = Scalar.all(1.0); 17 | } 18 | Imgproc.rectangle(dst, pt1, pt2, color, (int)thickness, lineType, (int)shift); 19 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/SolveLPResultEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum SolveLPResultEnum { 6 | 7 | /** problem is unbounded (target function can achieve arbitrary high values) */ 8 | SOLVELP_UNBOUNDED(opencv_core.SOLVELP_UNBOUNDED), /** problem is unfeasible (there are no points that satisfy all the constraints imposed) */ 9 | SOLVELP_UNFEASIBLE(opencv_core.SOLVELP_UNFEASIBLE), /** there is only one maximum for target function */ 10 | SOLVELP_SINGLE(opencv_core.SOLVELP_SINGLE), /** there are multiple maxima for target function - the arbitrary one is returned */ 11 | SOLVELP_MULTI(opencv_core.SOLVELP_MULTI); 12 | 13 | public final int value; 14 | 15 | SolveLPResultEnum(int value) { 16 | this.value = value; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Filter_Lines.vm: -------------------------------------------------------------------------------- 1 | #parse("$vmLoc/operations/Lines.vm") 2 | 
/** 3 | * Filters out lines that do not meet certain criteria. 4 | * @param inputs The lines that will be filtered. 5 | * @param minLength The minimum length of a line to be kept. 6 | * @param angle The minimum and maximum angle of a line to be kept. 7 | * @param outputs The output lines after the filter. 8 | */ 9 | private void $tMeth.name($step.name())(List inputs,double minLength,double[] angle, 10 | List outputs) { 11 | outputs.clear(); 12 | outputs.addAll(inputs.stream() 13 | .filter(line -> line.lengthSquared() >= Math.pow(minLength,2)) 14 | .filter(line -> (line.angle() >= angle[0] && line.angle() <= angle[1]) 15 | || (line.angle() + 180.0 >= angle[0] && line.angle() + 180.0 <= angle[1])) 16 | .collect(Collectors.toList())); 17 | } -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/MockOperation.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | 4 | import edu.wpi.grip.core.sockets.InputSocket; 5 | import edu.wpi.grip.core.sockets.OutputSocket; 6 | 7 | import com.google.common.collect.ImmutableList; 8 | 9 | import java.util.List; 10 | 11 | public class MockOperation implements Operation { 12 | public static final OperationDescription DESCRIPTION 13 | = OperationDescription.builder() 14 | .name("Mock Operation") 15 | .summary("A mock operation summary") 16 | .build(); 17 | 18 | @Override 19 | public List getInputSockets() { 20 | return ImmutableList.of(); 21 | } 22 | 23 | @Override 24 | public List getOutputSockets() { 25 | return ImmutableList.of(); 26 | } 27 | 28 | @Override 29 | public void perform() { 30 | // This operation does nothing because it is a mock. 
31 | } 32 | } 33 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/MockStep.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | import edu.wpi.grip.core.metrics.MockTimer; 4 | import edu.wpi.grip.core.util.MockExceptionWitness; 5 | 6 | import com.google.common.eventbus.EventBus; 7 | 8 | import java.util.Collections; 9 | 10 | public class MockStep extends Step { 11 | 12 | public MockStep() { 13 | super(null, 14 | MockOperation.DESCRIPTION, 15 | Collections.emptyList(), 16 | Collections.emptyList(), 17 | origin -> null, 18 | source -> null); 19 | } 20 | 21 | public static Step createMockStepWithOperation() { 22 | final EventBus eventBus = new EventBus(); 23 | return new Step.Factory(origin -> new MockExceptionWitness(eventBus, origin), 24 | source -> new MockTimer(eventBus, source)) 25 | .create(new OperationMetaData(MockOperation.DESCRIPTION, MockOperation::new)); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/core/settings/AppSettingsBeanInfo.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.settings; 2 | 3 | /** 4 | * BeanInfo class for {@link AppSettings}. This inspects annotations on the properties in 5 | * AppSettings to produce PropertyDescriptors with proper display names and descriptions. 6 | * ControlsFX's PropertySheet control uses JavaBean properties to generate the settings editor, so 7 | * we need this class in order to make the properties have user-presentable names and descriptions. 8 | * Another way to do this without annotations would be to hardcode a bunch of PropertyDescriptors 9 | * here, but that would be error-prone (we would get no warning if we add a new setting and forget 10 | * to add a descriptor here). 
11 | */ 12 | public class AppSettingsBeanInfo extends SimpleSettingsBeanInfo { 13 | 14 | public AppSettingsBeanInfo() { 15 | super(AppSettings.class); 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Get_Mat_Info.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src): 3 | """Gets information about given Mat. 4 | Args: 5 | src: A numpy.ndarray. 6 | Returns: 7 | The size of the mat as a list of two numbers. 8 | A boolean that is true if the mat is empty. 9 | The number of the channels in the mat. 10 | The number of columns. 11 | The number of rows. 12 | The highest value in the mat. 13 | """ 14 | cols, rows, channels = src.shape 15 | empty = (src.size==0) 16 | lowest_value, highest_value = cv2.minMaxLoc(src if (channels == 1) 17 | else cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)) 18 | mat_size = (rows, cols) 19 | return mat_size, empty, channels, cols, rows, highest_value -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_GaussianBlur.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, k_size, sigma_x, sigma_y, border_type): 3 | """Performs a Gaussian blur on the image. 4 | Args: 5 | src: A numpy.ndarray. 6 | k_size: A list of two numbers that represent the kernel size. 7 | sigma_x: The deviation in X for the Gaussian blur. 8 | sigma_y: The deviation in X for the Gaussian blur. 9 | border_type: Opencv enum representing the border type. 10 | Returns: 11 | A blurred numpy.ndarray. 
12 | """ 13 | if (k_size == None): 14 | k_size = (1, 1) 15 | k_size_int = ((int)(k_size[0]),(int)(k_size[1])) 16 | return cv2.GaussianBlur(src, k_size_int, sigmaX=sigma_x, sigmaY=sigma_y, 17 | borderType=(int)(border_type)) -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/MockConnection.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | 4 | import edu.wpi.grip.core.sockets.InputSocket; 5 | import edu.wpi.grip.core.sockets.OutputSocket; 6 | 7 | import com.google.common.eventbus.EventBus; 8 | import com.google.inject.assistedinject.Assisted; 9 | 10 | public class MockConnection extends Connection { 11 | 12 | /** 13 | * @param connectionValidator An object to validate that the connection can be made 14 | * @param outputSocket The socket to listen for changes in. 15 | * @param inputSocket A different socket to update when a change occurs in the first. 16 | */ 17 | public MockConnection(EventBus eventBus, ConnectionValidator connectionValidator, @Assisted 18 | OutputSocket outputSocket, @Assisted InputSocket inputSocket) { 19 | super(eventBus, connectionValidator, outputSocket, inputSocket); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/Filter_Lines.vm: -------------------------------------------------------------------------------- 1 | #parse("$vmLoc/operations/Lines.vm") 2 | @staticmethod 3 | def $tMeth.name($step.name())(inputs, min_length, angle): 4 | """Filters out lines that do not meet certain criteria. 5 | Args: 6 | inputs: A list of Lines. 7 | min_Lenght: The minimum lenght that will be kept. 8 | angle: The minimum and maximum angles in degrees as a list of two numbers. 9 | Returns: 10 | A filtered list of Lines. 
11 | """ 12 | outputs = [] 13 | for line in inputs: 14 | if (line.length() > min_length): 15 | if ((line.angle() >= angle[0] and line.angle() <= angle[1]) or 16 | (line.angle() + 180.0 >= angle[0] and line.angle() + 180.0 <= angle[1])): 17 | outputs.append(line) 18 | return outputs -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/sockets/OutputSocket.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | 4 | import edu.wpi.grip.core.Operation; 5 | 6 | /** 7 | * Represents the output of an {@link Operation}. 8 | * 9 | * @param The type of the value that this socket stores. 10 | */ 11 | public interface OutputSocket extends Socket { 12 | 13 | /** 14 | * @return Whether or not this socket is shown in a preview in the GUI. 15 | * @see #setPreviewed(boolean) d(boolean) 16 | */ 17 | boolean isPreviewed(); 18 | 19 | /** 20 | * @param previewed If true, this socket will be shown in a preview in the GUI. 21 | */ 22 | void setPreviewed(boolean previewed); 23 | 24 | /** 25 | * Resets the value of this socket to its initial value. 26 | */ 27 | void resetValueToInitial(); 28 | 29 | interface Factory { 30 | OutputSocket create(SocketHint hint); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /core/src/test/java/edu/wpi/grip/core/ManualPipelineRunner.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core; 2 | 3 | import edu.wpi.grip.core.metrics.MockTimer; 4 | 5 | import com.google.common.eventbus.EventBus; 6 | import com.google.inject.Inject; 7 | 8 | /* 9 | * Do not extend this class. 
The object is registered in the constructor 10 | */ 11 | public final class ManualPipelineRunner extends PipelineRunner { 12 | 13 | @Inject 14 | public ManualPipelineRunner(EventBus eventBus, Pipeline pipeline) { 15 | super(eventBus, () -> pipeline, MockTimer.simpleFactory(eventBus)); 16 | // This is fine because it is in a test 17 | eventBus.register(this); 18 | } 19 | 20 | @Override 21 | public PipelineRunner startAsync() { 22 | // NOPE 23 | return this; 24 | } 25 | 26 | @SuppressWarnings("PMD.UselessOverridingMethod") 27 | @Override 28 | public void runPipeline() { 29 | super.runPipeline(); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/cpp/operations/Cascade_Classifier.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Runs a cascade classifier on an image. 3 | * 4 | * @param input the image to run the classifier on 5 | * @param classifier the classifier to use 6 | * @param scaleFactor the scale factor of each successive downsized image 7 | * @param minNeighbors how many neighbors each candidate rectangle should have to retain it 8 | * @param minSize the minimum possible object size 9 | * @param maxSize the maximum possible object size. 
If (0, 0), it is assumed to be unbounded 10 | * @param detections the vector of Rects to store the detected regions in 11 | */ 12 | void $className::#func($step ["input", "classifier", "scaleFactor", "minNeighbors", "minSize", "maxSize", "detections"]) { 13 | classifier.detectMultiScale(input, detections, scaleFactor, minNeighbors, 0, minSize, maxSize); 14 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/core/sockets/NoSocketTypeLabel.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.sockets; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | /** 9 | * By default, the GUI shows labels for the type of each socket based on {@link 10 | * Class#getSimpleName()}. This is useful as a hint to the user of what connections would be valid 11 | * (type-safe) to make. However, for some types this information is kind of useless, since the type 12 | * is either named almost exactly the same as the identifier (for example, 13 | * MySettingsEnum/MySettings), or the type is just so long that it gets cut off in the UI and it's 14 | * probably just best to not include it. 
15 | */ 16 | @Retention(RetentionPolicy.RUNTIME) 17 | @Target(ElementType.TYPE) 18 | public @interface NoSocketTypeLabel { 19 | } 20 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'GRIP' 2 | 3 | include 'annotation' 4 | include 'core' 5 | include 'ui' 6 | include 'ui:linuxLauncher' 7 | include 'ui:preloader' 8 | 9 | rootProject.children.each { 10 | setUpChildProject(it) 11 | } 12 | 13 | private void setUpChildProject(ProjectDescriptor project) { 14 | /* 15 | * Instead of every file being named build.gradle.kts we instead use the name ${project.name}.gradle.kts. 16 | * This is much nicer for searching for the file in your IDE. 17 | */ 18 | final String groovyName = "${project.name}.gradle" 19 | final String kotlinName = "${project.name}.gradle.kts" 20 | project.buildFileName = groovyName 21 | if (!project.buildFile.isFile()) { 22 | project.buildFileName = kotlinName 23 | } 24 | assert project.buildFile.isFile(): "File named $groovyName or $kotlinName must exist." 25 | project.children.each { setUpChildProject(it) } 26 | } 27 | -------------------------------------------------------------------------------- /ui/src/main/java/edu/wpi/grip/core/settings/ProjectSettingsBeanInfo.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.core.settings; 2 | 3 | /** 4 | * BeanInfo class for {@link ProjectSettings}. This inspects annotations on the properties in 5 | * ProjectSettings to produce PropertyDescriptors with proper display names and descriptions. 6 | * ControlsFX's PropertySheet control uses JavaBean properties to generate the settings editor, so 7 | * we need this class in order to make the properties have user-presentable names and descriptions. 
8 | * Another way to do this without annotations would be to hardcode a bunch of PropertyDescriptors 9 | * here, but that would be error-prone (we would get no warning if we add a new setting and forget 10 | * to add a descriptor here). 11 | */ 12 | public class ProjectSettingsBeanInfo extends SimpleSettingsBeanInfo { 13 | 14 | public ProjectSettingsBeanInfo() { 15 | super(ProjectSettings.class); 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_core/enumeration/SortFlagsEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_core.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_core; 4 | 5 | public enum SortFlagsEnum { 6 | 7 | /** each matrix row is sorted independently */ 8 | SORT_EVERY_ROW(opencv_core.SORT_EVERY_ROW), /** each matrix column is sorted 9 | * independently; this flag and the previous one are 10 | * mutually exclusive. */ 11 | SORT_EVERY_COLUMN(opencv_core.SORT_EVERY_COLUMN), /** each matrix row is sorted in the ascending 12 | * order. */ 13 | SORT_ASCENDING(opencv_core.SORT_ASCENDING), /** each matrix row is sorted in the 14 | * descending order; this flag and the previous one are also 15 | * mutually exclusive. */ 16 | SORT_DESCENDING(opencv_core.SORT_DESCENDING); 17 | 18 | public final int value; 19 | 20 | SortFlagsEnum(int value) { 21 | this.value = value; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/python/operations/CV_adaptiveThreshold.vm: -------------------------------------------------------------------------------- 1 | @staticmethod 2 | def $tMeth.name($step.name())(src, max_value, adaptive_method, threshold_type, block_size, c): 3 | """Applies an adaptive threshold to an array. 4 | Args: 5 | src: A gray scale numpy.ndarray. 
6 | max_value: Value to assign to pixels that match the condition. 7 | adaptive_method: Adaptive threshold method to use. (opencv enum) 8 | threshold_type: Type of threshold to use. (opencv enum) 9 | block_size: Size of a pixel area that is used to calculate a threshold.(number) 10 | c: Constant to subtract from the mean.(number) 11 | Returns: 12 | A black and white numpy.ndarray. 13 | """ 14 | return cv2.adaptiveThreshold(src, max_value, adaptive_method, threshold_type, 15 | (int)(block_size + 0.5), c) -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_erode.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Expands area of lower value in an image. 3 | * @param src the Image to erode. 4 | * @param kernel the kernel for erosion. 5 | * @param anchor the center of the kernel. 6 | * @param iterations the number of times to perform the erosion. 7 | * @param borderType pixel extrapolation method. 8 | * @param borderValue value to be used for a constant border. 9 | * @param dst Output Image. 10 | */ 11 | private void $tMeth.name($step.name())(Mat src, Mat kernel, Point anchor, double iterations, 12 | int borderType, Scalar borderValue, Mat dst) { 13 | if (kernel == null) { 14 | kernel = new Mat(); 15 | } 16 | if (anchor == null) { 17 | anchor = new Point(-1,-1); 18 | } 19 | if (borderValue == null) { 20 | borderValue = new Scalar(-1); 21 | } 22 | Imgproc.erode(src, dst, kernel, anchor, (int)iterations, borderType, borderValue); 23 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/CV_dilate.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Expands area of higher value in an image. 3 | * @param src the Image to dilate. 4 | * @param kernel the kernel for dilation. 
5 | * @param anchor the center of the kernel. 6 | * @param iterations the number of times to perform the dilation. 7 | * @param borderType pixel extrapolation method. 8 | * @param borderValue value to be used for a constant border. 9 | * @param dst Output Image. 10 | */ 11 | private void $tMeth.name($step.name())(Mat src, Mat kernel, Point anchor, double iterations, 12 | int borderType, Scalar borderValue, Mat dst) { 13 | if (kernel == null) { 14 | kernel = new Mat(); 15 | } 16 | if (anchor == null) { 17 | anchor = new Point(-1,-1); 18 | } 19 | if (borderValue == null){ 20 | borderValue = new Scalar(-1); 21 | } 22 | Imgproc.dilate(src, dst, kernel, anchor, (int)iterations, borderType, borderValue); 23 | } -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Find_Min_and_Max.vm: -------------------------------------------------------------------------------- 1 | /** 2 | * Finds the minimum and maximum values of the Mat as well as the associated points. 3 | * @param src the Mat to find min and max for. 4 | * @param mask the Mat to use as a mask for the operation. 5 | * @param minVal the minimum value found in the Mat. 6 | * @param maxVal the maximum value found in the Mat. 7 | * @param minLoc the location of the minimum value. 8 | * @param maxLoc the location of the maximum value. 
9 | */ 10 | private void $tMeth.name($step.name())(Mat src, Mat mask, #RefOf("Number") minVal, 11 | #RefOf("Number") maxVal, Point minLoc, Point maxLoc) { 12 | MinMaxLocResult data = Core.minMaxLoc(src); 13 | minVal.set(Double.valueOf(data.minVal)); 14 | maxVal.set(Double.valueOf(data.maxVal)); 15 | minLoc.x = data.minLoc.x; 16 | minLoc.y = data.minLoc.y; 17 | maxLoc.x = data.maxLoc.x; 18 | maxLoc.y = data.maxLoc.y; 19 | } -------------------------------------------------------------------------------- /ui/src/test/java/edu/wpi/grip/ui/codegeneration/tools/PyLine.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.ui.codegeneration.tools; 2 | 3 | import org.opencv.core.Point; 4 | 5 | public class PyLine extends TestLine { 6 | private final double x1; 7 | private final double y1; 8 | private final double x2; 9 | private final double y2; 10 | 11 | public PyLine(double x1, double y1, double x2, double y2) { 12 | this.x1 = x1; 13 | this.y1 = y1; 14 | this.x2 = x2; 15 | this.y2 = y2; 16 | } 17 | 18 | @Override 19 | public double getLength() { 20 | return Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2)); 21 | } 22 | 23 | @Override 24 | public double getAngle() { 25 | return Math.toDegrees(Math.atan2(y2 - y1, x2 - x1)); 26 | } 27 | 28 | @Override 29 | public Point getPoint1() { 30 | return new Point(x1, y1); 31 | } 32 | 33 | @Override 34 | public Point getPoint2() { 35 | return new Point(x2, y2); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /ui/src/main/resources/edu/wpi/grip/ui/codegeneration/java/operations/Find_Lines.vm: -------------------------------------------------------------------------------- 1 | #parse("$vmLoc/operations/Lines.vm") 2 | /** 3 | * Finds all line segments in an image. 4 | * @param input The image on which to perform the find lines. 5 | * @param lineList The output where the lines are stored. 
6 | */ 7 | private void $tMeth.name($step.name())(Mat input, ArrayList lineList) { 8 | final LineSegmentDetector lsd = Imgproc.createLineSegmentDetector(); 9 | final Mat lines = new Mat(); 10 | lineList.clear(); 11 | if (input.channels() == 1) { 12 | lsd.detect(input, lines); 13 | } else { 14 | final Mat tmp = new Mat(); 15 | Imgproc.cvtColor(input, tmp, Imgproc.COLOR_BGR2GRAY); 16 | lsd.detect(tmp, lines); 17 | } 18 | if (!lines.empty()) { 19 | for (int i = 0; i < lines.rows(); i++) { 20 | lineList.add(new Line(lines.get(i, 0)[0], lines.get(i, 0)[1], 21 | lines.get(i, 0)[2], lines.get(i, 0)[3])); 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /core/src/main/java/edu/wpi/grip/generated/opencv_imgproc/enumeration/FloodFillFlagsEnum.java: -------------------------------------------------------------------------------- 1 | package edu.wpi.grip.generated.opencv_imgproc.enumeration; 2 | 3 | import org.bytedeco.javacpp.opencv_imgproc; 4 | 5 | public enum FloodFillFlagsEnum { 6 | 7 | /** If set, the difference between the current pixel and seed pixel is considered. Otherwise, 8 | the difference between neighbor pixels is considered (that is, the range is floating). */ 9 | FLOODFILL_FIXED_RANGE(opencv_imgproc.FLOODFILL_FIXED_RANGE), /** If set, the function does not change the image ( newVal is ignored), and only fills the 10 | mask with the value specified in bits 8-16 of flags as described above. This option only make 11 | sense in function variants that have the mask parameter. */ 12 | FLOODFILL_MASK_ONLY(opencv_imgproc.FLOODFILL_MASK_ONLY); 13 | 14 | public final int value; 15 | 16 | FloodFillFlagsEnum(int value) { 17 | this.value = value; 18 | } 19 | } 20 | --------------------------------------------------------------------------------