├── .gitignore
├── icons
│   ├── gtk-open.png
│   ├── snapshot.png
│   ├── gtk-go-back-ltr.png
│   ├── gtk-media-pause.png
│   ├── gtk-media-stop.png
│   ├── gtk-properties.png
│   ├── gtk-media-next-ltr.png
│   ├── gtk-media-next-rtl.png
│   ├── gtk-media-play-ltr.png
│   ├── gtk-media-record.png
│   ├── gtk-media-rewind-ltr.png
│   ├── gtk-media-rewind-rtl.png
│   ├── gtk-media-forward-ltr.png
│   ├── gtk-media-forward-rtl.png
│   ├── gtk-media-previous-ltr.png
│   ├── gtk-media-previous-rtl.png
│   └── gtk-media-record-timed.png
├── fmf-tools
│   ├── imagej-plugin
│   │   ├── Read_FMF.class
│   │   ├── Read_FMF_Virt.class
│   │   ├── HandleExtraFileTypes.jar
│   │   ├── HandleExtraFileTypes.class
│   │   ├── Read_FMF_Virt.java
│   │   └── Read_FMF.java
│   ├── matlab
│   │   ├── Readme.txt
│   │   └── FMF.m
│   └── Python
│       └── FMF.py
├── gigaviewer.desktop
├── video-fmf.xml
├── picbackend.cpp
├── mainwindow.h
├── imagepacket.cpp
├── picbackend.h
├── mainwindow.cpp
├── ellipsedetection.h
├── videoglscene.h
├── imagepacket.h
├── mrfsourcesink.h
├── opencvsourcesink.h
├── xvisourcesink.h
├── icons.qrc
├── regexsourcesink.h
├── ampliplugindialog.h
├── fileinputdialog.h
├── fmfsourcesink.h
├── kafkacontroller.h
├── imagesourcesink.h
├── hdf5sourcesink.h
├── cameracontrolsdialog.h
├── ellipsedetectiondialog.h
├── fmfbufferedsourcesink.h
├── vimbaframeobserver.h
├── idssourcesink.h
├── imagesourcesink.cpp
├── coordinator.h
├── avtsourcesink.h
├── interferoplugindialog.h
├── vimbasourcesink.h
├── kafkafrontend.h
├── playbackdialog.h
├── vimbaframeobserver.cpp
├── cambackend.h
├── fileinputdialog.cpp
├── README.txt
├── ampliplugindialog.ui
├── CompilingLinux.txt
├── CompilingMac.txt
├── ellipsedetectiondialog.cpp
├── ampliplugindialog.cpp
├── maingui.h
├── TODO.txt
├── CompilingWindows.txt
├── ellipsedetectiondialog.ui
├── mrfsourcesink.cpp
├── xvisourcesink.cpp
├── coordinator.cpp
├── fileinputdialog.ui
├── cameracontrolsdialog.ui
├── opencvsourcesink.cpp
├── interferoplugindialog.ui
├── videoglscene.cpp
├── cameracontrolsdialog.cpp
├── kafkacontroller.cpp
├── ellipsedetection.cpp
├── regexsourcesink.cpp
├── main.cpp
├── GigaViewer.pro
├── playbackdialog.cpp
├── fmfbufferedsourcesink.cpp
├── kafkafrontend.cpp
├── interferoplugindialog.cpp
└── maingui.cpp
/.gitignore:
--------------------------------------------------------------------------------
1 | /GigaViewer.pro.user
2 | /GigaViewer.pro.user.18
3 |
--------------------------------------------------------------------------------
/icons/gtk-open.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-open.png
--------------------------------------------------------------------------------
/icons/snapshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/snapshot.png
--------------------------------------------------------------------------------
/icons/gtk-go-back-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-go-back-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-pause.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-pause.png
--------------------------------------------------------------------------------
/icons/gtk-media-stop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-stop.png
--------------------------------------------------------------------------------
/icons/gtk-properties.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-properties.png
--------------------------------------------------------------------------------
/icons/gtk-media-next-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-next-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-next-rtl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-next-rtl.png
--------------------------------------------------------------------------------
/icons/gtk-media-play-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-play-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-record.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-record.png
--------------------------------------------------------------------------------
/icons/gtk-media-rewind-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-rewind-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-rewind-rtl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-rewind-rtl.png
--------------------------------------------------------------------------------
/icons/gtk-media-forward-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-forward-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-forward-rtl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-forward-rtl.png
--------------------------------------------------------------------------------
/icons/gtk-media-previous-ltr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-previous-ltr.png
--------------------------------------------------------------------------------
/icons/gtk-media-previous-rtl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-previous-rtl.png
--------------------------------------------------------------------------------
/icons/gtk-media-record-timed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/icons/gtk-media-record-timed.png
--------------------------------------------------------------------------------
/fmf-tools/imagej-plugin/Read_FMF.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/fmf-tools/imagej-plugin/Read_FMF.class
--------------------------------------------------------------------------------
/fmf-tools/imagej-plugin/Read_FMF_Virt.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/fmf-tools/imagej-plugin/Read_FMF_Virt.class
--------------------------------------------------------------------------------
/fmf-tools/imagej-plugin/HandleExtraFileTypes.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/fmf-tools/imagej-plugin/HandleExtraFileTypes.jar
--------------------------------------------------------------------------------
/fmf-tools/imagej-plugin/HandleExtraFileTypes.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SamDehaeck/GigaViewer/HEAD/fmf-tools/imagej-plugin/HandleExtraFileTypes.class
--------------------------------------------------------------------------------
/gigaviewer.desktop:
--------------------------------------------------------------------------------
1 | [Desktop Entry]
2 | Name=GigaViewer
3 | Exec=/usr/bin/GigaViewer %f
4 | MimeType=video/fmf
5 | Icon=GigaViewer
6 | Terminal=false
7 | Type=Application
8 | Categories=AudioVideo;Player;Video;
9 | Comment=
10 |
11 |
--------------------------------------------------------------------------------
/video-fmf.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
3 |   <mime-type type="video/fmf">
4 |     <comment>Fmf video recording</comment>
5 |     <glob pattern="*.fmf"/>
6 |   </mime-type>
7 | </mime-info>
8 |
--------------------------------------------------------------------------------
/picbackend.cpp:
--------------------------------------------------------------------------------
1 | #include "picbackend.h"
2 |
3 | PicBackend::PicBackend(QObject *parent) :
4 | QObject(parent)
5 | {
6 | }
7 |
8 | void PicBackend::LoadNewImage(QString theName)
9 | {
10 | #ifdef Q_OS_WIN32
11 | Image.image=cv::imread(theName.toStdString().c_str(),0);
12 | #else
13 | Image.image=cv::imread(theName.toUtf8().data(),0);
14 | #endif
15 | emit NewImageReady(Image);
16 | }
17 |
--------------------------------------------------------------------------------
/mainwindow.h:
--------------------------------------------------------------------------------
1 | #ifndef MAINWINDOW_H
2 | #define MAINWINDOW_H
3 |
4 | #include <QMainWindow>
5 |
6 | class MainWindow : public QMainWindow
7 | {
8 | Q_OBJECT
9 | public:
10 | explicit MainWindow(QWidget *parent = nullptr);
11 |
12 | signals:
13 | void windowClosed();
14 |
15 | public slots:
16 | void toggleFullscreen();
17 |
18 | protected:
19 | void closeEvent(QCloseEvent *);
20 | };
21 |
22 | #endif // MAINWINDOW_H
23 |
--------------------------------------------------------------------------------
/imagepacket.cpp:
--------------------------------------------------------------------------------
1 | #include "imagepacket.h"
2 |
3 | ImagePacket::ImagePacket():seqNumber(0),pixFormat(""),message()
4 | {
5 | }
6 |
7 | ImagePacket::ImagePacket(const ImagePacket &src)
8 | {
9 | image=src.image.clone();
10 | // fileName=src.fileName;
11 | seqNumber=src.seqNumber;
12 | timeStamp=src.timeStamp;
13 | pixFormat=src.pixFormat;
14 | message=src.message;
15 | }
16 |
17 | ImagePacket::~ImagePacket()
18 | {
19 | }
20 |
21 |
22 |
--------------------------------------------------------------------------------
/picbackend.h:
--------------------------------------------------------------------------------
1 | #ifndef PICBACKEND_H
2 | #define PICBACKEND_H
3 |
4 | #include <QObject>
5 | #include "imagepacket.h"
6 |
7 | class PicBackend : public QObject
8 | {
9 | Q_OBJECT
10 | public:
11 | explicit PicBackend(QObject *parent = 0);
12 |
13 | signals:
14 | void NewImageReady(ImagePacket im);
15 |
16 | public slots:
17 | void LoadNewImage(QString theName);
18 |
19 | private:
20 | ImagePacket Image;
21 |
22 | };
23 |
24 | #endif // PICBACKEND_H
25 |
--------------------------------------------------------------------------------
/mainwindow.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | #include <QCloseEvent>
3 | #include <QEvent>
4 |
5 | MainWindow::MainWindow(QWidget *parent) :
6 | QMainWindow(parent)
7 | {
8 | }
9 |
10 | void MainWindow::closeEvent(QCloseEvent *event)
11 | {
12 | emit windowClosed();
13 | event->accept();
14 | }
15 |
16 | void MainWindow::toggleFullscreen()
17 | {
18 | if (isFullScreen()) {
19 | showMaximized();
20 | } else {
21 | showFullScreen();
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/ellipsedetection.h:
--------------------------------------------------------------------------------
1 | #ifndef ELLIPSEDETECTION_H
2 | #define ELLIPSEDETECTION_H
3 |
4 | #include <QMap>
5 | #include <QStringList>
6 | #include "imagepacket.h"
7 |
8 | class EllipseDetection
9 | {
10 | private:
11 | int threshold;
12 | int minDiameter;
13 | int maxDiameter;
14 | bool activated;
15 | bool feedback;
16 | QStringList dataToSave;
17 | public:
18 | EllipseDetection(int thresh);
19 | void ChangeSettings(QMap settings);
20 | bool processImage(ImagePacket& currIm);
21 | };
22 |
23 | #endif // ELLIPSEDETECTION_H
24 |
--------------------------------------------------------------------------------
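
Note on ellipsedetection.h: processImage() is implemented in ellipsedetection.cpp, which is listed in the tree but not reproduced above. Purely as an illustration, below is a minimal sketch of one common way such a detector can be built with OpenCV using the threshold and diameter members declared in the header; the function name and the exact steps are assumptions, not the repository's actual algorithm.

    #include <vector>
    #include "opencv2/opencv.hpp"

    // Illustrative ellipse detection: binarise, find contours, fit ellipses, keep plausible sizes.
    static std::vector<cv::RotatedRect> detectEllipsesSketch(const cv::Mat& gray,
                                                             int threshold,
                                                             int minDiameter,
                                                             int maxDiameter)
    {
        std::vector<cv::RotatedRect> found;
        cv::Mat bin;
        cv::threshold(gray, bin, threshold, 255, cv::THRESH_BINARY);

        std::vector<std::vector<cv::Point> > contours;
        cv::findContours(bin, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

        for (size_t i = 0; i < contours.size(); ++i) {
            if (contours[i].size() < 5)           // fitEllipse needs at least 5 points
                continue;
            cv::RotatedRect e = cv::fitEllipse(contours[i]);
            double meanDiam = 0.5 * (e.size.width + e.size.height);
            if (meanDiam >= minDiameter && meanDiam <= maxDiameter)
                found.push_back(e);
        }
        return found;
    }
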
/fmf-tools/matlab/Readme.txt:
--------------------------------------------------------------------------------
1 | To open .fmf files in Matlab you need to use the provided FMF.m file
2 | => place it in a folder that is on the Matlab path or copy it to your working folder
3 | Then type:
4 | f=FMF('E:\path\to\file.fmf'); => opens the file and reads the header. For now only MONO8, MONO12 and MONO14 are implemented
5 | f.w, f.h, f.n_frames => these give you the width, height and number of frames in the movie
6 | [I,t]=f.getFrame(50); => I will be the image in double format and t the timestamp
7 | imshow(I,[]) => the extra ,[] is required for Matlab to use the true range of the image, otherwise just a white image is shown
8 |
--------------------------------------------------------------------------------
/videoglscene.h:
--------------------------------------------------------------------------------
1 | #ifndef VIDEOGLSCENE_H
2 | #define VIDEOGLSCENE_H
3 |
4 | #include <QGraphicsScene>
5 | //#include <QOpenGLFunctions_3_3_Compatibility>
6 | #include "opencv2/opencv.hpp"
7 | #include "fileinputdialog.h"
8 |
9 | class VideoGlScene : public QGraphicsScene//, protected QOpenGLFunctions_3_3_Compatibility
10 | {
11 | Q_OBJECT
12 | public:
13 | VideoGlScene(QList controlDialogs,QObject *parent = 0);
14 | void drawBackground(QPainter *painter, const QRectF &);
15 |
16 | cv::Mat imageBuff;
17 |
18 | signals:
19 |
20 | public slots:
21 |
22 | private:
23 | int didInitOpengl;
24 |
25 |
26 | };
27 |
28 | #endif // VIDEOGLSCENE_H
29 |
--------------------------------------------------------------------------------
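
Note on videoglscene.h: drawBackground() ultimately has to hand the OpenCV buffer in imageBuff to Qt's painting system. Below is a minimal sketch of that conversion, assuming an 8-bit single-channel buffer; the helper name is hypothetical and this is not the code in videoglscene.cpp.

    #include <QImage>
    #include <QPainter>
    #include "opencv2/opencv.hpp"

    // Hypothetical helper: wrap a continuous 8-bit grayscale cv::Mat in a QImage and paint it.
    static void paintMatSketch(QPainter* painter, const QRectF& rect, const cv::Mat& buff)
    {
        if (buff.empty() || buff.type() != CV_8UC1)
            return;
        QImage img(buff.data, buff.cols, buff.rows,
                   static_cast<int>(buff.step), QImage::Format_Grayscale8);
        painter->drawImage(rect, img);   // QImage only references buff's memory, so buff must outlive the call
    }
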
/imagepacket.h:
--------------------------------------------------------------------------------
1 | #ifndef IMAGE_H
2 | #define IMAGE_H
3 |
4 | #include "opencv2/opencv.hpp"
5 | #include <QString>
6 | #include <QMetaType>
7 | #include <QVariant>
8 | #include //for msvc10 compiler (to remove or make a condition for gcc)
9 |
10 | struct ImagePacket {
11 | cv::Mat image;//,mask,groupImage;
12 | // std::string fileName;
13 | int seqNumber;
14 | double timeStamp;
15 | QString pixFormat;
16 | QVariantMap message;
17 |
18 | ImagePacket();
19 | ImagePacket(const ImagePacket &src);
20 | ~ImagePacket();
21 | };
22 |
23 | Q_DECLARE_METATYPE(ImagePacket);
24 |
25 |
26 | #endif // IMAGE_H
27 |
--------------------------------------------------------------------------------
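
Note on imagepacket.h: Q_DECLARE_METATYPE makes ImagePacket known to QVariant at compile time, but passing it through queued signal/slot connections between threads (as the NewImageReady signals do) also needs a run-time registration. A minimal sketch, assuming the call is made once at startup; the function name is illustrative and main.cpp is not shown in this section.

    #include <QMetaType>
    #include "imagepacket.h"

    // Run-time registration so queued signal/slot connections can copy ImagePacket between threads.
    static void registerImagePacketSketch()
    {
        qRegisterMetaType<ImagePacket>("ImagePacket");
    }
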
/mrfsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef MRFSOURCESINK_H
2 | #define MRFSOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "imagesourcesink.h"
6 |
7 | class MrfSourceSink : public ImageSourceSink
8 | {
9 | public:
10 | bool Init(QString params="");
11 | bool StartAcquisition(QString dev="0");
12 | bool StopAcquisition();
13 | bool ReleaseCamera();
14 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
15 | bool IsOpened();
16 | bool SkipFrames(bool forward);
17 |
18 | private:
19 | FILE * mrf;
20 | int mrf_npixels;
21 | uint64_t headersize;
22 | uint64_t bytesperchunk;
23 | uint64_t footersize;
24 | uint32_t bitsperpixel;
25 | QString dataformat;
26 | int rows,cols;
27 | uint nFrames;
28 | int currPos;
29 | };
30 |
31 | #endif // MRFSOURCESINK_H
32 |
--------------------------------------------------------------------------------
/opencvsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef OPENCVSOURCESINK_H
2 | #define OPENCVSOURCESINK_H
3 |
4 | #include "imagesourcesink.h"
5 |
6 | class OpencvSourceSink: public ImageSourceSink
7 | {
8 | public:
9 | bool Init(QString params="");
10 | bool StartAcquisition(QString dev="0");
11 | bool StopAcquisition();
12 | bool ReleaseCamera();
13 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
14 | bool RecordFrame(ImagePacket& source);
15 | QString StartRecording(QString recFold, QString codec, int fps, int cols, int rows);
16 | bool StopRecording();
17 | bool IsOpened();
18 | bool SkipFrames(bool forward);
19 |
20 | private:
21 | cv::VideoCapture camera;
22 | cv::VideoWriter recFile;
23 | int nFrames;
24 | bool liveFeed;
25 | };
26 |
27 | #endif // OPENCVSOURCESINK_H
28 |
--------------------------------------------------------------------------------
/fmf-tools/Python/FMF.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | class FMF:
4 |     def __init__(self, filename):
5 |         f = open(filename, mode='rb')
6 |         version, forLength = np.fromfile(f, np.uint32, 2)
7 |         formatting = f.read(int(forLength)).decode('ascii')  # pixel format string, e.g. MONO8 or RGB8
8 |         bitsper, rows, cols = np.fromfile(f, np.uint32, 3)
9 |         bytesperChunk, N = np.fromfile(f, np.uint64, 2)
10 |         offsetHeader = f.tell()  # the frame chunks start right after the header
11 |         f.close()
12 |
13 |         if formatting != "RGB8":
14 |             if bitsper == 8:
15 |                 typeData = np.uint8
16 |             elif bitsper == 16:
17 |                 typeData = np.uint16
18 |
19 |             dataformat = np.dtype([('t', np.double), ('I', typeData, (rows, cols))])
20 |         else:
21 |             dataformat = np.dtype([('t', np.double), ('I', np.uint8, (rows, cols, 3))])
22 |
23 |         # memory-map the whole movie: each chunk is one double timestamp followed by the raw image
24 |         self.mapper = np.memmap(filename, dtype=dataformat, mode='r', offset=offsetHeader)
25 |         self.I = self.mapper['I']
26 |         self.T = self.mapper['t']
--------------------------------------------------------------------------------
/xvisourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef XVISOURCESINK_H
2 | #define XVISOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "imagesourcesink.h"
6 |
7 | class XviSourceSink : public ImageSourceSink
8 | {
9 | public:
10 | bool Init(QString params="");
11 | bool StartAcquisition(QString dev="0");
12 | bool StopAcquisition();
13 | bool ReleaseCamera();
14 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
15 | // bool RecordFrame(ImagePacket& source);
16 | // bool StartRecording(QString recFold, QString codec, int fps, int cols, int rows);
17 | // bool StopRecording();
18 | bool IsOpened();
19 | bool SkipFrames(bool forward);
20 |
21 | private:
22 | FILE * xvi;
23 | int xvinpixels;
24 | uint64_t headersize;
25 | uint64_t bytesperchunk;
26 | uint64_t footersize;
27 | uint32_t bitsperpixel;
28 | QString dataformat;
29 | int rows,cols;
30 | ulong nFrames;
31 | int currPos;
32 | };
33 |
34 | #endif // XVISOURCESINK_H
35 |
--------------------------------------------------------------------------------
/icons.qrc:
--------------------------------------------------------------------------------
1 | <RCC>
2 |     <qresource>
3 |         <file>icons/gtk-media-forward-ltr.png</file>
4 |         <file>icons/gtk-media-forward-rtl.png</file>
5 |         <file>icons/gtk-media-next-ltr.png</file>
6 |         <file>icons/gtk-media-next-rtl.png</file>
7 |         <file>icons/gtk-media-pause.png</file>
8 |         <file>icons/gtk-media-play-ltr.png</file>
9 |         <file>icons/gtk-media-previous-ltr.png</file>
10 |         <file>icons/gtk-media-previous-rtl.png</file>
11 |         <file>icons/gtk-media-record.png</file>
12 |         <file>icons/gtk-media-rewind-ltr.png</file>
13 |         <file>icons/gtk-media-rewind-rtl.png</file>
14 |         <file>icons/gtk-media-stop.png</file>
15 |         <file>icons/gtk-properties.png</file>
16 |         <file>icons/gtk-go-back-ltr.png</file>
17 |         <file>icons/gtk-open.png</file>
18 |         <file>icons/gtk-media-record-timed.png</file>
19 |         <file>icons/snapshot.png</file>
20 |     </qresource>
21 | </RCC>
22 |
--------------------------------------------------------------------------------
/regexsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef REGEXSOURCESINK_H
2 | #define REGEXSOURCESINK_H
3 |
4 | #include "imagesourcesink.h"
5 | #include
6 |
7 | class RegexSourceSink : public ImageSourceSink
8 | {
9 | public:
10 | bool Init(QString params="");
11 | bool StartAcquisition(QString dev="0");
12 | bool StopAcquisition();
13 | bool ReleaseCamera();
14 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
15 | bool RecordFrame(ImagePacket& source);
16 | QString StartRecording(QString recFold, QString codec, int fps, int cols, int rows);
17 | bool StopRecording();
18 | bool IsOpened();
19 | bool SkipFrames(bool forward);
20 |
21 | private:
22 |
23 | QStringList *goodFiles;
24 | QString dir;
25 | QString basename;
26 | QString extension;
27 | int index;
28 | int nFrames;
29 | QString pixFormat;
30 |
31 | QVector timestamps;
32 | QVector frames;
33 | double startTime;
34 |
35 | };
36 |
37 | #endif // REGEXSOURCESINK_H
38 |
--------------------------------------------------------------------------------
/ampliplugindialog.h:
--------------------------------------------------------------------------------
1 | #ifndef AMPLIPLUGINDIALOG_H
2 | #define AMPLIPLUGINDIALOG_H
3 | #include
4 | #include
5 | #include
6 | #include "imagepacket.h"
7 | #include
8 |
9 | namespace Ui {
10 | class AmpliPluginDialog;
11 | }
12 |
13 | class AmpliPluginDialog : public QDialog
14 | {
15 | Q_OBJECT
16 |
17 | public:
18 | explicit AmpliPluginDialog(QWidget *parent = nullptr);
19 | bool processImage(ImagePacket& currIm);
20 | ~AmpliPluginDialog();
21 |
22 | signals:
23 | void stateChanged(QMap newSettings);
24 |
25 | private slots:
26 | void on_activateBox_stateChanged(int);
27 |
28 | void on_rowSlider_sliderPressed();
29 | void on_rowSlider_sliderReleased();
30 | void on_rowSlider_valueChanged(int value);
31 |
32 | private:
33 | bool extractData();
34 | bool tSliderPressed;
35 | bool activated;
36 | bool axisDefined;
37 | QChart *myChart;
38 | int row;
39 |
40 |
41 |
42 |
43 | Ui::AmpliPluginDialog *ui;
44 | };
45 |
46 | #endif // AMPLIPluginDIALOG_H
47 |
--------------------------------------------------------------------------------
/fileinputdialog.h:
--------------------------------------------------------------------------------
1 | #ifndef FILEINPUTDIALOG_H
2 | #define FILEINPUTDIALOG_H
3 |
4 | #include <QDialog>
5 | #include <QDir>
6 |
7 | namespace Ui {
8 | class FileInputDialog;
9 | }
10 |
11 | class FileInputDialog : public QDialog
12 | {
13 | Q_OBJECT
14 |
15 | public:
16 | explicit FileInputDialog(QWidget *parent = 0);
17 | ~FileInputDialog();
18 |
19 | signals:
20 | void StaticPicPressed(QString namegiven);
21 | void OpencvFeedPressed();
22 | void MoviePressed(QString namegiven);
23 | void AvtFeedPressed();
24 | void VimbaFeedPressed();
25 | void IdsFeedPressed();
26 | void CloseApp();
27 |
28 | private slots:
29 | void on_filePushButton_clicked();
30 | void on_camButton_clicked();
31 |
32 | void on_MovieButton_clicked();
33 |
34 | void on_pushButton_2_clicked();
35 |
36 | void on_AvtButton_clicked();
37 |
38 | void on_vimbaButton_clicked();
39 |
40 | void on_idsButton_clicked();
41 |
42 | private:
43 | Ui::FileInputDialog *ui;
44 | QDir currentDir;
45 | QWidget* parent;
46 | };
47 |
48 | #endif // FILEINPUTDIALOG_H
49 |
--------------------------------------------------------------------------------
/fmfsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef FMFSOURCESINK_H
2 | #define FMFSOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "imagesourcesink.h"
6 |
7 | class FmfSourceSink : public ImageSourceSink
8 | {
9 | public:
10 | bool Init(QString params="");
11 | bool StartAcquisition(QString dev="0");
12 | bool StopAcquisition();
13 | bool ReleaseCamera();
14 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
15 | bool RecordFrame(ImagePacket& source);
16 | QString StartRecording(QString recFold, QString codec, int fps, int cols, int rows);
17 | bool StopRecording();
18 | bool IsOpened();
19 | bool SkipFrames(bool forward);
20 |
21 | private:
22 | FILE * fmf;
23 | FILE * fmfrec;
24 | int fmfnpixels;
25 | long headersize;
26 | long recheadersize;
27 | long recNframespos;
28 | uint64_t bytesperchunk;
29 | uint32_t bitsperpixel;
30 | QString dataformat;
31 | int rows,cols;
32 | int nFrames;
33 | int currPos;
34 | QString basename;
35 |
36 | QVector timestamps;
37 | double startTime;
38 | };
39 |
40 | #endif // FMFSOURCESINK_H
41 |
--------------------------------------------------------------------------------
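
Note on fmfsourcesink.h: the private members mirror the FMF file header. The on-disk layout, which is the same one parsed by fmf-tools/Python/FMF.py above, is: version and format-string length as uint32, the format string itself, bits per pixel, rows and cols as uint32, bytes per chunk and number of frames as uint64, followed by one chunk per frame made of a double timestamp plus the raw pixel data. Below is a minimal stand-alone sketch of reading that header; it is an illustration, not the code inside fmfsourcesink.cpp.

    #include <cstdio>
    #include <cstdint>
    #include <string>
    #include <vector>

    // Hypothetical stand-alone FMF header reader, following the field order used in fmf-tools/Python/FMF.py.
    struct FmfHeader {
        uint32_t version, bitsPerPixel, rows, cols;
        uint64_t bytesPerChunk, nFrames;
        std::string pixelFormat;
    };

    bool readFmfHeader(const char* path, FmfHeader& h)
    {
        FILE* f = std::fopen(path, "rb");
        if (!f) return false;
        uint32_t formatLen = 0;
        bool ok = std::fread(&h.version, sizeof h.version, 1, f) == 1
               && std::fread(&formatLen, sizeof formatLen, 1, f) == 1;
        if (ok) {
            std::vector<char> fmt(formatLen + 1, '\0');       // format string, e.g. "MONO8"
            ok = std::fread(fmt.data(), 1, formatLen, f) == formatLen;
            h.pixelFormat = fmt.data();
        }
        ok = ok && std::fread(&h.bitsPerPixel, sizeof h.bitsPerPixel, 1, f) == 1
                && std::fread(&h.rows, sizeof h.rows, 1, f) == 1
                && std::fread(&h.cols, sizeof h.cols, 1, f) == 1
                && std::fread(&h.bytesPerChunk, sizeof h.bytesPerChunk, 1, f) == 1
                && std::fread(&h.nFrames, sizeof h.nFrames, 1, f) == 1;
        std::fclose(f);
        return ok;   // each frame chunk that follows: one double timestamp + rows*cols*bitsPerPixel/8 bytes
    }
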
/kafkacontroller.h:
--------------------------------------------------------------------------------
1 | #ifndef KAFKACONTROLLER_H
2 | #define KAFKACONTROLLER_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include "librdkafka/rdkafka.h"
8 |
9 | class KafkaController : public QObject
10 | {
11 | Q_OBJECT
12 | public:
13 | explicit KafkaController(QString topic,QString groupId,QString host,QString earliestLatest,bool randomId,int timeout,QObject *parent = nullptr);
14 |
15 | rd_kafka_t* makeConsumerHandle(QString topic,QString groupId,QString host,QString earliestLatest,bool randomId);
16 | QList consumeMsg(rd_kafka_t* handle,int timeout,int maxMessages=100);
17 |
18 | signals:
19 | void sendNewData(QList data);
20 | void testNewData(QString sometext);
21 |
22 | public slots:
23 | void willStartTheTimer(int interval);
24 | void willStopTheTimer();
25 | void timedConsumption();
26 |
27 | private:
28 | QString topic;
29 | QTimer* timer;
30 | rd_kafka_t* consumerHdle;
31 | int timeout; //this is the timeout value for asking for new messages.
32 | };
33 |
34 | #endif // KAFKACONTROLLER_H
35 |
--------------------------------------------------------------------------------
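
Note on kafkacontroller.h: makeConsumerHandle() wraps the plain librdkafka C API. Below is a minimal sketch of the steps such a helper typically performs (configure broker, group id and offset policy, create the consumer, subscribe); kafkacontroller.cpp is not reproduced here, so the body below is an assumption rather than the actual implementation.

    #include "librdkafka/rdkafka.h"

    // Illustrative consumer setup with the librdkafka C API.
    rd_kafka_t* makeConsumerSketch(const char* topic, const char* groupId,
                                   const char* host, const char* earliestLatest)
    {
        char errstr[512];
        rd_kafka_conf_t* conf = rd_kafka_conf_new();
        rd_kafka_conf_set(conf, "bootstrap.servers", host, errstr, sizeof(errstr));
        rd_kafka_conf_set(conf, "group.id", groupId, errstr, sizeof(errstr));
        rd_kafka_conf_set(conf, "auto.offset.reset", earliestLatest, errstr, sizeof(errstr));

        rd_kafka_t* rk = rd_kafka_new(RD_KAFKA_CONSUMER, conf, errstr, sizeof(errstr));
        if (!rk) return nullptr;
        rd_kafka_poll_set_consumer(rk);                       // route messages to the consumer queue

        rd_kafka_topic_partition_list_t* topics = rd_kafka_topic_partition_list_new(1);
        rd_kafka_topic_partition_list_add(topics, topic, RD_KAFKA_PARTITION_UA);
        rd_kafka_subscribe(rk, topics);
        rd_kafka_topic_partition_list_destroy(topics);
        return rk;
    }

    // consumeMsg() then boils down to polling in a loop:
    //   rd_kafka_message_t* msg = rd_kafka_consumer_poll(rk, timeout_ms);
    //   ... read msg->payload / msg->len ...
    //   rd_kafka_message_destroy(msg);
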
/imagesourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef IMAGESOURCESINK_H
2 | #define IMAGESOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "QtGui"
6 |
7 | class Sleeper : public QThread {
8 | public:
9 | static void msleep(unsigned long msecs){QThread::msleep(msecs);}
10 | };
11 |
12 | class ImageSourceSink
13 | {
14 | public:
15 |
16 | virtual bool Init(QString params="");
17 | virtual bool StartAcquisition(QString dev="0");
18 | virtual bool StopAcquisition();
19 | virtual bool ReleaseCamera();
20 | virtual bool GrabFrame(ImagePacket& target,int indexIncrement=1);
21 | virtual bool RecordFrame(ImagePacket& source);
22 | virtual QString StartRecording(QString recFold, QString codec,int fps, int cols, int rows);
23 | virtual bool StopRecording();
24 | virtual bool IsOpened();
25 | virtual double SetInterval(double msec);
26 | virtual bool SetRoiRows(int rows);
27 | virtual bool SetRoiCols(int cols);
28 | virtual bool SetShutter(int shutTime);
29 | virtual int SetAutoShutter(bool fitRange);
30 | virtual bool SkipFrames(bool forward);
31 | virtual ~ImageSourceSink() {}
32 | };
33 |
34 | #endif // IMAGESOURCESINK_H
35 |
--------------------------------------------------------------------------------
/hdf5sourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef HDF5SOURCESINK_H
2 | #define HDF5SOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "imagesourcesink.h"
6 | #include "H5Cpp.h"
7 |
8 |
9 | class Hdf5SourceSink : public ImageSourceSink //#include
10 | {
11 | public:
12 | bool Init(QString params="");
13 | bool StartAcquisition(QString dev="0");
14 | bool StopAcquisition();
15 | bool ReleaseCamera();
16 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
17 | bool RecordFrame(ImagePacket& source);
18 | QString StartRecording(QString recFold, QString codec, int fps, int cols, int rows);
19 | bool StopRecording();
20 | bool IsOpened();
21 | bool SkipFrames(bool forward);
22 |
23 | private:
24 | H5::H5File *hFile;
25 | H5::DataSpace dataspace;
26 | H5::DataSet dataset;
27 | H5T_class_t dataclass;
28 | H5::DataSpace* memspace;
29 | hsize_t dims[3];
30 | hsize_t recrows,reccols;
31 | H5::DataType readType;
32 | H5::DSetCreatPropList cparms;
33 |
34 | unsigned long index;
35 |
36 | cv::Mat frame;
37 | QString dataformat;
38 | std::vector timestamps;
39 | bool timepresent;
40 |
41 | };
42 |
43 | #endif // HDF5SOURCESINK_H
44 |
--------------------------------------------------------------------------------
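
Note on hdf5sourcesink.h: GrabFrame() has to pull a single image out of a 3-dimensional HDF5 dataset through a hyperslab selection, which is what the dataspace/memspace/dims members are for. Below is a minimal stand-alone sketch of that pattern with the C++ H5Cpp API; the dataset name "/images" and the frames-first layout are hypothetical placeholders, not necessarily what GigaViewer's recordings use.

    #include <vector>
    #include "H5Cpp.h"

    // Illustrative only: read frame 'index' from a 3D uint8 dataset.
    std::vector<unsigned char> readHdf5FrameSketch(const char* path, hsize_t index)
    {
        H5::H5File file(path, H5F_ACC_RDONLY);
        H5::DataSet dataset = file.openDataSet("/images");    // hypothetical dataset name
        H5::DataSpace filespace = dataset.getSpace();

        hsize_t dims[3];
        filespace.getSimpleExtentDims(dims);                  // assumed layout: {frames, rows, cols}

        hsize_t offset[3] = { index, 0, 0 };
        hsize_t count[3]  = { 1, dims[1], dims[2] };
        filespace.selectHyperslab(H5S_SELECT_SET, count, offset);

        H5::DataSpace memspace(3, count);                     // memory layout of a single frame
        std::vector<unsigned char> frame(dims[1] * dims[2]);
        dataset.read(frame.data(), H5::PredType::NATIVE_UINT8, memspace, filespace);
        return frame;
    }
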
/cameracontrolsdialog.h:
--------------------------------------------------------------------------------
1 | #ifndef CAMERACONTROLSDIALOG_H
2 | #define CAMERACONTROLSDIALOG_H
3 |
4 | #include
5 | #include
6 | #include "imagepacket.h"
7 |
8 | namespace Ui {
9 | class CameraControlsDialog;
10 | }
11 |
12 | class CameraControlsDialog : public QDialog
13 | {
14 | Q_OBJECT
15 |
16 | public:
17 | explicit CameraControlsDialog(QWidget *parent = 0);
18 | void GotNewShutterSpeed(int shut);
19 | ~CameraControlsDialog();
20 |
21 | signals:
22 | void NeedNewSample();
23 | void SetShutterSpeed(int shut);
24 | void SetAutoShutter(bool fitRange);
25 | void SetRoiRows(int rows);
26 | void SetRoiCols(int cols);
27 |
28 | public slots:
29 | void GotNewSample(ImagePacket imP);
30 |
31 |
32 | private slots:
33 | void on_IntensityButton_clicked();
34 | void on_shutterSpinBox_valueChanged(int arg1);
35 |
36 | void on_FitRangeButton_clicked();
37 |
38 | void on_FitMeanButton_clicked();
39 |
40 | void on_ROIRows_editingFinished();
41 |
42 | void on_ROICols_editingFinished();
43 |
44 | void on_pushButton_2_clicked();
45 |
46 | private:
47 | Ui::CameraControlsDialog *ui;
48 | int shutSpeed;
49 | };
50 |
51 | #endif // CAMERACONTROLSDIALOG_H
52 |
--------------------------------------------------------------------------------
/ellipsedetectiondialog.h:
--------------------------------------------------------------------------------
1 | #ifndef ELLIPSEDETECTIONDIALOG_H
2 | #define ELLIPSEDETECTIONDIALOG_H
3 | #include <QDialog>
4 | #include <QMap>
5 |
6 | namespace Ui {
7 | class EllipseDetectionDialog;
8 | }
9 |
10 | class EllipseDetectionDialog : public QDialog
11 | {
12 | Q_OBJECT
13 |
14 | public:
15 | explicit EllipseDetectionDialog(QWidget *parent = 0);
16 | ~EllipseDetectionDialog();
17 |
18 | signals:
19 | void stateChanged(QMap newSettings);
20 |
21 | private slots:
22 | void on_activateBox_stateChanged(int val);
23 | void on_feedbackButton_clicked(bool checked);
24 |
25 | void on_thresholdSlider_sliderPressed();
26 | void on_thresholdSlider_sliderReleased();
27 | void on_thresholdSlider_valueChanged(int value);
28 |
29 | void on_MinDiameter_sliderPressed();
30 | void on_MinDiameter_sliderReleased();
31 | void on_MinDiameter_valueChanged(int value);
32 |
33 | void on_MaxDiameter_sliderPressed();
34 | void on_MaxDiameter_sliderReleased();
35 | void on_MaxDiameter_valueChanged(int value);
36 |
37 | private:
38 | bool extractData();
39 | bool tSliderPressed,xSliderPressed,ySliderPressed;
40 | bool feedback;
41 |
42 |
43 |
44 |
45 | Ui::EllipseDetectionDialog *ui;
46 | };
47 |
48 | #endif // EllipseDetectionDIALOG_H
49 |
--------------------------------------------------------------------------------
/fmfbufferedsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef FMFBUFFEREDSOURCESINK_H
2 | #define FMFBUFFEREDSOURCESINK_H
3 |
4 | #include "imagepacket.h"
5 | #include "imagesourcesink.h"
6 |
7 | class FmfBufferedSourceSink : public ImageSourceSink
8 | {
9 | public:
10 | bool Init(QString params="");
11 | bool StartAcquisition(QString dev="0");
12 | bool StopAcquisition();
13 | bool ReleaseCamera();
14 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
15 | bool RecordFrame(ImagePacket& source);
16 | QString StartRecording(QString recFold, QString codec, int fps, int reccols, int recrows);
17 | bool StopRecording();
18 | bool IsOpened();
19 | bool SkipFrames(bool forward);
20 |
21 | private:
22 | FILE * fmf;
23 | FILE * fmfrec;
24 | int fmfnpixels;
25 | long headersize;
26 | long recheadersize;
27 | long recNframespos;
28 | uint64_t bytesperchunk;
29 | uint32_t bitsperpixel;
30 | QString dataformat;
31 | int rows,cols;
32 | int nFrames;
33 | int currPos;
34 | QString basename;
35 |
36 | QVector timestamps;
37 | QVector frames;
38 | double startTime;
39 | bool recording;
40 | };
41 |
42 | QDataStream &operator<<(QDataStream &out, const cv::Mat &matrix);
43 | //QDataStream &operator>>(QDataStream &in, cv::Mat &matrix); // do later perhaps
44 |
45 | #endif // FMFBUFFEREDSOURCESINK_H
46 |
--------------------------------------------------------------------------------
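
Note on fmfbufferedsourcesink.h: the header declares a QDataStream insertion operator for cv::Mat, used when buffering frames. Below is a minimal sketch of what such an operator can look like, assuming a continuous matrix; it is not the implementation from fmfbufferedsourcesink.cpp.

    #include <QDataStream>
    #include "opencv2/opencv.hpp"

    // Illustrative serialisation: dimensions and type first, then the raw pixel bytes.
    QDataStream &operator<<(QDataStream &out, const cv::Mat &matrix)
    {
        out << qint32(matrix.rows) << qint32(matrix.cols) << qint32(matrix.type());
        const qint64 bytes = qint64(matrix.total()) * qint64(matrix.elemSize());
        out.writeRawData(reinterpret_cast<const char*>(matrix.data), int(bytes));   // assumes matrix.isContinuous()
        return out;
    }
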
/vimbaframeobserver.h:
--------------------------------------------------------------------------------
1 | #ifndef VIMBAFRAMEOBSERVER
2 | #define VIMBAFRAMEOBSERVER
3 |
4 | #include
5 |
6 | #include
7 | #include
8 | #include
9 | #include "cambackend.h"
10 |
11 | #include
12 |
13 | class VimbaFrameObserver : public QObject, virtual public AVT::VmbAPI::IFrameObserver
14 | {
15 | Q_OBJECT
16 |
17 | public:
18 | // We pass the camera that will deliver the frames to the constructor
19 | VimbaFrameObserver( AVT::VmbAPI::CameraPtr pCamera, CamBackend* consumer);
20 |
21 | // This is our callback routine that will be executed on every received frame
22 | virtual void FrameReceived( const AVT::VmbAPI::FramePtr pFrame );
23 |
24 | // After the camBackend has been notified about a new frame it can pick it up
25 | AVT::VmbAPI::FramePtr GetFrame();
26 |
27 | // Clears the double buffer frame queue
28 | void ClearFrameQueue();
29 |
30 | signals:
31 | // The frame received event that passes the frame directly
32 | void FrameReceivedSignal( int status );
33 |
34 |
35 | private:
36 | // Since a Qt signal cannot contain a whole frame
37 | // the frame observer stores all FramePtr
38 | std::queue<AVT::VmbAPI::FramePtr> allFrames;
39 | QMutex allFramesMutex;
40 |
41 |
42 |
43 | };
44 |
45 | #endif
46 |
--------------------------------------------------------------------------------
/idssourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef IDSSOURCESINK_H
2 | #define IDSSOURCESINK_H
3 |
4 | #include
5 |
6 | #include
7 | #include "imagepacket.h"
8 | #include "imagesourcesink.h"
9 | #include
10 |
11 | class IdsSourceSink : public ImageSourceSink
12 | {
13 | public:
14 | bool Init(QString params="");
15 | bool StartAcquisition(QString dev="-1");
16 | bool StopAcquisition();
17 | bool ReleaseCamera();
18 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
19 | bool IsOpened();
20 | double SetInterval(double msec);
21 | bool SetShutter(int shutTime);
22 | int SetAutoShutter(bool fitRange);
23 | bool SetRoiRows(int rows);
24 | bool SetRoiCols(int cols);
25 | bool SetColourMode(bool useHighQuality); // quality mode for debayering
26 | bool SetPixelClock(int selection);
27 |
28 | private:
29 | HIDS hCam;
30 | char* imgMem;
31 | int memId;
32 | int flagIDS;
33 | UEYEIMAGEINFO ImageInfo;
34 | #ifdef Q_OS_WIN32
35 | HANDLE hEvent;
36 | #endif
37 |
38 | int Index;
39 | unsigned long Last;
40 | int rows,cols;
41 | int maxHeight,maxWidth;
42 | double camTimeStep;
43 | double camTimeOffset;
44 | cv::Mat buffer;
45 | double timeOffset;
46 | // QVector matFrames;
47 | QString format;
48 | QString source;
49 |
50 | };
51 |
52 | #endif // IDSSOURCESINK_H
53 |
--------------------------------------------------------------------------------
/imagesourcesink.cpp:
--------------------------------------------------------------------------------
1 | #include "imagesourcesink.h"
2 |
3 | bool ImageSourceSink::Init(QString) {
4 | return false;
5 | }
6 |
7 | bool ImageSourceSink::StartAcquisition(QString) {
8 | return false;
9 | }
10 |
11 | bool ImageSourceSink::GrabFrame(ImagePacket&,int) {
12 | return false;
13 | }
14 |
15 | bool ImageSourceSink::StopAcquisition() {
16 | return false;
17 | }
18 |
19 | bool ImageSourceSink::ReleaseCamera() {
20 | return false;
21 | }
22 |
23 | QString ImageSourceSink::StartRecording(QString,QString,int,int,int) {
24 | return QString("");
25 | }
26 |
27 | bool ImageSourceSink::RecordFrame(ImagePacket&) {
28 | return false;
29 | }
30 |
31 | bool ImageSourceSink::StopRecording() {
32 | return false;
33 | }
34 |
35 | bool ImageSourceSink::IsOpened() {
36 | return false;
37 | }
38 |
39 | double ImageSourceSink::SetInterval(double) {
40 | qInfo("Setting fps not supported for this source.");
41 | return 0;
42 | }
43 |
44 | bool ImageSourceSink::SetRoiRows(int) {
45 | return false;
46 | }
47 |
48 | bool ImageSourceSink::SetRoiCols(int) {
49 | return false;
50 | }
51 |
52 | bool ImageSourceSink::SetShutter(int) {
53 | qInfo("Setting shutter not supported for this source.");
54 | return false;
55 | }
56 |
57 | int ImageSourceSink::SetAutoShutter(bool)
58 | {
59 | return 0;
60 | }
61 |
62 | bool ImageSourceSink::SkipFrames(bool)
63 | {
64 | return false;
65 | }
66 |
67 |
68 |
--------------------------------------------------------------------------------
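
Note on imagesourcesink.cpp: these defaults mean a concrete source only has to override the calls it actually supports, and everything else degrades gracefully. Below is a minimal sketch of a custom source built on this interface; the class name and the synthetic test pattern are purely illustrative and do not exist in the repository.

    #include "imagesourcesink.h"

    // Hypothetical example source: produces a synthetic grey ramp instead of reading hardware.
    class DummySourceSink : public ImageSourceSink
    {
    public:
        bool Init(QString params="") { (void)params; return true; }
        bool StartAcquisition(QString dev="0") { (void)dev; opened=true; return true; }
        bool StopAcquisition() { return true; }
        bool ReleaseCamera() { opened=false; return true; }
        bool IsOpened() { return opened; }
        bool GrabFrame(ImagePacket& target, int indexIncrement=1)
        {
            target.image = cv::Mat(480, 640, CV_8UC1);
            for (int r = 0; r < target.image.rows; ++r)
                target.image.row(r).setTo(cv::Scalar(r % 256));   // simple ramp pattern
            target.seqNumber += indexIncrement;
            target.pixFormat = "MONO8";
            return true;
        }
    private:
        bool opened = false;
    };
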
/coordinator.h:
--------------------------------------------------------------------------------
1 | #ifndef COORDINATOR_H
2 | #define COORDINATOR_H
3 |
4 | #include <QObject>
5 | #include <QThread>
6 | //#include "maingui.h"
7 | #include "picbackend.h"
8 | #include "cambackend.h"
9 |
10 | class Coordinator : public QObject
11 | {
12 | Q_OBJECT
13 | public:
14 | Coordinator(QObject *parent = nullptr);
15 |
16 | signals:
17 | void NewImageReady(ImagePacket im);
18 | void shutterChanged(int newTime);
19 | void fpsChanged(double msec);
20 |
21 | public slots:
22 | void controlCameraThread(bool startNew,QString dev="0");
23 | void LoadNewMovie(QString);
24 | void StartNewAVT(bool startNew);
25 | void StartNewVimba(bool startNew);
26 | void StartNewIds(bool startNew);
27 | void changeFps(double newFps);
28 | void stopAcquisition();
29 | void changeShutter (int time);
30 | void setAutoShutter (bool fitRange);
31 | void setRoiRows(int rows);
32 | void setRoiCols(int cols);
33 | void skipForwardBackward(bool forward);
34 | void StartRecording(bool start, QString recFold="", QString codec="",int skip=0);
35 | void newPluginSettingsReceived(QMap settings);
36 |
37 | public:
38 | // void setGui(MainGui* myGui);
39 |
40 | private:
41 | // MainGui* theGui;
42 | bool guiMode;
43 | PicBackend picBack;
44 | CamBackend camBack;
45 | bool opencvRunning;
46 | bool avtRunning;
47 | QThread* backendThread;
48 |
49 | };
50 |
51 | #endif // COORDINATOR_H
52 |
--------------------------------------------------------------------------------
/avtsourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef AVTSOURCESINK_H
2 | #define AVTSOURCESINK_H
3 |
4 | //defines necessary for PvApi header..
5 | #ifdef Q_OS_WINDOWS
6 | #define WIN32_LEAN_AND_MEAN
7 | #include
8 | #include
9 | #else
10 | #define _LINUX
11 | #define _x64
12 | #endif
13 |
14 | #include "PvApi.h"
15 |
16 | #include
17 | #include "imagepacket.h"
18 | #include "imagesourcesink.h"
19 | #include
20 |
21 | class AvtSourceSink : public ImageSourceSink
22 | {
23 | public:
24 | bool Init(QString params="");
25 | bool StartAcquisition(QString dev="0");
26 | bool StopAcquisition();
27 | bool ReleaseCamera();
28 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
29 | bool IsOpened();
30 | double SetInterval(double msec);
31 | bool SetShutter(int shutTime);
32 | int SetAutoShutter(bool fitRange);
33 |
34 | private:
35 |
36 | // this defines the buffersize
37 | #define FRAMESCOUNT 50
38 |
39 | typedef struct
40 | {
41 | unsigned long UID;
42 | tPvHandle Handle;
43 | tPvFrame Frames[FRAMESCOUNT];
44 | bool Stop;
45 | unsigned long Discarded;
46 | } tCamera;
47 |
48 | tCamera GCamera;
49 | int Index;
50 | unsigned long Last;
51 | int rows,cols;
52 | double camTimeStep;
53 | double camTimeOffset;
54 | cv::Mat buffer;
55 | QVector matFrames;
56 |
57 | };
58 |
59 | #endif // AVTSOURCESINK_H
60 |
--------------------------------------------------------------------------------
/interferoplugindialog.h:
--------------------------------------------------------------------------------
1 | #ifndef INTERFEROPLUGINDIALOG_H
2 | #define INTERFEROPLUGINDIALOG_H
3 | #include <QDialog>
4 | #include <QMap>
5 | #include "opencv2/opencv.hpp"
6 |
7 | #include "imagepacket.h"
8 |
9 | namespace Ui {
10 | class InterferoPluginDialog;
11 | }
12 |
13 | class InterferoPluginDialog : public QDialog
14 | {
15 | Q_OBJECT
16 |
17 | public:
18 | explicit InterferoPluginDialog(QWidget *parent = nullptr);
19 | bool processImage(ImagePacket& currIm);
20 | ~InterferoPluginDialog();
21 |
22 | signals:
23 | void stateChanged(QMap newSettings);
24 |
25 | private slots:
26 | void on_activateBox_stateChanged(int val);
27 | void on_newReferenceButton_clicked();
28 |
29 | void on_subsampleSlider_sliderPressed();
30 | void on_subsampleSlider_sliderReleased();
31 | void on_subsampleSlider_valueChanged(int value);
32 |
33 | void on_skipSlider_sliderPressed();
34 | void on_skipSlider_sliderReleased();
35 | void on_skipSlider_valueChanged(int value);
36 |
37 | private:
38 | bool extractData();
39 | void fftshift(cv::Mat& I,bool forward);
40 | void filterDft(cv::Mat& I,int startC,int stopC);
41 | void PeakFinder(cv::Mat input,int* rowPos,int* colPos);
42 | void centrePeak(cv::Mat I,int rowPeak,int colPeak);
43 | void adaptForScreen(cv::Mat& I);
44 |
45 | bool tSliderPressed,ssSliderPressed,skSliderPressed;
46 | bool activated,newReference;
47 | int rowPeak,colPeak;
48 | int subsample,skip;
49 | int frameCounter;
50 | double fringePeriod,fringeAngle;
51 |
52 |
53 |
54 |
55 | Ui::InterferoPluginDialog *ui;
56 | };
57 |
58 | #endif // InterferoPluginDIALOG_H
59 |
--------------------------------------------------------------------------------
/vimbasourcesink.h:
--------------------------------------------------------------------------------
1 | #ifndef VIMBASOURCESINK_H
2 | #define VIMBASOURCESINK_H
3 |
4 | //defines necessary for VIMBA header..
5 | #ifdef WIN32
6 | #include
7 | #else
8 | #include
9 | #include
10 | #endif
11 |
12 | #include
13 |
14 |
15 | // now other imports
16 | #include
17 | #include
18 | #include
19 | #include "imagepacket.h"
20 | #include "imagesourcesink.h"
21 | #include "vimbaframeobserver.h"
22 | #include
23 | #include "cambackend.h"
24 |
25 |
26 | class VimbaSourceSink : public ImageSourceSink
27 | {
28 | public:
29 | VimbaSourceSink(CamBackend* parent);
30 | bool Init(QString params="");
31 | bool StartAcquisition(QString dev="0");
32 | bool StopAcquisition();
33 | bool ReleaseCamera();
34 | bool GrabFrame(ImagePacket& target,int indexIncrement=1);
35 | bool IsOpened();
36 | double SetInterval(double msec);
37 | bool SetRoiRows(int rows);
38 | bool SetRoiCols(int cols);
39 | bool SetShutter(int shutTime);
40 | int SetAutoShutter(bool fitRange);
41 | std::vector listPixelFormats();
42 | void setFormat(QString formatstring);
43 |
44 |
45 | private:
46 | std::vector listOptions(AVT::VmbAPI::FeaturePtr pFeature);
47 |
48 | AVT::VmbAPI::VimbaSystem & system;
49 | AVT::VmbAPI::CameraPtr pCamera;
50 | VimbaFrameObserver* frameWatcher;
51 | CamBackend* parent;
52 | int bufCount;
53 | VmbInt64_t height,width;
54 | VmbInt64_t maxHeight,maxWidth;
55 | double frameRate,exposure;
56 | VmbInt64_t camFreq,initialStamp;
57 | double timeOffset;
58 | QString format;
59 |
60 |
61 | };
62 |
63 | #endif
64 |
65 |
--------------------------------------------------------------------------------
/kafkafrontend.h:
--------------------------------------------------------------------------------
1 | #ifndef KAFKAFRONTEND_H
2 | #define KAFKAFRONTEND_H
3 |
4 | #include
5 | #include
6 | #include
7 |
8 | #include "imagepacket.h"
9 | #include "coordinator.h"
10 | #include "librdkafka/rdkafka.h"
11 | #include "kafkacontroller.h"
12 |
13 | class KafkaFrontend : public QObject
14 | {
15 | Q_OBJECT
16 | public:
17 | explicit KafkaFrontend(Coordinator *boss,QObject *parent = nullptr);
18 | void makePublisher(QString topic,QString host = QString("localhost:9092"));
19 | void makeConsumers(QString cfgtopic,QString cmdtopic,QString groupId,QString host = QString("localhost:9092"));
20 |
21 | signals:
22 | void startRecording(bool start,QString recfold,QString codec,int skipping);
23 | void implementNewFps(double fps);
24 | void setShutter(int time);
25 | void setRoiRows(int rows);
26 | void setRoiCols(int cols);
27 |
28 | void startTheTimer(int interval);
29 | void stopTheTimer();
30 |
31 | public slots:
32 | void newImageReceived(ImagePacket theMatrix);
33 | void actOnCommands(QList commands);
34 | void actOnConfig(QList configs);
35 |
36 | private:
37 | void publishMsg(QString key, QJsonDocument value);
38 | void changeParameters(QJsonObject instructs);
39 | void changeRecording(bool start,QJsonObject parameters);
40 |
41 | // producer vars
42 | rd_kafka_t *prodHndl;
43 | rd_kafka_topic_t *prodTopicHndl;
44 | // consumer vars
45 | KafkaController *cmdPointer;
46 | KafkaController *cfgPointer;
47 | QThread* cmdThread;
48 | QThread* cfgThread;
49 | int skipLogging;
50 |
51 | int timeout;
52 | // camera parameters
53 | bool isRecording;
54 | };
55 |
56 | #endif // KAFKAFRONTEND_H
57 |
--------------------------------------------------------------------------------
/playbackdialog.h:
--------------------------------------------------------------------------------
1 | #ifndef PLAYBACKDIALOG_H
2 | #define PLAYBACKDIALOG_H
3 |
4 | #include <QDialog>
5 | #include <QTimer>
6 | #if QT_VERSION >= 0x050000
7 | #include <QtWidgets>
8 | #endif
9 |
10 | namespace Ui {
11 | class PlaybackDialog;
12 | }
13 |
14 | class PlaybackDialog : public QDialog
15 | {
16 | Q_OBJECT
17 |
18 | public:
19 | explicit PlaybackDialog(QWidget *parent = 0);
20 | ~PlaybackDialog();
21 |
22 | signals:
23 | void stopPlayback();
24 | void newFps(double fps);
25 | void jumpFrames(bool forward);
26 | void recordNow(bool checked,QString recFold, QString codec,int recordSkip);
27 | void recordSnapshot(QString SnapshotName);
28 |
29 | private slots:
30 | void on_stopButton_clicked();
31 | void on_ffwdButton_clicked();
32 | void on_rwdButton_clicked();
33 | void on_playButton_toggled(bool checked);
34 | void on_recButton_toggled(bool checked);
35 | void on_RecSettings_clicked();
36 | void on_backButton_clicked();
37 | void on_toolButton_clicked();
38 | void togglePlay();
39 | void reversePlay();
40 | void forwardPlay();
41 | void on_fpsEdit_returnPressed();
42 | void on_horizontalSlider_valueChanged(int value);
43 | void on_recTimedButton_toggled(bool checked);
44 | void finishedFirstTimer();
45 | void finishedSecondTimer();
46 |
47 | void on_snapshotButton_clicked();
48 |
49 | public slots:
50 | void newFrameNumberReceived(int nr);
51 | void showNewFps(double msec);
52 |
53 | private:
54 | bool parseInstruct(QString instruct, int& sec, double &msecdelay);
55 |
56 |
57 | Ui::PlaybackDialog *ui;
58 | double currentTimer;
59 | bool recording;
60 | bool have2timers;
61 | double secondDelay;
62 | QTimer timer1,timer2; // typical use case only requires two timers
63 | QString config1,config2; // to go with the two timers
64 | };
65 |
66 | #endif // PLAYBACKDIALOG_H
67 |
--------------------------------------------------------------------------------
/vimbaframeobserver.cpp:
--------------------------------------------------------------------------------
1 | #include "vimbaframeobserver.h"
2 | #include "cambackend.h"
3 |
4 | using namespace AVT::VmbAPI;
5 |
6 | VimbaFrameObserver::VimbaFrameObserver(CameraPtr pCamera, CamBackend *consumer) : IFrameObserver( pCamera ) {
7 | connect(this,SIGNAL(FrameReceivedSignal(int)),consumer,SLOT(GrabFrame()));
8 | }
9 |
10 |
11 | void VimbaFrameObserver::FrameReceived( const FramePtr pFrame )
12 | {
13 | bool bQueueDirectly = true;
14 | VmbFrameStatusType eReceiveStatus;
15 |
16 | if( VmbErrorSuccess == pFrame->GetReceiveStatus( eReceiveStatus ) )
17 | {
18 | // Lock the frame queue
19 | allFramesMutex.lock();
20 | // Add frame to queue
21 | allFrames.push( pFrame );
22 | // Unlock frame queue
23 | allFramesMutex.unlock();
24 | // Emit the frame received signal
25 | emit FrameReceivedSignal( eReceiveStatus );
26 | bQueueDirectly = false;
27 | }
28 |
29 | // If any error occurred we queue the frame without notification
30 | if( true == bQueueDirectly )
31 | {
32 | m_pCamera->QueueFrame( pFrame ); //m_pCamera is the inherent name of the camera pointer of an IFrameObserver
33 | }
34 | }
35 |
36 | // Returns the oldest frame that has not been picked up yet
37 | FramePtr VimbaFrameObserver::GetFrame()
38 | {
39 | // Lock the frame queue
40 | allFramesMutex.lock();
41 | // Pop frame from queue
42 | FramePtr res = allFrames.front();
43 | allFrames.pop();
44 | // Unlock frame queue
45 | allFramesMutex.unlock();
46 | return res;
47 | }
48 |
49 | void VimbaFrameObserver::ClearFrameQueue()
50 | {
51 | // Lock the frame queue
52 | allFramesMutex.lock();
53 | // Clear the frame queue and release the memory
54 | std::queue<FramePtr> empty;
55 | std::swap( allFrames, empty );
56 | // Unlock the frame queue
57 | allFramesMutex.unlock();
58 | }
59 |
--------------------------------------------------------------------------------
/cambackend.h:
--------------------------------------------------------------------------------
1 | #ifndef CAMBACKEND_H
2 | #define CAMBACKEND_H
3 |
4 | #include <QThread>
5 | #include <QTimer>
6 | #include "imagepacket.h"
7 | #include "imagesourcesink.h"
8 | #ifdef ELLIPSE
9 | #include "ellipsedetection.h"
10 | #endif
11 |
12 | class CamBackend : public QThread
13 | {
14 | Q_OBJECT
15 | public:
16 | explicit CamBackend(QObject *parent = nullptr);
17 | bool StartAcquisition(QString dev="0");
18 | void StopAcquisition();
19 | void ReleaseCamera();
20 | void SetInterval(double newInt);
21 | void SetShutter(int shut);
22 | void SetAutoShutter(bool fitRange);
23 | void setRoiRows(int rows);
24 | void setRoiCols(int cols);
25 | void AdaptForDisplay(ImagePacket& newIm);
26 | void skipForwardBackward(bool forward);
27 | void StartRecording(bool start, QString recFold="", QString codec="",int skip=0);
28 | void changedPluginSettings(QMap settings);
29 | void doPlugin(ImagePacket& currIm);
30 |
31 |
32 | signals:
33 | void NewImageReady(ImagePacket im);
34 | void shutterChanged(int newTime);
35 | void fpsChanged(double msec);
36 | void startTheTimer(int interval);
37 | void stopTheTimer();
38 |
39 | public slots:
40 | void GrabFrame();
41 | void willStartTheTimer(int interval);
42 | void willStopTheTimer();
43 |
44 |
45 | private:
46 | void run();
47 | void record();
48 |
49 |
50 | ImageSourceSink *currSink, *currSource;
51 | ImagePacket currImage;
52 | bool liveMode;
53 | bool recording;
54 | double timerInterval;
55 | QTimer* timer;
56 | bool reversePlay;
57 | bool isPaused;
58 | bool needTimer;
59 | bool doesCallBack;
60 | bool running;
61 | QString format;
62 | bool doPluginProcessing;
63 | int skipImages;
64 | int recSkip;
65 | bool stoppingRecording;
66 | QString recFileName;
67 | QString origin;
68 |
69 |
70 |
71 | #ifdef ELLIPSE
72 | EllipseDetection ellipse;
73 | #endif
74 |
75 |
76 | };
77 |
78 | #endif // CAMBACKEND_H
79 |
--------------------------------------------------------------------------------
/fileinputdialog.cpp:
--------------------------------------------------------------------------------
1 | #include "fileinputdialog.h"
2 | #include "ui_fileinputdialog.h"
3 | #include <QFileDialog>
4 |
5 | FileInputDialog::FileInputDialog(QWidget *parent) :
6 | QDialog(parent),
7 | ui(new Ui::FileInputDialog),parent(parent)
8 | {
9 | ui->setupUi(this);
10 |
11 | #ifndef PVAPI
12 | ui->AvtButton->setEnabled(false);
13 | #endif
14 |
15 |
16 | #ifndef IDS
17 | ui->idsButton->setEnabled(false);
18 | #endif
19 |
20 | #ifndef VIMBA
21 | ui->vimbaButton->setEnabled(false);
22 | #endif
23 |
24 | currentDir=QDir::home();
25 | }
26 |
27 | FileInputDialog::~FileInputDialog()
28 | {
29 | delete ui;
30 | }
31 |
32 | // load picture from disk button
33 | void FileInputDialog::on_filePushButton_clicked()
34 | {
35 | QString resp = QFileDialog::getOpenFileName(parent,tr("Open Picture"),
36 | currentDir.absolutePath(), tr("All files (*.*)") );
37 | if (resp=="") {
38 | currentDir=QDir::home();
39 | } else {
40 | currentDir=QDir(resp);
41 | }
42 | emit StaticPicPressed(resp);
43 | }
44 |
45 | // load opencvFeed
46 | void FileInputDialog::on_camButton_clicked() {
47 | emit OpencvFeedPressed();
48 | }
49 |
50 | // load movie (from disk) button
51 | void FileInputDialog::on_MovieButton_clicked()
52 | {
53 | QString resp = QFileDialog::getOpenFileName(parent,tr("Open Movie"),
54 | currentDir.absolutePath(), tr("All files (*.*)") );
55 | if (resp=="") {
56 | currentDir=QDir::home();
57 | } else {
58 | currentDir=QDir(resp);
59 | }
60 | emit MoviePressed(resp);
61 | }
62 |
63 | // exit button
64 | void FileInputDialog::on_pushButton_2_clicked()
65 | {
66 | emit CloseApp();
67 | }
68 |
69 | // Avt (PvAPI) feed (Prosilica)
70 | void FileInputDialog::on_AvtButton_clicked()
71 | {
72 | emit AvtFeedPressed();
73 | }
74 |
75 | void FileInputDialog::on_vimbaButton_clicked()
76 | {
77 | emit VimbaFeedPressed();
78 | }
79 |
80 | void FileInputDialog::on_idsButton_clicked()
81 | {
82 | emit IdsFeedPressed();
83 | }
84 |
--------------------------------------------------------------------------------
/README.txt:
--------------------------------------------------------------------------------
1 | The goal of this program is to create a Qt and OpenGL-based viewer capable of working
2 | with scientific cameras (GigE and USB3 based), modifying their important settings such as
3 | framerate and exposure levels, and saving the acquired images to disk as fast as possible.
4 |
5 | The program is compatible with the PvApi, Vimba and IDS (uEye) SDKs, which allows it to connect
6 | to many GigE-compliant and USB3 cameras (and FireWire on Windows). Through OpenCV, many other cameras, such as webcams, can be accessed.
7 | The program is cross-platform and works on Linux, Mac and Windows. However, most cameras are only supported on Linux
8 | and Windows due to restrictions of the underlying libraries. The Mac version serves mostly to view the recorded videos for now.
9 |
10 | Recording to disk is focused on the scientific usage of the data and therefore does not use typical movie
11 | containers such as AVI or MPEG. Instead, movies are stored in the HDF5 (=> https://www.hdfgroup.org/HDF5/) or FMF (=> http://code.astraw.com/projects/motmot/fly-movie-format.html) formats, in which
12 | a precise timestamp can be recorded per image (hence allowing variable framerates).
13 | This also allows easy access to the video as a 3D matrix from Python and other languages.
14 |
15 | Installation:
16 | -------------
17 | * Detailed instructions for installation on Linux, Mac and Windows can be found in the CompilingLinux/Mac/Windows text files.
18 | * Precompiled (standalone) binaries are available for Windows; these do not require any installation.
19 | * The format of the recorded FMF files is documented on http://code.astraw.com/projects/motmot/fly-movie-format.html. From this site,
20 | also some tools can be downloaded for converting these files. A simple access from python is also possible using the included FMF.py file.
21 | To use it do:
22 | import FMF
23 | vid=FMF.FMF("myfile.fmf")
24 | vid.I #=> this is the 3D-matrix containing your movie; to show a single image do imshow(vid.I[0])
25 | vid.T #=> This contains the corresponding timestamps : print vid.T[0]
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ampliplugindialog.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | AmpliPluginDialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 494
10 | 401
11 |
12 |
13 |
14 |
15 | 1
16 | 1
17 |
18 |
19 |
20 | Amplitude Analysis
21 |
22 |
23 | 0.800000000000000
24 |
25 |
26 | -
27 |
28 |
-
29 |
30 |
31 | Row
32 |
33 |
34 |
35 | -
36 |
37 |
38 | Activate Processing
39 |
40 |
41 |
42 | -
43 |
44 |
45 | 1
46 |
47 |
48 | 50
49 |
50 |
51 | Qt::Horizontal
52 |
53 |
54 |
55 | -
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 | QChartView
65 | QGraphicsView
66 |
67 |
68 |
69 |
70 |
71 |
72 |
--------------------------------------------------------------------------------
/CompilingLinux.txt:
--------------------------------------------------------------------------------
1 | Compiling on Linux
2 | ------------------
3 | * install gcc (on Ubuntu, the build-essential package)
4 | * install qtcreator
5 | * install git
6 | * install opencv (development version)
7 | * install hdf5 (development version)
8 | * Now open QtCreator and click New Project - Import Project - Git Clone and for repository type:
9 | https://github.com/SamDehaeck/GigaViewer . Now select where you want to save and click continue.
10 | * Take a look at the configuration options mentioned in the GigaViewer.pro file to select the options you want to include.
11 | For the first compilation tests, it is best to leave all extra features turned off. In this way, the correct functioning of the Qt and OpenCV libraries is checked.
12 | The Hdf5 option is a useful extension, as it gives access to a more 'universal' single-container movie format which is easily accessible from many different programs.
13 | The 'Tracking' option is work in progress to illustrate real-time processing of the recorded images. It has no real use for the moment.
14 | Before you enable the different camera backends, you need to install their respective 'drivers' first (to be found on their websites).
15 | * Inside this GigaViewer.pro file you can also find the compilation instructions for each platform. If the compilation does not work, check here that all the
16 | include and library linking directories are correct.
17 |
18 | Post-install tips for Linux:
19 | ----------------------------
20 |
21 | * The included gigaviewer.desktop file should be copied to the ~/.local/share/applications folder and its executable bit set
22 | * The GigaViewer.svg file should be copied to ~/.local/share/icons folder
23 |
24 | Now you should be able to make a launcher, e.g. on Ubuntu by dragging the desktop file to the sidebar,
25 | or e.g. on GNOME 3 by first starting the .desktop file by double-clicking it. The icon will now be visible in the sidebar; just add it to the favourites.
26 | Note that it may only show up after a restart.
27 |
28 | To correctly associate the created .fmf movies with the application do the following:
29 |
30 | * copy video-fmf.xml to the ~/.local/share/mime/packages folders
31 | * copy GigaViewer.svg to /usr/share/icons/gnome/scalable/mimetypes/video-fmf.svg
32 | * update-mime-database ~/.local/share/mime
33 | * sudo gtk-update-icon-cache /usr/share/icons/gnome
34 |
35 | If this does not work, copy it to the mimetypes/scalable folder of your icon set and apply gtk-update-icon-cache to that folder.
36 |
--------------------------------------------------------------------------------
/CompilingMac.txt:
--------------------------------------------------------------------------------
1 | Compiling on Mac:
2 | -----------------
3 | * Install XCode (mac store)
4 | * Install Qt (latest version for clang 64-bit works fine)
5 | * Install CMake (to run it for the first time, ctrl-click it and select Open; double-clicking will not work.)
6 | * Opencv
7 | - Download and unzip
8 | - Make build directory
9 | - With CMake locate the source code directory and the build directory: press configure and generate (defaults are fine)
10 | - go with terminal to build directory
11 | - type: make (or make -j 4 if you want it to go faster)
12 | - type: sudo make install
13 | * HDF5: this is optional but allows recording into and reading from HDF5 containers
14 | - Download and unzip
15 | - Make build directory
16 | - With CMake locate the source code directory and the build directory: press configure
17 | - switch on the OsXFrameworks option (not sure if this is necessary)
18 | - now press generate in the cmake interface
19 | - go with terminal to build directory
20 | - type: make (or make -j 4 if you want it to go faster)
21 | - type: sudo make install
22 | - Stopping here should be OK, but in my case the compilation then worked while launching GigaViewer failed due to some library mismatch problem
23 | => remove some faulty(?) symbolic links by typing the following (or rename them if you prefer):
24 | sudo rm /usr/local/HDF_Group/HDF5/1.8.16/lib/libhdf5.10.1.0.dylib to remove this symbolic link. Type similar commands to remove libhdf5_hl.10.1.0.dylib, libhdf5_cpp.10.1.0.dylib and libhdf5_hl_cpp.10.1.0.dylib
25 | * Now open QtCreator (located in /users/tips/Qt) and click New Project - Import Project - Git Clone and for repository type:
26 | https://github.com/SamDehaeck/GigaViewer . Now select where you want to save and click continue.
27 | * Among the configuration options mentioned in the GigaViewer.pro file, you need to comment out the IDS, VIMBA and PVAPI options, as no Mac backends are available yet (IDS and Vimba do not support it yet and PVAPI could be possible but was not checked).
28 | For the first compilation tests, it is best to leave all extra features turned off (also HDF5 and Tracking). In this way, the correct functioning of the Qt and OpenCV libraries is checked.
29 | The HDF5 option is a useful extension, as it gives access to a more 'universal' single-container movie format which is easily accessible from many different programs.
30 | The 'Tracking' option is work in progress to illustrate real-time processing of the recorded images. It has no real use for the moment.
31 | * Inside this GigaViewer.pro file you can also find the compilation instructions for each platform. If the compilation does not work, check there that all
32 | include directories and library linking directories are correct.
33 |
--------------------------------------------------------------------------------
/ellipsedetectiondialog.cpp:
--------------------------------------------------------------------------------
1 | #include "ellipsedetectiondialog.h"
2 | #include "ui_ellipsedetectiondialog.h"
3 | #include
4 |
5 |
6 | EllipseDetectionDialog::EllipseDetectionDialog(QWidget *parent) :
7 | QDialog(parent),tSliderPressed(false),xSliderPressed(false),ySliderPressed(false),
8 | ui(new Ui::EllipseDetectionDialog)
9 | {
10 | ui->setupUi(this);
11 | extractData();
12 | }
13 |
14 | bool EllipseDetectionDialog::extractData() {
15 | QMap<QString,QVariant> settings;
16 | settings["pluginName"]="EllipseDetection";
17 | settings["activated"]=ui->activateBox->isChecked();
18 | settings["threshold"]=ui->thresholdSlider->value();
19 | settings["MinD"]=ui->MinDiameter->value();
20 | settings["MaxD"]=ui->MaxDiameter->value();
21 | settings["showFeedback"]=ui->feedbackButton->isChecked();
22 | //qInfo()<<"Sending new state"<::const_iterator i = settings.constBegin();
27 | while (i != settings.constEnd()) {
28 | qDebug() << i.key() << ": " << i.value();
29 | ++i;
30 | }
31 | qDebug()<<"-----------";
32 | */
33 | return true;
34 | }
35 |
36 | EllipseDetectionDialog::~EllipseDetectionDialog()
37 | {
38 | delete ui;
39 | }
40 |
41 | void EllipseDetectionDialog::on_activateBox_stateChanged(int val)
42 | {
43 | extractData();
44 | }
45 |
46 | void EllipseDetectionDialog::on_feedbackButton_clicked(bool checked)
47 | {
48 | feedback=checked;
49 | extractData();
50 | }
51 |
52 |
53 |
54 | void EllipseDetectionDialog::on_thresholdSlider_sliderPressed()
55 | {
56 | tSliderPressed=true;
57 | }
58 |
59 | void EllipseDetectionDialog::on_thresholdSlider_sliderReleased()
60 | {
61 | tSliderPressed=false;
62 | extractData();
63 | }
64 |
65 | void EllipseDetectionDialog::on_thresholdSlider_valueChanged(int)
66 | {
67 | if (!tSliderPressed) {
68 | extractData();
69 | }
70 | }
71 |
72 | void EllipseDetectionDialog::on_MinDiameter_sliderPressed()
73 | {
74 | xSliderPressed=true;
75 | }
76 |
77 | void EllipseDetectionDialog::on_MinDiameter_sliderReleased()
78 | {
79 | xSliderPressed=false;
80 | extractData();
81 | }
82 |
83 | void EllipseDetectionDialog::on_MinDiameter_valueChanged(int)
84 | {
85 | if (!xSliderPressed) {
86 | extractData();
87 | }
88 | }
89 |
90 | void EllipseDetectionDialog::on_MaxDiameter_sliderPressed()
91 | {
92 | ySliderPressed=true;
93 | }
94 |
95 | void EllipseDetectionDialog::on_MaxDiameter_sliderReleased()
96 | {
97 | ySliderPressed=false;
98 | extractData();
99 | }
100 |
101 | void EllipseDetectionDialog::on_MaxDiameter_valueChanged(int)
102 | {
103 | if (!ySliderPressed) {
104 | extractData();
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/ampliplugindialog.cpp:
--------------------------------------------------------------------------------
1 | #include "ampliplugindialog.h"
2 | #include "ui_ampliplugindialog.h"
3 | #include
4 |
5 |
6 | AmpliPluginDialog::AmpliPluginDialog(QWidget *parent) :
7 | QDialog(parent),tSliderPressed(false),activated(false),axisDefined(false),
8 | ui(new Ui::AmpliPluginDialog)
9 | {
10 | ui->setupUi(this);
11 | extractData();
12 | }
13 |
14 | bool AmpliPluginDialog::extractData() {
15 | activated=ui->activateBox->isChecked();
16 | row=ui->rowSlider->value();
17 | return true;
18 | }
19 |
20 | bool AmpliPluginDialog::processImage(ImagePacket& currIm) {
21 | if (activated) {
22 | if (currIm.pixFormat=="RGB8") {
23 | cv::Mat grayIm;
24 | cv::cvtColor(currIm.image,grayIm,cv::COLOR_RGB2GRAY);
25 | currIm.image=grayIm.clone();
26 | currIm.pixFormat="MONO8"; // will make it fall through to the next part.
27 | }
28 | if (currIm.pixFormat=="MONO8") {
29 | int line=static_cast<int>(currIm.image.rows*(row/100.0));
30 |
31 | // extract pixels and plot
32 | QLineSeries *series = new QLineSeries();
33 | for (int j=0;j<currIm.image.cols;j++) {
34 | series->append(j,currIm.image.at<uchar>(line,j));
35 | }
36 |
37 | if (not axisDefined) {
38 | myChart = new QChart();
39 | myChart->legend()->hide();
40 | myChart->addSeries(series);
41 | myChart->createDefaultAxes();
42 | myChart->setTitle("Extracted gray scale value");
43 | ui->pluginChart->setChart(myChart);
44 | axisDefined=true;
45 | } else {
46 | myChart->removeAllSeries();
47 | myChart->addSeries(series);
48 | myChart->createDefaultAxes();
49 | }
50 |
51 | // now indicate line in figure
52 | //if (tSliderPressed) {
53 | cv::line(currIm.image,cv::Point(0,line),cv::Point(currIm.image.cols,line),125,3);
54 | //}
55 |
56 | } else {
57 | qDebug()<<"Image format not yet supported! "<
5 | #include
6 | #include
7 | #if QT_VERSION >= 0x050000
8 | #include
9 | #endif
10 | #include "imagepacket.h"
11 | #include "videoglscene.h"
12 | #include "cameracontrolsdialog.h"
13 | #include "playbackdialog.h"
14 | #include "fileinputdialog.h"
15 | #include "coordinator.h"
16 |
17 | #ifdef ELLIPSE
18 | #include "ellipsedetectiondialog.h"
19 | #endif
20 |
21 | #ifdef INTERFERO
22 | #include "interferoplugindialog.h"
23 | #endif
24 |
25 | #ifdef AMPLI
26 | #include "ampliplugindialog.h"
27 | #endif
28 |
29 | class MainGui : public QGraphicsView
30 | {
31 | Q_OBJECT
32 | public:
33 | explicit MainGui(Coordinator *boss,QWidget *parent = nullptr);
34 | void returnToStart();
35 |
36 | signals:
37 | void newPicNeeded(QString theString);
38 | void newMovieNeeded(QString theString);
39 | void newOpencvFeedNeeded(bool start);
40 | void newAvtFeedNeeded(bool start);
41 | void newVimbaFeedNeeded(bool start);
42 | void newIdsFeedNeeded(bool start);
43 | void implementNewFps(double fps);
44 | void showNewFps(double msec);
45 | void startRecording(bool start,QString recfold,QString codec,int skipping);
46 | void closeApplic();
47 | void newSampleReady(ImagePacket matrix);
48 | void newFrameNrShowing(int nr);
49 | void setShutter(int time);
50 | void setAutoShutter(bool fitRange);
51 | void skipFrames(bool forward);
52 | void setRoiRows(int rows);
53 | void setRoiCols(int cols);
54 |
55 | void pluginSettingsChanged(QMap<QString,QVariant> settings);
56 |
57 | public slots:
58 | void newImageReceived(ImagePacket theMatrix);
59 | void openCvFeedPressed();
60 | void AVTFeedPressed();
61 | void VimbaFeedPressed();
62 | void IdsFeedPressed();
63 | void newMoviePressed(QString theString);
64 | void stopButtonPressed();
65 | void gotNewFps(double fps);
66 | void gotNewFpsFromBackend(double fps);
67 | void gotNewShutSpeed(int shut);
68 | void needNewSample();
69 | void getSnapshot(QString location);
70 | void showPlaybackControls(bool visible);
71 | void showInputControls(bool visible);
72 | void showCameraControls(bool visible);
73 |
74 |
75 | protected:
76 | void resizeEvent(QResizeEvent *event);
77 |
78 | void showPluginDialogs(bool visible);
79 |
80 |
81 | private:
82 | Coordinator myBoss;
83 | VideoGlScene* theScene;
84 | FileInputDialog* fileDialog;
85 | PlaybackDialog* playDialog;
86 | CameraControlsDialog* camDialog;
87 | bool getNewSample;
88 | bool recordSnapshot;
89 | QString snapshotLocation;
90 |
91 | bool saveSnapshot(ImagePacket theImage);
92 |
93 | #ifdef ELLIPSE
94 | EllipseDetectionDialog* ellipseDialog;
95 | #endif
96 | #ifdef INTERFERO
97 | InterferoPluginDialog* interferoDialog;
98 | #endif
99 | #ifdef AMPLI
100 | AmpliPluginDialog* ampliDialog;
101 | #endif
102 | };
103 |
104 | #endif // MAINGUI_H
105 |
--------------------------------------------------------------------------------
/TODO.txt:
--------------------------------------------------------------------------------
1 | => Play back by default in real-time.
2 | => Allow zooming by scrolling
3 | => Allow cropping with offset X and Y
4 | => When only a single image present, it is not shown: Not starting at beginning/ending with last image?
5 |
6 | => IDS camera: implement FRAME QUEUE!! + clock frequency does seem important here after all, cropping, ..
7 |
8 | => Could create a RAM-sink to store a maximum of images in the system RAM => Can I poll for available RAM-memory? (=> can be used to decide on queue size in current backends to minimize data loss which would render this option unimportant). When finished ask where to store. => could solve issues when using a laptop for storing data. => polling is platform dependent but could be implemented.
9 | -- or could have this as an option (amount of GB of RAM that can be used).
10 | => kind of 'extra' options which are hidden by default like for the recording location dialog?
11 | => Min and Max fps and min and max exposure times should be shown
12 | => Allow modifying gain.
13 | => Allow modifying binning and skipping of lines
14 | => allow recording with subsample options or perhaps some compression settings?
15 |
16 | => Play High speed camera images directly (Multi-page Tiff?) From IDT.
17 |
18 | => Make display options pane to allow dynamic rescaling (uint16=>to min-max), colour interpolation,
19 | fixed scale, HISTOGRAM (=> Show histogram of current image). Or plot along one row/column.
20 |
21 |
22 | PLUGINS
23 | *******
24 |
25 | * Allow for image processing extensions (PLUGINS): accessible from right click menu. Check which dialogs to show
26 | - Ronald's tracking and beam steering => will also require output of txt file with positions
27 | >> could be achieved with different outputsink which embodies two different outputs: saving unmodified images with some skipping (lower fps is ok) + high fps x,y pair saving to csv file.
28 |
29 | - Ellipse Detection => will also require output to csv file
30 | - Background subtraction (Schlieren applications): Another checkbox to activate
31 | - Colour Processing
32 | - Microscopy Toolbox:
33 | - Calibration + Scale bar? => with variable frequency target display scale bar on screen (+ stamped on images)
34 | - Imprint timestamp?
35 | - Opencv GoodFeaturesToTrack + optical flow?
36 | - Perhaps stack multiple post-processing options (pipeline) => ellipseDetection+track vs GoodFeatures+track vs Background + optical flow
37 |
38 | Interface for plugins (a rough C++ sketch follows after this list):
39 |
40 | function startProcess() -> bool : initialise the processing plugin (perhaps plan some fft's...)
41 |
42 | function setSettings(sampleFrame,othersettings) -> bool : set the parameters (can change a lot)
43 |
44 | function process(frame) -> frame : analyse the data and return result that will be displayed
45 |
46 | function startRecording(filename) -> bool : could be used for emptying the vectors/lists where we will push the results
47 |
48 | function endRecording() -> bool : could be used for saving the final csv file with all results
49 |
50 | function endProcess() -> bool : deinitialise everything
51 |
52 | + dialog to input these settings
53 |
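A rough, hypothetical C++ sketch of the interface listed above (the class and signature details are illustrative only and nothing of this exists in the code base yet; ImagePacket from imagepacket.h is used as the frame type):

    #include <QString>
    #include <QMap>
    #include <QVariant>
    #include "imagepacket.h"

    // Hypothetical plugin interface matching the functions proposed above.
    class ProcessingPlugin {
    public:
        virtual ~ProcessingPlugin() {}
        // initialise the processing plugin (perhaps plan some fft's...)
        virtual bool startProcess() = 0;
        // set the parameters (can change a lot while running)
        virtual bool setSettings(const ImagePacket& sampleFrame,
                                 const QMap<QString,QVariant>& otherSettings) = 0;
        // analyse the data and return the result that will be displayed
        virtual ImagePacket process(const ImagePacket& frame) = 0;
        // could be used for emptying the vectors/lists where the results will be pushed
        virtual bool startRecording(const QString& filename) = 0;
        // could be used for saving the final csv file with all results
        virtual bool endRecording() = 0;
        // deinitialise everything
        virtual bool endProcess() = 0;
    };

Each concrete plugin (e.g. the existing ellipse detection) would implement this interface, and its settings dialog would simply fill the QMap passed to setSettings.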
--------------------------------------------------------------------------------
/CompilingWindows.txt:
--------------------------------------------------------------------------------
1 | Compiling on Windows
2 | --------------------
3 | * Install Visual Studio 2017 Community; be sure to install the 'Desktop development with C++' option, or at least the VC++ 2017 toolset and the Windows SDK
4 | * Install Qt (version 5.9) + its sources
5 | * Install Git (Github desktop is an easy solution)
6 | * Opencv: Download and install in C:\opencv . It should have a build\x64\vc14 subdirectory
7 | * Add the corresponding 'bin'-directory to the windows path (the lib directory is only for linking)
8 | * HDF5: this is optional but allows recording into and reading from HDF5 containers
9 | - Download and install. Copy installed files to C:\HDF5\1.8.15
10 | - Add the 'bin' directory to the windows path
11 | * Now open QtCreator (located in /users/tips/Qt) and click New Project - Import Project - Git Clone and for repository type:
12 | https://github.com/SamDehaeck/GigaViewer . Now select where you want to save and click continue.
13 | * If you installed GitHub Desktop instead, clone the repository through its interface and open the .pro file from QtCreator afterwards.
14 |
15 | * For the moment, the precompiled Qt allows you to compile GigaViewer, but it only shows the result in the top right quadrant of the screen. The only solution
16 | I found so far is to recompile Qt myself (with the -opengl desktop option).
17 | Steps to compile:
18 | - Open a Visual Studio x64 native command line
19 | - Python 2 is also required (if you want qtwebengine) => install Anaconda or similar and activate its environment (Anaconda\Scripts\activate)
20 | - Make a new build directory (e.g. C:\Qt\5.9.1\build64) and go inside it
21 | - ..\Src\configure -opensource -opengl desktop
22 | - nmake (or jom for parallel compilation if you have copied the downloaded files from qt in your build directory)
23 | - The generated libraries can now be found in qtbase\bin and the plugins in plugins\
24 | - Make a new Qt kit and locate the qmake file in build64\qtbase\bin
25 | - Now the program should run as expected from within QtCreator (once you have copied the platforms folder to the directory in which GigaViewer is executed).
26 | - When GigaViewer can find the old precompiled Qt libraries (in the PATH), it will misbehave again.
27 |
28 | * Take a look at the configuration options mentioned in the GigaViewer.pro file to select the options you want to include.
29 | For the first compilation tests, it is best to leave all extra features turned off. In this way, the correct functioning of the Qt and OpenCV libraries is checked.
30 | The HDF5 option is a useful extension, as it gives access to a more 'universal' single-container movie format which is easily accessible from many different programs.
31 | The 'Tracking' option is work in progress to illustrate real-time processing of the recorded images. It has no real use for the moment.
32 | Before you enable the different camera backends, you need to install their respective 'drivers' first (to be found on their websites).
33 | * Inside this GigaViewer.pro file you can also find the compilation instructions for each platform. If the compilation does not work, check there that all
34 | include directories and library linking directories are correct.
35 |
36 | ********
37 |
38 | * If you want/need to recompile OpenCV or HDF5, you will also need to install CMake
39 | * Open CMake, locate the source directory and define a build directory.
40 | * Press configure and, if no problems are detected, press generate
41 | * Go into the build directory and build it (by typing make or with Visual Studio)
42 | * Install it (by typing make install or with Visual Studio)
43 |
--------------------------------------------------------------------------------
/ellipsedetectiondialog.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | EllipseDetectionDialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 400
10 | 211
11 |
12 |
13 |
14 | Ellipse Detection
15 |
16 |
17 | 0.800000000000000
18 |
19 |
20 |
21 |
22 | 10
23 | 9
24 | 381
25 | 191
26 |
27 |
28 |
29 | -
30 |
31 |
32 | Max Diameter
33 |
34 |
35 |
36 | -
37 |
38 |
39 | Activate Processing
40 |
41 |
42 |
43 | -
44 |
45 |
46 | Threshold
47 |
48 |
49 |
50 | -
51 |
52 |
53 | Fit parameters
54 |
55 |
56 |
57 | -
58 |
59 |
60 | 50
61 |
62 |
63 | Qt::Horizontal
64 |
65 |
66 |
67 | -
68 |
69 |
70 | Qt::NoFocus
71 |
72 |
73 | Show Edges
74 |
75 |
76 | true
77 |
78 |
79 |
80 | -
81 |
82 |
83 | 50
84 |
85 |
86 | Qt::Horizontal
87 |
88 |
89 |
90 | -
91 |
92 |
93 | Min Diameter
94 |
95 |
96 |
97 | -
98 |
99 |
100 | 1
101 |
102 |
103 | 50
104 |
105 |
106 | Qt::Horizontal
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
--------------------------------------------------------------------------------
/mrfsourcesink.cpp:
--------------------------------------------------------------------------------
1 | #include "mrfsourcesink.h"
2 | #include "opencv2/opencv.hpp"
3 |
4 | bool MrfSourceSink::Init(QString)
5 | {
6 | return true;
7 | }
8 |
9 | bool MrfSourceSink::StartAcquisition(QString dev)
10 | {
11 | #ifdef Q_OS_WIN32
12 | mrf = fopen(dev.toStdString().c_str(),"rb");
13 | #else
14 | mrf = fopen(dev.toUtf8().data(),"rb");
15 | #endif
16 | uint firstNums[8];
17 | if (fread(&firstNums,sizeof(uint32_t),8,mrf) < 1) {
18 | fprintf(stderr,"Error reading first part of input Mrf file.\n");
19 | return false;
20 | }
21 |
22 | nFrames=firstNums[4];
23 | rows=static_cast(firstNums[6]);
24 | cols=static_cast(firstNums[5]);
25 | bitsperpixel=firstNums[7];
26 | if (bitsperpixel==8) {
27 | dataformat="MONO8";
28 | bytesperchunk=static_cast(rows*cols);
29 | } else {
30 | dataformat="MONO16";
31 | bytesperchunk=static_cast(2*rows*cols);
32 | }
33 | uint userdat=firstNums[3]-20;
34 |
35 | #ifdef Q_OS_WIN32
36 | _fseeki64(mrf,userdat,SEEK_CUR);
37 | headersize=_ftelli64(mrf);
38 | #else
39 | fseek(mrf,userdat,SEEK_CUR);
40 | headersize=static_cast(ftell(mrf));
41 | #endif
42 |
43 |
44 |
45 | currPos=0;
46 | // qDebug()<<"found a lot of frames: "<= static_cast(nFrames)-1)||(currPos+indexIncrement <0)) {
64 | return true;
65 | }
66 | if (indexIncrement!=1) {
67 | #ifdef Q_OS_WIN32
68 | _fseeki64(mrf,(indexIncrement-1)*bytesperchunk,SEEK_CUR);
69 | #else
70 | fseek(mrf,(indexIncrement-1)*static_cast(bytesperchunk),SEEK_CUR);
71 | #endif
72 | }
73 |
74 | cv::Mat temp;
75 | if (bitsperpixel==8) {
76 | temp = cv::Mat(rows,cols,CV_8U); // normally this implies that the data of temp is continuous
77 | size_t amread=fread(temp.data, 1, static_cast(rows*cols), mrf);
78 | if (amread==static_cast(rows*cols)) target.image=temp.clone();
79 | } else {
80 | temp = cv::Mat(rows,cols,CV_16U); // normally this implies that the data of temp is continuous
81 | size_t amread=fread(temp.ptr(0), 2, static_cast(rows*cols), mrf);
82 | if (amread==static_cast(rows*cols)) target.image=temp.clone();
83 | }
84 |
85 |
86 |
87 | currPos+=indexIncrement;
88 | target.seqNumber=currPos;
89 | target.pixFormat=dataformat;
90 | target.timeStamp=0;
91 | return true;
92 |
93 | }
94 |
95 | bool MrfSourceSink::IsOpened()
96 | {
97 | return true;
98 | }
99 |
100 | bool MrfSourceSink::SkipFrames(bool forward) {
101 | int skipping = 0;
102 | if (forward) {
103 | skipping=nFrames/10;
104 | } else {
105 | skipping=-nFrames/50;
106 | }
107 | // qDebug()<<"Will try to skip "<= static_cast(nFrames)-1)||(currPos+skipping <0)) {
110 | return true;
111 | }
112 |
113 |
114 | #ifdef Q_OS_WIN32
115 | _fseeki64(mrf,(skipping-1)*bytesperchunk,SEEK_CUR);
116 | #else
117 | fseek(mrf,(skipping-1)*static_cast(bytesperchunk),SEEK_CUR);
118 | #endif
119 | currPos+=skipping;
120 | return true;
121 | }
122 |
--------------------------------------------------------------------------------
/xvisourcesink.cpp:
--------------------------------------------------------------------------------
1 | #include "xvisourcesink.h"
2 | #include "opencv2/opencv.hpp"
3 |
4 | bool XviSourceSink::Init(QString)
5 | {
6 | return true;
7 | }
8 |
9 | bool XviSourceSink::StartAcquisition(QString dev)
10 | {
11 |
12 | #ifdef Q_OS_WIN32
13 | xvi = fopen(dev.toStdString().c_str(),"rb");
14 | #else
15 | xvi = fopen(dev.toUtf8().data(),"rb");
16 | #endif
17 | uint firstNums[10];
18 | if (fread(&firstNums,sizeof(uint32_t),10,xvi) < 1) {
19 | fprintf(stderr,"Error reading first part of input xvi file.\n");
20 | return false;
21 | }
22 | rows=static_cast(firstNums[5]);
23 | cols=static_cast(firstNums[4]);
24 | headersize=firstNums[7];
25 | bytesperchunk=firstNums[2]; //including footer
26 | bitsperpixel=2;
27 | dataformat="FLOAT";
28 | footersize=bytesperchunk-firstNums[9];
29 |
30 | #ifdef Q_OS_WIN32
31 | _fseeki64(xvi,0,SEEK_END);
32 | nFrames=(_ftelli64(xvi)-headersize)/bytesperchunk;
33 | #else
34 | fseek(xvi,0,SEEK_END);
35 | nFrames=(static_cast(ftell(xvi))-headersize)/bytesperchunk;
36 | #endif
37 |
38 |
39 | fseek(xvi,static_cast(headersize),SEEK_SET);
40 | currPos=0;
41 | // qDebug()<<"found a lot of frames: "<= static_cast(nFrames-1))||(currPos+indexIncrement <0)) {
59 | return true;
60 | }
61 | if (indexIncrement!=1) {
62 | #ifdef Q_OS_WIN32
63 | _fseeki64(xvi,(indexIncrement-1)*bytesperchunk,SEEK_CUR);
64 | #else
65 | fseek(xvi,(indexIncrement-1)*static_cast(bytesperchunk),SEEK_CUR);
66 | #endif
67 | }
68 |
69 | cv::Mat temp = cv::Mat(rows,cols,CV_16U); // normally this implies that the data of temp is continuous
70 | size_t amread=fread(temp.ptr(0), 2, static_cast(rows*cols), xvi);
71 | if (amread==static_cast(rows*cols)) {
72 | cv::Mat temp2=cv::Mat(rows,cols,CV_32F);
73 | temp2=temp.mul(1.0);
74 | target.image=temp2.clone();
75 |
76 | }
77 | char dummy[12];
78 | amread=fread(dummy,1,12,xvi);
79 | int64 time;
80 | amread=fread(&time,8,1,xvi);
81 |
82 | #ifdef Q_OS_WIN32
83 | _fseeki64(xvi,footersize-20,SEEK_CUR);
84 | #else
85 | fseek(xvi,static_cast(footersize)-20,SEEK_CUR);
86 | #endif
87 |
88 | currPos+=indexIncrement;
89 | target.seqNumber=currPos;
90 | target.pixFormat=dataformat;
91 | target.timeStamp=time/1000.0;
92 | return true;
93 |
94 | return false;
95 | }
96 |
97 | bool XviSourceSink::IsOpened()
98 | {
99 | return true;
100 | }
101 |
102 | bool XviSourceSink::SkipFrames(bool forward) {
103 | int skipping = 0;
104 | if (forward) {
105 | skipping=static_cast(round(nFrames/10));
106 | } else {
107 | skipping=-static_cast(round(nFrames/50));
108 | }
109 | // qDebug()<<"Will try to skip "<= static_cast(nFrames)-1)||(currPos+skipping <0)) {
112 | return true;
113 | }
114 |
115 |
116 | #ifdef Q_OS_WIN32
117 | _fseeki64(xvi,(skipping-1)*bytesperchunk,SEEK_CUR);
118 | #else
119 | fseek(xvi,(skipping-1)*static_cast(bytesperchunk),SEEK_CUR);
120 | #endif
121 | currPos+=skipping;
122 | return true;
123 | }
124 |
--------------------------------------------------------------------------------
/coordinator.cpp:
--------------------------------------------------------------------------------
1 | #include "coordinator.h"
2 |
3 | Coordinator::Coordinator(QObject *parent) :
4 | QObject(parent),picBack(parent),camBack(parent)
5 | ,opencvRunning(false),avtRunning(false)
6 | {
7 | // backendThread=new QThread();
8 | // backendThread->start();
9 | // camBack.moveToThread(backendThread);
10 |
11 | //propagate the signals coming from the backend
12 | connect(&camBack,SIGNAL(NewImageReady(ImagePacket)),this,SIGNAL(NewImageReady(ImagePacket)));
13 | connect(&camBack,SIGNAL(shutterChanged(int)),this,SIGNAL(shutterChanged(int)));
14 | connect(&camBack,SIGNAL(fpsChanged(double)),this,SIGNAL(fpsChanged(double)));
15 |
16 | }
17 |
18 | void Coordinator::controlCameraThread(bool startNew,QString dev)
19 | {
20 | if (startNew) {
21 | if (camBack.StartAcquisition(dev)) {
22 | camBack.start(QThread::HighPriority); // max is QThread::TimeCriticalPriority
23 | opencvRunning=true;
24 | } else {
25 | //reset gui buttons
26 | // FIX THIS!!!!
27 | //theGui->returnToStart();
28 | }
29 | } else {
30 | if (camBack.isRunning()) {
31 | camBack.StopAcquisition();
32 | if (!camBack.wait(5000)) {
33 | qDebug()<<"Had to kill the thread";
34 | camBack.terminate();
35 | }
36 | opencvRunning=false;
37 | camBack.ReleaseCamera(); //this checks first if it is opened. Needs to be called in same thread as camera.open
38 | } else {
39 | //qDebug()<<"Thread not running";
40 | }
41 |
42 | }
43 | }
44 |
45 |
46 |
47 | void Coordinator::LoadNewMovie(QString theMovie)
48 | {
49 | controlCameraThread(true,theMovie);
50 | }
51 |
52 | void Coordinator::stopAcquisition()
53 | {
54 | if (opencvRunning) {
55 | controlCameraThread(false);
56 | }
57 | }
58 |
59 | void Coordinator::StartNewAVT(bool startNew)
60 | {
61 | controlCameraThread(startNew,"AVT");
62 | }
63 |
64 | void Coordinator::StartNewVimba(bool startNew)
65 | {
66 | controlCameraThread(startNew,"Vimba");
67 | }
68 |
69 | void Coordinator::StartNewIds(bool startNew)
70 | {
71 | controlCameraThread(startNew,"IDS");
72 | }
73 |
74 | void Coordinator::changeShutter(int time)
75 | {
76 | if (camBack.isRunning()) {
77 | camBack.SetShutter(time);
78 | }
79 | }
80 |
81 | void Coordinator::setAutoShutter(bool fitRange)
82 | {
83 | if (camBack.isRunning()) {
84 | camBack.SetAutoShutter(fitRange);
85 | }
86 | }
87 |
88 | void Coordinator::changeFps(double newFps)
89 | {
90 | if (camBack.isRunning()) {
91 | camBack.SetInterval(newFps);
92 | }
93 | }
94 |
95 | void Coordinator::setRoiCols(int cols)
96 | {
97 | if (camBack.isRunning()) {
98 | camBack.setRoiCols(cols);
99 | }
100 | }
101 |
102 | void Coordinator::setRoiRows(int rows)
103 | {
104 | if (camBack.isRunning()) {
105 | camBack.setRoiRows(rows);
106 | }
107 | }
108 |
109 | void Coordinator::skipForwardBackward(bool forward)
110 | {
111 | if (camBack.isRunning()) {
112 | camBack.skipForwardBackward(forward);
113 | }
114 | }
115 |
116 | void Coordinator::StartRecording(bool start, QString recFold, QString codec,int skip)
117 | {
118 | if (camBack.isRunning()) {
119 | camBack.StartRecording(start,recFold,codec,skip);
120 | }
121 | }
122 |
123 | void Coordinator::newPluginSettingsReceived(QMap<QString,QVariant> settings) {
124 | if (camBack.isRunning()) {
125 | //qInfo()<<"Should check the following plugin "<<settings["pluginName"];
--------------------------------------------------------------------------------
/fileinputdialog.ui:
--------------------------------------------------------------------------------
2 |
3 | FileInputDialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 127
10 | 330
11 |
12 |
13 |
14 |
15 | 0
16 | 0
17 |
18 |
19 |
20 | Select Input
21 |
22 |
23 | 0.800000000000000
24 |
25 |
26 | -
27 |
28 |
-
29 |
30 |
31 | Opencv Feed
32 |
33 |
34 | false
35 |
36 |
37 |
38 | -
39 |
40 |
41 | Load Movie
42 |
43 |
44 |
45 | -
46 |
47 |
48 | Exit App
49 |
50 |
51 |
52 | -
53 |
54 |
55 | AVT Feed
56 |
57 |
58 | false
59 |
60 |
61 |
62 | -
63 |
64 |
65 |
66 | 0
67 | 0
68 |
69 |
70 |
71 | Load Picture
72 |
73 |
74 |
75 | -
76 |
77 |
78 | Qt::Vertical
79 |
80 |
81 |
82 | 20
83 | 40
84 |
85 |
86 |
87 |
88 | -
89 |
90 |
91 | Vimba Feed
92 |
93 |
94 | false
95 |
96 |
97 |
98 | -
99 |
100 |
101 | Ids Feed
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 | filePushButton
111 | MovieButton
112 | camButton
113 | AvtButton
114 | pushButton_2
115 |
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/cameracontrolsdialog.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | CameraControlsDialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 468
10 | 125
11 |
12 |
13 |
14 | Camera Controls
15 |
16 |
17 | 0.800000000000000
18 |
19 |
20 | -
21 |
22 |
23 | Shutter Time (us)
24 |
25 |
26 |
27 | -
28 |
29 |
30 | true
31 |
32 |
33 | Qt::NoFocus
34 |
35 |
36 | Reset ROI
37 |
38 |
39 |
40 | -
41 |
42 |
43 | Qt::NoFocus
44 |
45 |
46 | Analyze Image
47 |
48 |
49 |
50 | -
51 |
52 |
53 | Qt::NoFocus
54 |
55 |
56 | Fit Range
57 |
58 |
59 |
60 | -
61 |
62 |
63 | Qt::NoFocus
64 |
65 |
66 | Fit Mean
67 |
68 |
69 |
70 | -
71 |
72 |
73 | Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter
74 |
75 |
76 | true
77 |
78 |
79 | 9999999
80 |
81 |
82 | 100
83 |
84 |
85 | 100
86 |
87 |
88 |
89 | -
90 |
91 |
92 | -
93 |
94 |
95 | x
96 |
97 |
98 | Qt::AlignCenter
99 |
100 |
101 |
102 | -
103 |
104 |
105 | -
106 |
107 |
108 | 0
109 |
110 |
111 | Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter
112 |
113 |
114 |
115 |
116 |
117 |
118 | shutterSpinBox
119 | FitRangeButton
120 | FitMeanButton
121 | IntensityButton
122 | ROIRows
123 | ROICols
124 | pushButton_2
125 |
126 |
127 |
128 |
129 |
--------------------------------------------------------------------------------
/opencvsourcesink.cpp:
--------------------------------------------------------------------------------
1 | #include "opencvsourcesink.h"
2 |
3 | bool OpencvSourceSink::Init(QString) {
4 | return true;
5 | }
6 |
7 | bool OpencvSourceSink::StartAcquisition(QString dev) {
8 | if (dev.startsWith("0")) {
9 | camera.open(0);
10 | nFrames=0;
11 | liveFeed=true;
12 | } else {
13 | #ifdef Q_OS_WIN32
14 | camera.open(dev.toStdString().c_str());
15 | #else
16 | camera.open(dev.toUtf8().data());
17 | #endif
18 | nFrames=static_cast(round(camera.get(cv::CAP_PROP_FRAME_COUNT))); // not sure about the round
19 | liveFeed=false;
20 | }
21 | return camera.isOpened();
22 | }
23 |
24 | QString OpencvSourceSink::StartRecording(QString recFold, QString codec, int fps,int cols,int rows) {
25 | QDateTime mom = QDateTime::currentDateTime();
26 | QString filenam=recFold+"/"+mom.toString("yyyyMMdd-hhmmss")+".avi";
27 | int fourcc=0;
28 | if (codec=="MSMPEG4V2") {
29 | fourcc=cv::VideoWriter::fourcc('M','P','4','2'); // for mpeg4 from windows
30 | } else if (codec=="XVID") {
31 | fourcc=cv::VideoWriter::fourcc('F','M','P','4'); //for xvid
32 | } else {
33 | fourcc=0;// uncompressed raw format
34 | }
35 | #ifdef Q_OS_WIN32
36 | recFile=cv::VideoWriter(filenam.toStdString().c_str(),fourcc,fps,cv::Size(cols,rows));
37 | #else
38 | recFile=cv::VideoWriter(filenam.toUtf8().data(),fourcc,fps,cv::Size(cols,rows));
39 | #endif
40 | return filenam;
41 | }
42 |
43 | bool OpencvSourceSink::StopRecording() {
44 | //nothing to do
45 | return true;
46 | }
47 |
48 | bool OpencvSourceSink::StopAcquisition() {
49 | //nothing specific to do, the thread will be quited in thread class
50 | return true;
51 | }
52 |
53 | bool OpencvSourceSink::ReleaseCamera() {
54 | if (camera.isOpened()) camera.release();
55 | return true;
56 | }
57 |
58 | bool OpencvSourceSink::RecordFrame(ImagePacket &source) {
59 | if (source.image.channels()==1) {
60 | cv::Mat dummy;
61 | cv::cvtColor(source.image,dummy,cv::COLOR_GRAY2RGB);
62 | recFile<=0) {
76 | camera.set(cv::CAP_PROP_POS_FRAMES,newpos);
77 | }
78 | }
79 | // qDebug()<(round(camera.get(cv::CAP_PROP_POS_FRAMES)));
83 | } else {
84 | target.seqNumber=target.seqNumber+1;
85 | }
86 | camera >> target.image;
87 | if (target.image.channels()==3) {
88 | target.pixFormat="RGB8";
89 | } else {
90 | target.pixFormat="MONO8";
91 | }
92 | double timetime=QDateTime::currentMSecsSinceEpoch();
93 | target.timeStamp=timetime;
94 |
95 | return true;
96 | }
97 |
98 | bool OpencvSourceSink::IsOpened() {
99 | return camera.isOpened();
100 | }
101 |
102 | bool OpencvSourceSink::SkipFrames(bool forward)
103 | {
104 | // qDebug()<<"Number of frames"<(round(camera.get(cv::CAP_PROP_POS_FRAMES)));
107 | int skipping = 0;
108 | if (forward) {
109 | skipping=nFrames/10;
110 | } else {
111 | skipping=-nFrames/50;
112 | }
113 | // qDebug()<<"Will try to skip "<= nFrames-1)||(currPos+skipping <0)) {
117 | return true;
118 | }
119 |
120 |
121 | camera.set(cv::CAP_PROP_POS_FRAMES,currPos+skipping);
122 | currPos+=skipping;
123 | }
124 | return true;
125 |
126 |
127 | }
128 |
129 |
--------------------------------------------------------------------------------
/interferoplugindialog.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | InterferoPluginDialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 483
10 | 204
11 |
12 |
13 |
14 | Interferogram Analysis
15 |
16 |
17 | 0.800000000000000
18 |
19 |
20 | -
21 |
22 |
-
23 |
24 |
25 | Skip frames
26 |
27 |
28 |
29 | -
30 |
31 |
32 | Activate Processing
33 |
34 |
35 |
36 | -
37 |
38 |
39 | 10
40 |
41 |
42 | 1
43 |
44 |
45 | 0
46 |
47 |
48 | Qt::Horizontal
49 |
50 |
51 | QSlider::TicksBelow
52 |
53 |
54 | 1
55 |
56 |
57 |
58 | -
59 |
60 |
61 | Qt::NoFocus
62 |
63 |
64 | Take new reference
65 |
66 |
67 | false
68 |
69 |
70 |
71 | -
72 |
73 |
74 | Subsampling
75 |
76 |
77 |
78 | -
79 |
80 |
81 | 1
82 |
83 |
84 | 10
85 |
86 |
87 | 1
88 |
89 |
90 | Qt::Horizontal
91 |
92 |
93 | QSlider::TicksBelow
94 |
95 |
96 | 1
97 |
98 |
99 |
100 | -
101 |
102 |
103 | Qt::Vertical
104 |
105 |
106 |
107 | 20
108 | 40
109 |
110 |
111 |
112 |
113 | -
114 |
115 |
116 | Fringe spacing ? and angle ?
117 |
118 |
119 | Qt::AlignCenter
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
--------------------------------------------------------------------------------
/videoglscene.cpp:
--------------------------------------------------------------------------------
1 | #include "videoglscene.h"
2 |
3 | #include
4 | #include
5 | #include
6 |
7 | VideoGlScene::VideoGlScene(QList controlDialogs, QObject *parent) :
8 | QGraphicsScene(parent),didInitOpengl(0)
9 | {
10 | QDialog* dial;
11 | int ini=0;
12 | QPointF pos(10, 10);
13 | foreach (dial, controlDialogs) {
14 | //now make the control dialogues
15 | QGraphicsProxyWidget *proxy = new QGraphicsProxyWidget(nullptr, Qt::Dialog|Qt::CustomizeWindowHint|Qt::WindowTitleHint);
16 | proxy->setWidget(dial);
17 | addItem(proxy);
18 |
19 | switch (ini) {
20 | case 0:
21 | proxy->setData(0,"INPUT");
22 | break;
23 | case 1:
24 | proxy->setData(0,"PLAYBACK");
25 | break;
26 | case 2:
27 | proxy->setData(0,"CAMERA");
28 | break;
29 | default: // all the others are plugins
30 | proxy->setData(0,"PLUGIN");
31 | break;
32 | }
33 | proxy->setFlag(QGraphicsItem::ItemIsMovable);
34 | proxy->setCacheMode(QGraphicsItem::DeviceCoordinateCache);
35 |
36 |
37 | if (ini!=0) proxy->setVisible(false);
38 |
39 | const QRectF rect = proxy->boundingRect();
40 | proxy->setPos(pos.x() - rect.x(), pos.y() - rect.y());
41 |
42 | // now will add the offset for placing the next dialog as we have the size now
43 | if (ini==0) pos += QPointF(650,-350); //This is the base Y offset for the extra panels
44 | pos += QPointF(0,10 + rect.height());
45 |
46 | ini=ini+1;
47 | }
48 |
49 |
50 | }
51 |
52 | void VideoGlScene::drawBackground(QPainter *painter, const QRectF &)
53 | {
54 | if (painter->paintEngine()->type() != QPaintEngine::OpenGL
55 | && painter->paintEngine()->type() != QPaintEngine::OpenGL2)
56 | {
57 | qWarning("OpenGLScene: drawBackground needs a QGLWidget to be set as viewport on the graphics view");
58 | return;
59 | }
60 |
61 | /* if (didInitOpengl==0) {
62 | initializeOpenGLFunctions();
63 | didInitOpengl=1;
64 | }*/
65 | QOpenGLFunctions_3_0 *f = QOpenGLContext::currentContext()->versionFunctions<QOpenGLFunctions_3_0>();
66 |
67 | // QOpenGLFunctions_3_3_Compatibility *f = QOpenGLContext::currentContext()->versionFunctions();
68 | f->initializeOpenGLFunctions();
69 | painter->beginNativePainting();
70 |
71 |
72 |
73 | //place image drawing code here
74 | int depth = imageBuff.depth();
75 | int cn = imageBuff.channels();
76 | GLenum format = GL_LUMINANCE;
77 | if (cn==3) {
78 | #ifdef Q_OS_WIN32
79 | // format = GL_RGB; // this setting was used for correct webcam viewing but is not working for Vimba colour cameras
80 | format = GL_BGR;
81 | #else
82 | format = GL_BGR;
83 | #endif
84 |
85 | } else if (cn==4) {
86 | #ifdef Q_OS_WIN32
87 | format = GL_RGBA;
88 | #else
89 | format = GL_BGRA;
90 | #endif
91 | }
92 | GLenum gldepth = GL_UNSIGNED_BYTE;
93 | if (depth==CV_16U) gldepth=GL_UNSIGNED_SHORT;
94 |
95 | f->glEnable(GL_TEXTURE_2D);
96 | f->glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
97 |
98 | GLuint mytex;
99 | f->glGenTextures(1,&mytex);
100 | f->glBindTexture(GL_TEXTURE_2D, mytex);
101 | f->glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
102 | f->glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
103 | f->glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
104 | f->glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
105 | if (imageBuff.isContinuous()) glPixelStorei(GL_UNPACK_ALIGNMENT,1);
106 | f->glTexImage2D(GL_TEXTURE_2D,0,cn,imageBuff.cols,imageBuff.rows,0,format,gldepth,imageBuff.data);
107 |
108 | //calculate projected size in order to keep aspect ratio intact
109 | int maxX=1024;
110 | int maxY=768;
111 | if (imageBuff.rows!=0) {
112 | double aspRatio=static_cast<double>(imageBuff.rows)/imageBuff.cols;
113 | double windowAspRatio=static_cast<double>(this->height())/this->width();
114 | if (aspRatio>windowAspRatio) {
115 | // amount of rows is limiting factor
116 | maxY=static_cast<int>(round(this->height()));
117 | maxX=static_cast<int>(round(maxY/aspRatio));
118 | } else {
119 | maxX=static_cast<int>(round(this->width()));
120 | maxY=static_cast<int>(round(maxX*aspRatio));
121 | }
122 | }
123 |
124 | f->glBegin(GL_QUADS);
125 |
126 | f->glTexCoord2f(0.0,0.0); f->glVertex2f(0,0);
127 | f->glTexCoord2f(1.0,0.0); f->glVertex2f(maxX,0.0);
128 | f->glTexCoord2f(1.0,1.0); f->glVertex2f(maxX,maxY);
129 | f->glTexCoord2f(0.0,1.0); f->glVertex2f(0.0,maxY);
130 |
131 | f->glEnd();
132 | f->glDisable(GL_TEXTURE_2D);
133 | f->glDeleteTextures(1,&mytex);
134 |
135 |
136 | painter->endNativePainting();
137 |
138 | }
139 |
140 |
141 |
142 |
143 |
--------------------------------------------------------------------------------
/cameracontrolsdialog.cpp:
--------------------------------------------------------------------------------
1 | #include "cameracontrolsdialog.h"
2 | #include "ui_cameracontrolsdialog.h"
3 | #include
4 | #include
5 | #include
6 |
7 |
8 |
9 | CameraControlsDialog::CameraControlsDialog(QWidget *parent) :
10 | QDialog(parent),
11 | ui(new Ui::CameraControlsDialog)
12 | {
13 | ui->setupUi(this);
14 | }
15 |
16 | CameraControlsDialog::~CameraControlsDialog()
17 | {
18 | delete ui;
19 | }
20 |
21 | void CameraControlsDialog::on_IntensityButton_clicked()
22 | {
23 | emit NeedNewSample();
24 | }
25 |
26 | void CameraControlsDialog::GotNewSample(ImagePacket imP)
27 | {
28 | double max;
29 | cv::minMaxLoc(imP.image,nullptr,&max);
30 | ui->IntensLabel->setText(QString::number(max));
31 | ui->ROIRows->setText(QString::number(imP.image.rows));
32 | ui->ROICols->setText(QString::number(imP.image.cols));
33 |
34 | if (imP.image.channels()==1) {
35 | /*
36 | // now do the fft to find the pattern frequency
37 | cv::Mat padded; //expand input image to optimal size
38 | int m = cv::getOptimalDFTSize( imP.image.rows );
39 | int n = cv::getOptimalDFTSize( imP.image.cols ); // on the border add zero values
40 | if (m chan;
49 | cv::split(result,chan);
50 | padded.convertTo(chan[0],CV_32F);
51 | cv::Scalar me=cv::mean(chan[0]);
52 | // qDebug()<<"Mean of image is"< log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
62 | cv::split(result, chan); // planes[0] = Re(DFT(I), planes[1] = Im(DFT(I))
63 | cv::Mat magI;
64 | cv::magnitude(chan[0], chan[1], magI);// planes[0] = magnitude
65 |
66 | magI = magI(cv::Rect(0, 0, magI.cols & -2, magI.rows & -2));
67 | int cx = magI.cols/2;
68 | int cy = magI.rows/2;
69 |
70 | cv::Mat q0(magI, cv::Rect(0, 0, cx, cy)); // Top-Left - Create a ROI per quadrant
71 | // cv::Mat q1(magI, cv::Rect(cx, 0, cx, cy)); // Top-Right
72 | cv::Mat q2(magI, cv::Rect(0, cy, cx, cy)); // Bottom-Left
73 | // cv::Mat q3(magI, cv::Rect(cx, cy, cx, cy)); // Bottom-Right
74 |
75 | cv::Mat tmp; // swap quadrants (Keep only left part of original (right after swap)
76 | q0.copyTo(tmp);
77 | q2.copyTo(q0);
78 | tmp.copyTo(q2);
79 |
80 | int xoffset=10; // to avoid using the DC peak, implies a minimum period of n/10 => 1024x1024 => 102 pixel period minimum!!
81 | cv::Mat newTemp=magI(cv::Rect(xoffset,0,cx-xoffset,magI.rows));
82 |
83 | cv::Point maxInd;
84 | double maxMag;
85 | cv::minMaxLoc(newTemp,NULL,&maxMag,NULL,&maxInd);
86 | double period=m/(sqrt(pow((maxInd.x),2.)+pow((maxInd.y-cy),2.))+xoffset); //problem if m!=n !!!! // add a '.' after the 2 to prevent error with type conversion msvc10 bug...
87 | double angle=atan2((double)(cy)-maxInd.y,maxInd.x)*180.0/3.1415965359; // type conversion of cy to avoid an error with msvc10, again...
88 |
89 | ui->IntensLabel->setText("Max Intensity: "+QString::number(max)+" - Dominant period: "+QString::number(period,'f',2)+" at "+QString::number(angle,'f',2)+QChar(0xB0));
90 | */
91 | ui->IntensLabel->setText("Max Intensity: "+QString::number(max));
92 | } else {
93 | ui->IntensLabel->setText("Max Intensity: "+QString::number(max));
94 | }
95 |
96 |
97 |
98 | }
99 |
100 | void CameraControlsDialog::GotNewShutterSpeed(int shut)
101 | {
102 | shutSpeed=shut;
103 | ui->shutterSpinBox->setValue(shut);
104 | }
105 |
106 | void CameraControlsDialog::on_shutterSpinBox_valueChanged(int arg1)
107 | {
108 | if (arg1!=shutSpeed) {
109 | emit SetShutterSpeed(arg1);
110 | shutSpeed=arg1;
111 | }
112 | }
113 |
114 | void CameraControlsDialog::on_FitRangeButton_clicked()
115 | {
116 | emit SetAutoShutter(true);
117 | }
118 |
119 | void CameraControlsDialog::on_FitMeanButton_clicked()
120 | {
121 | emit SetAutoShutter(false);
122 | }
123 |
124 | void CameraControlsDialog::on_ROIRows_editingFinished()
125 | {
126 | //qDebug()<<"ROI rows updated to "<ROIRows->text();
127 | emit SetRoiRows(ui->ROIRows->text().toInt());
128 | }
129 |
130 | void CameraControlsDialog::on_ROICols_editingFinished()
131 | {
132 | // qDebug()<<"ROI cols updated to "<ROICols->text();
133 | emit SetRoiCols(ui->ROICols->text().toInt());
134 | }
135 |
136 | void CameraControlsDialog::on_pushButton_2_clicked()
137 | {
138 | // emitting unrealistic roi values should set them to their maximum
139 | emit SetRoiRows(5000);
140 | emit SetRoiCols(5000);
141 | }
142 |
--------------------------------------------------------------------------------
/fmf-tools/matlab/FMF.m:
--------------------------------------------------------------------------------
1 | classdef FMF
2 | % FMF object allows to read an FMF movie
3 | %
4 | % This class was based on the functions written by the MOTMOT project
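% Example usage (hypothetical file name; assumes FMF.m is on the MATLAB path):
%   vid = FMF('myfile.fmf');       % open the movie and parse its header
%   [im, t] = vid.getFrame(1);     % first image (h-by-w matrix) and its timestamp
%   imshow(im, []);                % display it with automatic intensity scaling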
5 |
6 | properties
7 | filePointer; % a pointer to the open file
8 | datatype;
9 | data_format;
10 | header_size;
11 | h;
12 | w;
13 | bytes_per_chunk;
14 | n_frames;
15 | end
16 |
17 | methods
18 | function obj=FMF(filename)
19 | obj.filePointer = fopen( filename, 'r' );
20 | obj=readHeader(obj);
21 | % obj.datatype = fmf_get_datatype(obj.data_format);
22 |
23 | end
24 |
25 | function [image,time] = getFrame(obj,index)
26 | if (index>obj.n_frames),
27 | warning('index is larger than amount of frames present');
28 | time = 9e9;
29 | image = zeros( obj.h, obj.w, 'double' );
30 | else,
31 | fseek( obj.filePointer, obj.header_size+(index-1)*obj.bytes_per_chunk, 'bof' );
32 | [image,time]=readData(obj);
33 | end;
34 |
35 | end
36 | end
37 |
38 | methods (Access = private)
39 | function obj=readHeader(obj)
40 | % read header
41 | header_size = 28;
42 | version = double( fread( obj.filePointer, 1, 'uint32' ) );
43 | if version ~= 1,
44 | if version ~= 3,
45 | error( 'version not supported -- FMF versions 1 and 3 only' );
46 | end
47 | end
48 |
49 | if version == 1,
50 | obj.data_format = 'MONO8';
51 | end
52 |
53 | if version == 3,
54 | format_len = double( fread( obj.filePointer, 1, 'uint32' ) );
55 | obj.data_format = strcat( char( fread( obj.filePointer, format_len))');
56 | bits_per_pixel = double( fread( obj.filePointer, 1, 'uint32' ) );
57 | obj.header_size = header_size + format_len + 8;
58 | end
59 |
60 | obj=translateDataFormat(obj);
61 |
62 | obj.h = double( fread( obj.filePointer, 1, 'uint32' ) );
63 | obj.w = double( fread( obj.filePointer, 1, 'uint32' ) );
64 | %bytes_per_chunk = double( fread( fp, 1, 'uint64' ) );
65 | obj.bytes_per_chunk = double( fread( obj.filePointer, 1, 'long' ) );
66 | %frame_count_location = ftell(fp);
67 | %max_n_frames = double( fread( fp, 1, 'uint64' ) );
68 | temp=fread( obj.filePointer, 2, 'uint32' );
69 | obj.n_frames = double( temp(2) ); % ugly 'hack' to read this correctly, will not work for very large files
70 | %warning('Noted amount of frames: %d ',obj.n_frames);
71 |
72 | if obj.n_frames <= 0,
73 | fseek(obj.filePointer,0,'eof');
74 | endPos=ftell(obj.filePointer);
75 | obj.n_frames=int16((endPos-obj.header_size)/obj.bytes_per_chunk);
76 | warning('Could not read number of frames, calculated it to be %d ',obj.n_frames);
77 | % fseek( fp, frame_count_location, 'bof');
78 | % fwrite( fp, max_n_frames, 'uint64' );
79 | end
80 | end
81 |
82 | function obj=translateDataFormat(obj)
83 | if strcmp(obj.data_format,'MONO8'),
84 | obj.datatype = 'uint8'; % replace with '*uint8' if you want the output to be uint8 instead of double
85 | else,
86 | if (strcmp(obj.data_format,'MONO12')||strcmp(obj.data_format,'MONO14')||strcmp(obj.data_format,'BAYERRG12')),
87 | obj.datatype = 'uint16';
88 | else,
89 | if strcmp(obj.data_format,'BAYERRG8'),
90 | obj.datatype = 'uint8';
91 | else,
92 | if strcmp(obj.data_format,'BAYERGB8'),
93 | obj.datatype = 'uint8';
94 | else,
95 | error( 'Unrecognised data type' );
96 | obj.datatype='uint8';
97 | end
98 | end
99 | end
100 | end
101 | end
102 |
103 | function [data,stamp]=readData(obj)
104 | if feof( obj.filePointer ),
105 | stamp = 9e9;
106 | data = zeros( obj.h, obj.w, obj.datatype );
107 | else
108 | stamp = fread( obj.filePointer, 1, 'double' );
109 | end
110 |
111 | if feof( obj.filePointer ),
112 | stamp = 9e9;
113 | data = zeros( obj.h, obj.w, obj.datatype );
114 | else
115 | buf = fread( obj.filePointer, obj.h*obj.w, obj.datatype);
116 | if size( buf, 2 ) == 0 | size( buf, 1 ) ~= obj.w*obj.h
117 | stamp = 9e9;
118 | data = zeros( obj.h, obj.w, obj.datatype );
119 | else
120 | data = reshape( buf, obj.w, obj.h )';
121 | end
122 | %old: data = reshape( fread( obj.filePointer, h*w ), w, h )';
123 | end
124 | end
125 |
126 | function delete(obj)
127 | fclose( obj.filePointer );
128 | end
129 | end
130 | end
131 |
--------------------------------------------------------------------------------
/fmf-tools/imagej-plugin/Read_FMF_Virt.java:
--------------------------------------------------------------------------------
1 | /*
2 | Author : Sam Dehaeck (based on work by Jerome Mutterer - Guess Raw)
3 | Purpose : Read in a FMF file
4 | Application example : Read in a FMF file
5 | */
6 |
7 | import java.io.*;
8 | import ij.*;
9 | import ij.io.*;
10 | import ij.plugin.PlugIn;
11 | import ij.gui.*;
12 |
13 |
14 | public class Read_FMF_Virt implements PlugIn {
15 |
16 | // variables init
17 |
18 | public String FILENAME = "";
19 | public String DIRECTORY = "";
20 | public long IMAGE_WIDTH = 0;
21 | public long FILE_LENGTH = 0;
22 | public long START_VALUE = 0;
23 | public File RAW;
24 |
25 | // additional settings for raw file opening can be edited here :
26 |
27 | public long NUMBER_IMAGES = 1;
28 | public long FILE_OFFSET = 41;
29 | public long FILE_GAP = 8;
30 | public boolean WHITE_BACKGROUND = false;
31 |
32 | // end additional settings
33 |
34 |
35 | public void run(String arg) {
36 |
37 | OpenDialog od = new OpenDialog("Read FMF file... ", arg);
38 | FILENAME = od.getFileName();
39 | if (FILENAME == null) {IJ.error("no file selected"); return;}
40 | DIRECTORY = od.getDirectory();
41 | if (DIRECTORY == null) {IJ.error("no file selected"); return;}
42 |
43 | RAW = new File(DIRECTORY, FILENAME);
44 | FILE_LENGTH = RAW.length();
45 |
46 | DataInputStream inFile = null;
47 | try {
48 | inFile=new DataInputStream(new FileInputStream(RAW));
49 | //int version=inFile.readInt();
50 | //int formatlen=inFile.readInt();
51 | //long b = version & 0xffffffffL;
52 |
53 | byte[] buffer = new byte[4];
54 | inFile.readFully(buffer);
55 | int version = (buffer[0] & 0xFF) | (buffer[1] & 0xFF) << 8 | (buffer[2] & 0xFF) << 16 | (buffer[3] & 0xFF) << 24;
56 |
57 | inFile.readFully(buffer);
58 | int formatlen = (buffer[0] & 0xFF) | (buffer[1] & 0xFF) << 8 | (buffer[2] & 0xFF) << 16 | (buffer[3] & 0xFF) << 24;
59 | FILE_OFFSET = 48+(formatlen-4); // should add first timestamp of 8 bytes after real header size!!!
60 |
61 | //inFile.skipBytes(formatlen);
62 | char lettre='z';
63 | String form="";
64 | byte buflet;
65 | for (int i=0;i<formatlen;i++){
--------------------------------------------------------------------------------
/kafkacontroller.cpp:
--------------------------------------------------------------------------------
30 | // int x=static_cast<int>(QRandomGenerator::global()->bounded(99999)); // introduced in QT 5.10 and only 5.9 available on Ubuntu!
31 | qsrand(time(nullptr));
32 | int x=qrand() % 1000000;
33 | // qInfo("Random number is: %d",x);
34 | groupId=(groupId.append("-%1")).arg(x);
35 | // qInfo(groupId.toLocal8Bit());
36 | }
37 | if(RD_KAFKA_CONF_OK != rd_kafka_conf_set(consCfg, "group.id", groupId.toLocal8Bit(), // group id is set here!
38 | errstr, sizeof(errstr))) {
39 | qInfo("rd_kafka_conf_set() failed with error: %s\n", errstr);
40 | }
41 |
42 | // now start with configuration for topic
43 | consTopicCfg = rd_kafka_topic_conf_new();
44 | if (RD_KAFKA_CONF_OK != rd_kafka_topic_conf_set(consTopicCfg, "auto.offset.reset", earliestLatest.toLocal8Bit() // offset 'earliest' is set here!!
45 | ,errstr, sizeof(errstr))) {
46 | qInfo("rd_kafka_topic_conf_set() failed with error: %s\n", errstr);
47 | }
48 | // use this topicconfiguration as the default for all topics I will subscribe to
49 | rd_kafka_conf_set_default_topic_conf(consCfg, consTopicCfg);
50 |
51 | // Now create the consumer handle
52 | myConsumer = rd_kafka_new(RD_KAFKA_CONSUMER, consCfg, errstr, sizeof(errstr));
53 | if(myConsumer == nullptr) {
54 | qInfo("Failed to create consumer:%s", errstr);
55 | }
56 |
57 | /* rd_kafka_poll_set_consumer() is used to redirect the main queue which is
58 | * serviced using rd_kafka_poll() to the rd_kafka_consumer_poll(). With one api
59 | * 'rd_kafka_consumer_poll()' both callbacks and message are serviced.
60 | * Once queue is forwarded using this API, it is not permitted to call
61 | * rd_kafka_poll to service non message delivery callbacks.
62 | */
63 | rd_kafka_poll_set_consumer(myConsumer);
64 |
65 | /* Topic partition list (tp_list) is supplied as an input to the consumer
66 | * subscribe(using rd_kafka_subscribe()). The api rd_kafka_subscribe() expects
67 | * that the partition argument to be set to RD_KAFKA_PARTITION_UA and internally
68 | * all partitions are assigned to the consumer.
69 | * Note: partition balancing/assignment is done if more consumers are part
70 | * of the same consumer group.
71 | */
72 |
73 | rd_kafka_topic_partition_list_t *tp_list = rd_kafka_topic_partition_list_new(0);
74 | rd_kafka_topic_partition_t* tpObj = rd_kafka_topic_partition_list_add(tp_list,
75 | topic.toLocal8Bit(), RD_KAFKA_PARTITION_UA);
76 | if (nullptr == tpObj) {
77 | qInfo("Could not add the topic partition to the list.\n");
78 | }
79 |
80 | errCode = rd_kafka_subscribe(myConsumer, tp_list);
81 | if (errCode != RD_KAFKA_RESP_ERR_NO_ERROR) {
82 | qInfo("Topic partition subscription failed. ERROR: %d\n", errCode);
83 | }
84 | // now remove topiclist
85 | rd_kafka_topic_partition_list_destroy(tp_list);
86 | return myConsumer;
87 | }
88 |
89 | // this is the slot called by the timer
90 | void KafkaController::timedConsumption() {
91 | //should first check if I am connected to a camera!
92 | QList<QJsonDocument> newCommands=consumeMsg(consumerHdle,timeout);
93 |
94 | if (newCommands.count()>0) {
95 | //qInfo("Received %i message(s) on topic %s",newCommands.count(),qUtf8Printable(topic));
96 | emit sendNewData(newCommands);
97 | }
98 | }
99 |
100 | // this will poll for new messages until the queue is empty and return a list of json's
101 | // Other info that is discarded: key, offset, partition, topic
102 | QList<QJsonDocument> KafkaController::consumeMsg(rd_kafka_t* handle,int timeout,int maxMessages) {
103 | bool breakFromLoop=false;
104 | int receivedNumber=0;
105 | QList<QJsonDocument> messageList;
106 | while (not breakFromLoop) {
107 | rd_kafka_message_t *msg = rd_kafka_consumer_poll(handle, timeout);
108 | if (msg != nullptr) {
109 | if (msg->err == RD_KAFKA_RESP_ERR_NO_ERROR) {
110 | QByteArray topp=rd_kafka_topic_name(msg->rkt);
111 | QByteArray message=QByteArray(static_cast(msg->payload));
112 | //int64_t offset=msg->offset; // this parameter is interesting so as to see if you are lagging!
113 | QJsonDocument qjson=QJsonDocument::fromJson(message);
114 | if (qjson.isNull()){
115 | qInfo("Problem parsing message from topic %s: %s",topp.data(),static_cast(msg->payload));
116 | } else {
117 | messageList.append(qjson);
118 | receivedNumber++;
119 | }
120 | } else {
121 | qInfo("Some error message received in consumption");
122 | breakFromLoop=true; // not the same, but should probably break from loop
123 | }
124 | rd_kafka_message_destroy(msg);
125 | if (receivedNumber>maxMessages) {
126 | breakFromLoop=true;
127 | }
128 | } else {
129 | breakFromLoop=true;
130 | }
131 | }
132 | return messageList;
133 | }
134 |
135 | void KafkaController::willStartTheTimer(int interval)
136 | {
137 | timer->setInterval(interval);
138 | timer->start();
139 | }
140 |
141 | void KafkaController::willStopTheTimer()
142 | {
143 | timer->stop();
144 | }
145 |
--------------------------------------------------------------------------------
/ellipsedetection.cpp:
--------------------------------------------------------------------------------
1 | #include "ellipsedetection.h"
2 |
3 |
4 | using namespace cv;
5 |
6 | EllipseDetection::EllipseDetection(int thresh) : threshold(thresh),activated(false),feedback(false){
7 |
8 | }
9 |
10 | void EllipseDetection::ChangeSettings(QMap<QString,QVariant> settings) {
11 | minDiameter=settings["MinD"].toInt();
12 | maxDiameter=settings["MaxD"].toInt();
13 | threshold=settings["threshold"].toInt();
14 | if ((!activated)&&(settings["activated"].toBool())) {
15 | //qDebug()<<"Should initialise";
16 | }
17 | if (activated&&(!settings["activated"].toBool())) {
18 | //qDebug()<<"Should write to disk";
19 | }
20 |
21 | activated=settings["activated"].toBool();
22 | feedback=settings["showFeedback"].toBool();
23 | //qDebug()<<"activated="<(round(threshold*255/100.0)); //change here if it needs to work for 16-bit
39 |
40 | int apSize=3;
41 | // Find edge pixels
42 | Mat I=currIm.image.clone();
43 | Mat edges;
44 | // Mat sobx,soby,sobang,sobmag,edges;
45 | // Sobel( I, sobx,CV_32F,1, 0, apSize );
46 | // Sobel( I, soby,CV_32F, 0, 1, apSize );
47 | // cartToPolar(sobx,soby,sobmag,sobang);
48 | Canny( I, edges,0.5*newT,newT,apSize,true);
49 |
50 | Mat labs; // = Mat::zeros( edges.size(), CV_32S ); // should be able to store all contours in a different label => 8U not suff
51 | Mat stats;// = Mat::zeros( edges.size(), CV_32S ); // should be able to store all contours in a different label => 8U not suff
52 | Mat centres;
53 | int nrLabs=connectedComponentsWithStats(edges,labs,stats,centres);
54 |
55 | //qDebug()<<"Amount of labels: "<<nrLabs;
56 | if (nrLabs>1) {
57 | // now do single loop over labs to create a vector of vector of points where each subvector contains the coordinates
58 | // of that label. Also make a separate vector to contain amount of edge points for each label.
59 | // Easier to eliminate too small or too large labels before full loop.
60 | int32_t* pixPointer;
61 | size_t nrLabsSize=static_cast<size_t>(nrLabs);
62 | std::vector<std::vector<Point> > labCont(nrLabsSize);
63 | std::vector<int> counter(nrLabsSize);
64 |
65 | for (int i=0;i<labs.rows;i++) {
66 | pixPointer=labs.ptr<int32_t>(i);
67 | for (int j=0;j<labs.cols;j++) {
68 | int label=static_cast<int>(pixPointer[j]);
69 | if (label==0) continue;
70 |
71 | labCont[label-1].push_back(Point(j,i));
72 | if (labCont[label-1].size()==0) {
73 | counter[label-1]=1;
74 | } else {
75 | counter[label-1]=counter[label-1]+1;
76 | }
77 |
78 | }
79 | }
80 |
81 | // vector accum;
82 | // int solCounter=0;
83 | // int resColumns=0;
84 | int minContour= 20;
85 | int maxSize=max(I.rows,I.cols);
86 | double minDiam = minDiameter/100.0*maxSize;
87 | double maxDiam = maxDiameter/100.0*maxSize;
88 | std::vector<RotatedRect> foundEllipses;
89 | for (size_t i=0;i<labCont.size();i++) {
94 | double aspRat=static_cast<double>(fEll.size.width/fEll.size.height);
95 | double minAxis=static_cast<double>(min(fEll.size.width,fEll.size.height));
96 | double maxAxis=static_cast<double>(max(fEll.size.width,fEll.size.height));
97 | // qDebug()<<"Min and max are: "<<minAxis<<" "<<maxAxis;
98 | if (minAxis>minDiam && maxAxis<maxDiam) {
99 | //if (aspRat>0.8 && aspRat < 1.2) {
100 | cv::ellipse(I,fEll,150,5);
101 | foundEllipses.push_back(fEll);
102 | //}
103 | }
104 | }
105 |
106 | }
107 |
108 | // now calculate mean equivalent diameter
109 | if (foundEllipses.size()>0) {
110 | double accumMean=0;
111 | for (uint i=0;i<foundEllipses.size();i++) {
112 | accumMean+=sqrt(static_cast<double>(foundEllipses[i].size.width*foundEllipses[i].size.height));
113 | }
114 | double myMean=accumMean/foundEllipses.size();
115 | currIm.message.insert("Ellipse",myMean); // put the result in the image message.
116 | //qDebug()<<"Mean: "<<myMean;
141 | // std::vector<std::vector<cv::Point> > contours;
142 | // cv::Mat hierarchy;
143 | // cv::findContours( processed, contours, hierarchy, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, Point(0, 0) );
144 |
145 | // cv::Mat outImage=currIm.image.clone(); // use a clone to avoid storing on raw image which is stored.
146 | // cv::drawContours(outImage,contours,-1,255,2);
147 |
148 | // cv::Point newLoc(targetX*currIm.image.cols/100.0,targetY*currIm.image.rows/100.0);
149 | // cv::ellipse(outImage,newLoc,cv::Size(5,5),0,0,360,150,-1);
150 |
151 | // currIm.image=outImage;
152 |
153 | // now make string to save
154 | //QString outst(""
155 | // outst=""+
156 | // currIm.timeStamp+" "+threshold+" "+targetX+""
157 | // dataToSave.append(outst);
158 |
159 |
160 | } else {
161 | qDebug()<<"Image format not yet supported! "<<currIm.pixFormat;
--------------------------------------------------------------------------------
/regexsourcesink.cpp:
--------------------------------------------------------------------------------
1 | #include "regexsourcesink.h"
3 | #include
4 |
5 | bool RegexSourceSink::Init(QString)
6 | {
7 | return true;
8 | }
9 |
10 | bool RegexSourceSink::StartAcquisition(QString dev)
11 | {
12 | QRegExp rx("(.+)/(.+)(\\..+)"); //somepath/somename.someextension
13 | int pos=0;
14 |
15 | pos=rx.indexIn(dev);
16 | dir=rx.cap(1);
17 | basename=rx.cap(2);
18 | extension=rx.cap(3);
19 |
20 | QRegExp rt("(\\d+$)"); // now look for digits in the basename
21 | pos=rt.indexIn(basename);
22 | basename.truncate(pos);
23 |
24 | // qDebug()< map;
42 |
43 |
44 | for (int i=0;icount();
62 |
63 | return true;
64 | }
65 |
66 | bool RegexSourceSink::StopAcquisition()
67 | {
68 | return true;
69 | }
70 |
71 | bool RegexSourceSink::ReleaseCamera()
72 | {
73 | delete goodFiles;
74 | return true;
75 | }
76 |
77 | bool RegexSourceSink::GrabFrame(ImagePacket &target, int indexIncrement)
78 | {
79 | if (index+indexIncrement<goodFiles->count() && index+indexIncrement>0) {
80 | index+=indexIncrement;
81 | QFile dum(dir+"/"+goodFiles->at(index));
82 | if (dum.exists()) {
83 | #ifdef Q_OS_WIN32
84 | target.image=cv::imread(dum.fileName().toStdString().c_str());
85 | #else
86 | target.image=cv::imread(dum.fileName().toUtf8().data()); //,cv::IMREAD_ANYDEPTH);
87 | #endif
88 | target.seqNumber=index;
89 | return true;
90 | } else {
91 | qDebug()<<"File did not exist"<<dum.fileName();
149 | if (pos>-1) {
150 | dir=rx.cap(1);
151 | basename=rx.cap(2);
152 | } else {
153 | qDebug()<<"Recording Folder does not exist";
154 | dir=recFold;
155 | basename="image-";
156 | }
157 | // qDebug()<<"recFold"<<recFold;
184 | for (std::vector<double>::iterator iter = timestamps.begin(); iter != timestamps.end(); iter++){
185 | out << *iter<<"\n";
186 | }
187 | fileout.close();
188 | }
189 |
190 | // now write images; queue will be blocked while this is taking place!
191 | // also perform conversion of bayer images here.
192 | for (int i = 0; i < frames.size(); ++i) {
193 | cv::Mat temp=frames.at(i);
194 |
195 | if (pixFormat=="BAYERRG8") {
196 | cv::Mat dummy(temp.rows,temp.cols,CV_8UC3);
197 | cv::cvtColor(temp,dummy,cv::COLOR_BayerRG2RGB);
198 | temp=dummy;
199 | } else if (pixFormat=="BAYERGB8") {
200 | cv::Mat dummy(temp.rows,temp.cols,CV_8UC3);
201 | cv::cvtColor(temp,dummy,cv::COLOR_BayerGB2RGB);
202 | temp=dummy;
203 | } else if (pixFormat=="BAYERRG12") {
204 | cv::Mat dummy(temp.rows,temp.cols,CV_16UC3);
205 | cv::cvtColor(temp,dummy,cv::COLOR_BayerRG2RGB);
206 | temp=dummy;
207 | }
208 |
209 |
210 | QString filenam=QString(dir+"/"+basename+"%1"+extension).arg(i,8,10,QLatin1Char('0'));
211 | #ifdef Q_OS_WIN32
212 | cv::imwrite(filenam.toStdString().c_str(),temp);
213 | #else
214 | cv::imwrite(filenam.toUtf8().data(),temp);
215 | #endif
216 | }
217 |
218 | return true;
219 | }
220 |
221 | bool RegexSourceSink::IsOpened()
222 | {
223 | return true;
224 | }
225 |
226 | bool RegexSourceSink::SkipFrames(bool forward)
227 | {
228 | int skipping = 0;
229 | if (forward) {
230 | skipping=nFrames/10;
231 | } else {
232 | skipping=-nFrames/50;
233 | }
234 | // qDebug()<<"Will try to skip "<<skipping;
236 | if (index+skipping<nFrames && index+skipping>0) {
237 | index+=skipping;
238 | return true;
239 | } else {
240 | return true; // it is not a real error after all
241 | }
242 | }
243 |
244 |
245 |
246 |
--------------------------------------------------------------------------------
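A minimal sketch of the filename handling in RegexSourceSink::StartAcquisition above; the path is hypothetical, only the two patterns are taken from the code:

    QRegExp rx("(.+)/(.+)(\\..+)");          // somepath/somename.someextension
    rx.indexIn("/data/run01/frame0001.png");
    // rx.cap(1)=="/data/run01", rx.cap(2)=="frame0001", rx.cap(3)==".png"
    QRegExp rt("(\\d+$)");                   // trailing digits of the basename
    QString basename="frame0001";
    basename.truncate(rt.indexIn(basename)); // indexIn returns 5, so basename becomes "frame"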
/main.cpp:
--------------------------------------------------------------------------------
1 | #include <QtGui>
2 | #if QT_VERSION >= 0x050000
3 | #include <QtWidgets>
4 | #endif
5 |
6 | #include "mainwindow.h"
7 | #include "maingui.h"
8 | #include "coordinator.h"
9 | #include "imagepacket.h"
10 |
11 | #ifdef KAFKA
12 | #include "kafkafrontend.h"
13 | #endif
14 |
15 |
16 | int main(int argc, char *argv[])
17 | {
18 | Coordinator theBoss;
19 |
20 | bool showGui(true);
21 | bool showGuiControls(true);
22 | bool remoteLog(false);
23 | bool remoteControls(false);
24 |
25 | #ifdef KAFKA
26 | QString settingFile("");
27 | QString logTopic("");
28 | QString cmdTopic("");
29 | QString cfgTopic("");
30 | QString groupid("");
31 | QString driver("0");
32 | QString devicenumber("-1");
33 | QString serial("-1");
34 | QString pixelclock("-1");
35 | bool autostartCam(false);
36 | QString host("localhost:9092");
37 | QString topicBasename("");
38 |
39 | if (argc==2) {
40 | QString theFile=QString::fromUtf8(argv[1]);
41 | if (QFile::exists(theFile)) {
42 | if (theFile.contains(".json")) {
43 | // given a json file => should read configuration data
44 | //qInfo("Found a json file; should interpret it");
45 | QFile file(theFile);
46 | file.open(QIODevice::ReadOnly|QIODevice::Text);
47 | QString val=file.readAll();
48 | file.close();
49 | QJsonDocument d = QJsonDocument::fromJson(val.toUtf8());
50 | if (d.isEmpty()) {
51 | qInfo("Problem parsing launch json file");
52 | }
53 | QJsonObject oo =d.object();
54 | QStringList kk=oo.keys();
55 | //qInfo("Got here: %s",val.toUtf8().data());
56 | if (kk.contains("kafka")) {
57 | QJsonObject kafkaSettings=oo["kafka"].toObject();
58 |
59 | // Now read the settings-file
60 | settingFile=kafkaSettings["settingsfile"].toString();
61 | qInfo("Found the file: %s",settingFile.toLatin1().data());
62 | if (QFile::exists(settingFile)) {
63 | if (settingFile.contains(".json")) {
64 | // given a json file => should read configuration data
65 | QFile fileSett(settingFile);
66 | fileSett.open(QIODevice::ReadOnly|QIODevice::Text);
67 | QString valSett=fileSett.readAll();
68 | fileSett.close();
69 | QJsonDocument dSett = QJsonDocument::fromJson(valSett.toUtf8());
70 | if (dSett.isEmpty()) {
71 | qInfo("Problem parsing kafka-settings json file");
72 | }
73 | QJsonObject ooSett =dSett.object();
74 | QStringList kkSett=ooSett.keys();
75 | //foreach (QString kitem, kkSett) {
76 | // qInfo("Found the key %s",kitem.toLatin1().data());
77 | //}
78 | //qInfo("Will try to read the settings file");
79 |
80 | if (kkSett.contains("connectParams")) {
81 | host=ooSett["connectParams"].toObject()["bootstrap.servers"].toString();
82 | //qInfo("Found the host %s",host.toLatin1().data());
83 | }
84 | if (kkSett.contains("topicBasename")) {
85 | topicBasename=ooSett["topicBasename"].toString() + QString(".");
86 | //qInfo("Found a topicBasename %s",topicBasename.toLatin1().data());
87 | }
88 | }
89 | }
90 |
91 | // these topics do not yet contain the topicBasename
92 | logTopic=topicBasename+kafkaSettings["logtopic"].toString();
93 | cmdTopic=topicBasename+kafkaSettings["cmdtopic"].toString();
94 | cfgTopic=topicBasename+kafkaSettings["cfgtopic"].toString();
95 | groupid=kafkaSettings["groupid"].toString();
96 |
97 |
98 | }
99 | if (kk.contains("interface")) {
100 | QJsonObject controls=oo["interface"].toObject();
101 | showGui=controls["showgui"].toBool(true);
102 | showGuiControls=controls["showguicontrols"].toBool(true);
103 | remoteLog=controls["remotelog"].toBool(false);
104 | remoteControls=controls["remotecontrols"].toBool(false);
105 | //qInfo("Found settings: %i, %i, %i, %i",showGui,showGuiControls,remoteLog,remoteControls);
106 | }
107 | if (kk.contains("camera")) {
108 | QJsonObject camera=oo["camera"].toObject();
109 | driver=camera["driver"].toString();
110 | devicenumber=camera["devicenumber"].toString("-1");
111 | serial=camera["cameraserial"].toString("-1");
112 | pixelclock=camera["pixelclock"].toString("-1");
113 |
114 | // now combine all these parameters into driver
115 | driver=driver+QString("@")+devicenumber+QString("@")+serial+QString("@")+pixelclock;
116 | //qInfo()<<"The full driver name will be "<<driver;
126 | qRegisterMetaType<ImagePacket>("ImagePacket");
127 | KafkaFrontend kafka(&theBoss);
128 | if (remoteLog) {
129 | qInfo("Doing the remote log");
130 | kafka.makePublisher(logTopic);
131 | }
132 | if (remoteControls) {
133 | qInfo("Doing the remote control");
134 | kafka.makeConsumers(cfgTopic,cmdTopic,groupid);
135 | }
136 |
137 | if (autostartCam) {
138 | //theBoss.controlCameraThread(true);
139 | emit theBoss.LoadNewMovie(driver);
140 | }
141 | return a.exec();
142 | }
143 | #endif
144 |
145 | // fall through to original gui if second argument is not kafka
146 | QApplication a(argc, argv);
147 | qRegisterMetaType<ImagePacket>("ImagePacket");
148 | MainWindow win;
149 | MainGui view(&theBoss);
150 | win.setCentralWidget(&view);
151 |
152 |
153 | QShortcut *fullToggle = new QShortcut(QKeySequence(Qt::Key_F),&win);
154 | QShortcut *closeEv = new QShortcut(QKeySequence(Qt::Key_Escape),&win);
155 |
156 | QObject::connect(&win,SIGNAL(windowClosed()),&theBoss,SLOT(stopAcquisition()));
157 | QObject::connect(&view,SIGNAL(closeApplic()),&win,SLOT(close()));
158 | QObject::connect(fullToggle,SIGNAL(activated()),&win,SLOT(toggleFullscreen()));
159 | QObject::connect(closeEv,SIGNAL(activated()),&win,SLOT(close()));
160 |
161 | win.showMaximized();
162 |
163 | if (argc==2) {
164 | QString theFile=QString::fromUtf8(argv[1]);
165 | if (not theFile.contains(".json")) {
166 | view.newMoviePressed(theFile);
167 | }
168 | }
169 |
170 | #ifdef KAFKA
171 | // placed here, gui and logging or gui and controls can be activated!
172 | KafkaFrontend kafka(&theBoss); // cannot be placed inside the if-statement or else not responding to events! => will always exist if Kafka enabled..
173 | if (remoteLog or remoteControls) {
174 | //KafkaFrontend kafka(&theBoss);
175 | if (remoteLog) {
176 | kafka.makePublisher(logTopic,host);
177 | qInfo("Making the remote log %s",logTopic.toLatin1().data());
178 | }
179 | if (remoteControls) {
180 | kafka.makeConsumers(cfgTopic,cmdTopic,groupid,host);
181 | qInfo("Doing the remote control %s and configuration %s",qUtf8Printable(cmdTopic),qUtf8Printable(cfgTopic));
182 | }
183 | }
184 |
185 | if (autostartCam) {
186 | //theBoss.controlCameraThread(true);
187 | view.newMoviePressed(driver);
188 | }
189 |
190 | if (not showGuiControls) {
191 | view.showInputControls(false);
192 | view.showCameraControls(false);
193 | view.showPlaybackControls(false);
194 | }
195 | #endif
196 |
197 | return a.exec();
198 | }
199 |
200 |
201 |
--------------------------------------------------------------------------------
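For illustration only, a launch file passed to main() as the single command-line argument could look like the sketch below; every value is made up, but the keys are the ones the code above actually reads, and the separate file named under "settingsfile" is expected to provide "connectParams" (with "bootstrap.servers") and "topicBasename". The camera entries are concatenated into one driver string of the form driver@devicenumber@serial@pixelclock.

    {
      "kafka": { "settingsfile": "/path/to/kafka-settings.json",
                 "logtopic": "log", "cmdtopic": "cmd", "cfgtopic": "cfg",
                 "groupid": "gigaviewer" },
      "interface": { "showgui": true, "showguicontrols": false,
                     "remotelog": true, "remotecontrols": true },
      "camera": { "driver": "IDS", "devicenumber": "0",
                  "cameraserial": "-1", "pixelclock": "-1" }
    }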
/GigaViewer.pro:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------
2 | #
3 | # Project created by QtCreator 2012-01-04T15:26:37
4 | #
5 | #-------------------------------------------------
6 | QT += core gui widgets
7 |
8 |
9 | TARGET = GigaViewer
10 | TEMPLATE = app
11 |
12 | CONFIG += HDF5 # enable HDF5 format for storing and reading files
13 | CONFIG += IDS # use GigE and USB3 cameras from IDS: https://en.ids-imaging.com/
14 | #CONFIG += PVAPI # use GigE cameras from Prosilica (now AVT). Available on Windows/Mac/Linux: https://www.alliedvision.com
15 | #CONFIG += VIMBA # use GigE and USB3 cameras from AVT (newer version of above). For now only Windows/Linux: https://www.alliedvision.com
16 | # on Windows also support for Firewire cameras
17 | #CONFIG += ELLIPSE # enable real-time detection of ellipses in the image (backend-plugin)
18 | CONFIG += INTERFERO # enable real-time analysis of interferograms (frontend-only plugin)
19 | CONFIG += AMPLI # enable gui-plotting of a line for amplitude optimisation (frontend-only plugin)
20 |
21 | CONFIG += KAFKA # enable the kafka frontend option
22 |
23 |
24 |
25 |
26 | #CONFIG += console
27 |
28 | HDF5 {
29 | DEFINES *= ENABLE_HDF5
30 | }
31 | IDS {
32 | DEFINES *= IDS
33 | # TARGET = GigaViewer_Cams
34 | }
35 | PVAPI {
36 | DEFINES *= PVAPI
37 | # TARGET = GigaViewer_Cams
38 | }
39 | VIMBA {
40 | DEFINES *= VIMBA
41 | # TARGET = GigaViewer_Cams
42 | # IDS {
43 | # TARGET = GigaViewer_Cams
44 | # }
45 |
46 | }
47 |
48 | ELLIPSE {
49 | DEFINES *= ELLIPSE
50 | TARGET = GigaViewer_Plugins
51 | }
52 |
53 | INTERFERO {
54 | DEFINES *= INTERFERO
55 | TARGET = GigaViewer_Plugins
56 | }
57 |
58 | AMPLI {
59 | QT += charts
60 | DEFINES *= AMPLI
61 | TARGET = GigaViewer_Plugins
62 | }
63 |
64 | KAFKA {
65 | DEFINES *= KAFKA
66 | }
67 |
68 | #message(The Defines are $$DEFINES)
69 |
70 | win32 {
71 | CONFIG += console # switch this on if you want to see some debug output to the console.
72 | message(Compiling for windows)
73 | INCLUDEPATH += E:\opencv\build\include
74 | QMAKE_LIBDIR += "E:\opencv\build\x64\vc14\lib"
75 | HDF5 {
76 | QMAKE_INCDIR += "C:\Program Files\HDF_Group\HDF5\1.8.19\include" #this cannot have a space => copy installed hdf5 folder to the root
77 | QMAKE_LIBDIR += "C:\Program Files\HDF_Group\HDF5\1.8.19\lib"
78 | LIBS += -lhdf5 -lhdf5_cpp -lhdf5_hl -lhdf5_hl_cpp
79 | }
80 |
81 | PVAPI {
82 | INCLUDEPATH += "C:\Program Files\Allied Vision Technologies\GigESDK\inc-pc"
83 | LIBS += "C:\Program Files\Allied Vision Technologies\GigESDK\lib-pc\x64\PvAPI.lib" \
84 | "C:\Program Files\Allied Vision Technologies\GigESDK\lib-pc\x64\ImageLib.lib" \
85 | }
86 | VIMBA {
87 | # INCLUDEPATH += "C:\Program Files\Allied Vision Technologies\AVTVimba_1.3"
88 | INCLUDEPATH += "C:\Program Files\Allied Vision\Vimba_2.0"
89 | # LIBS += "C:\Program Files\Allied Vision Technologies\AVTVimba_1.3\VimbaCPP\Lib\Win32\VimbaCPP.lib"
90 | LIBS += "C:\Program Files\Allied Vision\Vimba_2.0\VimbaCPP\Lib\Win64\VimbaCPP.lib"
91 | }
92 | IDS {
93 | INCLUDEPATH += "C:\Program Files\IDS\uEye\Develop\include"
94 | LIBS += "C:\Program Files\IDS\uEye\Develop\Lib\uEye_api_64.lib"
95 | }
96 | # LIBS += -lopengl32 -lopencv_core2411 -lopencv_imgproc2411 -lopencv_highgui2411 -lopencv_video2411
97 | # LIBS += -lopencv_imgcodecs -lopencv_videoio # for opencv 3.0 these packages are necessary modify for correct suffix
98 | LIBS += -lopengl32
99 | CONFIG(release, debug|release) {
100 | LIBS += -lopencv_world330
101 | }
102 | CONFIG(debug, debug|release) {
103 | LIBS += -lopencv_world330d
104 | }
105 |
106 | }
107 |
108 | unix:!macx {
109 | message(Compiling for Linux)
110 | CONFIG += c++11
111 |
112 | LIBS += -L /usr/local/lib # store PvAPI and VimbaCPP libraries here
113 |
114 | HDF5 {
115 | QMAKE_INCDIR += /usr/include/hdf5/serial
116 | QMAKE_LIBDIR += /usr/lib/x86_64-linux-gnu/hdf5/serial
117 | LIBS += -lhdf5 -lhdf5_hl -lhdf5_cpp
118 | }
119 |
120 |
121 | #LIBS += `pkg-config --libs opencv4` # this command should handle opencv 2.4 and 3.0. If not, use lines below
122 | #QMAKE_INCDIR += /usr/include/opencv4
123 | LIBS += `pkg-config --libs opencv` # this command should handle opencv 2.4 and 3.0. If not, use lines below
124 | #LIBS += -pthread -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_video
125 | #LIBS += -lopencv_imgcodecs -lopencv_videoio # for opencv 3.0 these packages are necessary
126 |
127 | PVAPI {
128 | LIBS += -lPvAPI
129 | }
130 | VIMBA {
131 | LIBS += -lVimbaCPP
132 | }
133 | IDS {
134 | LIBS += -lueye_api
135 | }
136 | KAFKA {
137 | LIBS += -lrdkafka -lz -lpthread
138 | }
139 | }
140 | macx {
141 | message(Compiling for Mac) # No camera modules supported so far!
142 | CONFIG += c++11
143 | QMAKE_INCDIR += /usr/local/include
144 | QMAKE_LIBDIR += /usr/local/lib
145 | # QMAKE_INCDIR += /sw/include # in case opencv or Hdf5 were obtained from Fink uncomment this line
146 | # QMAKE_LIBDIR += /sw/lib # in case opencv or Hdf5 were obtained from Fink uncomment this line
147 | VIMBA|IDS|PVAPI {
148 | message(No camera modules support so far!!! Change configuration in GigaViewer.pro.)
149 | }
150 | HDF5 {
151 | QMAKE_INCDIR += /usr/local/HDF_Group/HDF5/1.8.16/include
152 | QMAKE_LIBDIR += /usr/local/HDF_Group/HDF5/1.8.16/lib
153 | LIBS += -lhdf5 -lhdf5_hl -lhdf5_cpp -lhdf5_hl_cpp
154 | }
155 | LIBS += -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_video
156 | # LIBS += -lopencv_imgcodecs -lopencv_videoio # for opencv 3.0 these packages are necessary so uncomment
157 | }
158 |
159 | PVAPI {
160 | message(Including PVAPI libraries)
161 | SOURCES += avtsourcesink.cpp
162 | HEADERS += avtsourcesink.h
163 | }
164 |
165 | IDS {
166 | message(Including IDS libraries)
167 | SOURCES += idssourcesink.cpp
168 | HEADERS += idssourcesink.h
169 | }
170 |
171 | VIMBA {
172 | message(Including VIMBA libraries)
173 | SOURCES += vimbasourcesink.cpp vimbaframeobserver.cpp
174 | HEADERS += vimbasourcesink.h \
175 | vimbaframeobserver.h
176 | }
177 |
178 | HDF5 {
179 | SOURCES += hdf5sourcesink.cpp
180 | HEADERS += hdf5sourcesink.h
181 | }
182 |
183 | SOURCES += main.cpp \
184 | imagepacket.cpp \
185 | videoglscene.cpp \
186 | fileinputdialog.cpp \
187 | coordinator.cpp \
188 | maingui.cpp \
189 | picbackend.cpp \
190 | cambackend.cpp \
191 | playbackdialog.cpp \
192 | mainwindow.cpp \
193 | opencvsourcesink.cpp \
194 | imagesourcesink.cpp \
195 | regexsourcesink.cpp \
196 | cameracontrolsdialog.cpp \
197 | xvisourcesink.cpp \
198 | mrfsourcesink.cpp \
199 | fmfbufferedsourcesink.cpp \
200 | fmfsourcesink.cpp
201 |
202 | ELLIPSE {
203 | SOURCES += ellipsedetection.cpp \
204 | ellipsedetectiondialog.cpp
205 |
206 | HEADERS += ellipsedetection.h \
207 | ellipsedetectiondialog.h
208 |
209 | FORMS += ellipsedetectiondialog.ui
210 | }
211 |
212 | INTERFERO {
213 | SOURCES += \
214 | interferoplugindialog.cpp
215 |
216 | HEADERS += \
217 | interferoplugindialog.h
218 |
219 | FORMS += interferoplugindialog.ui
220 | }
221 |
222 | AMPLI {
223 | SOURCES += ampliplugindialog.cpp
224 |
225 | HEADERS += ampliplugindialog.h
226 |
227 | FORMS += ampliplugindialog.ui
228 | }
229 |
230 | KAFKA {
231 | SOURCES += kafkafrontend.cpp \
232 | kafkacontroller.cpp
233 | HEADERS += kafkafrontend.h \
234 | kafkacontroller.h
235 | }
236 |
237 | HEADERS += \
238 | videoglscene.h \
239 | fileinputdialog.h \
240 | coordinator.h \
241 | maingui.h \
242 | picbackend.h \
243 | cambackend.h \
244 | imagepacket.h \
245 | playbackdialog.h \
246 | mainwindow.h \
247 | opencvsourcesink.h \
248 | fmfsourcesink.h \
249 | imagesourcesink.h \
250 | cameracontrolsdialog.h \
251 | regexsourcesink.h \
252 | xvisourcesink.h \
253 | mrfsourcesink.h \
254 | fmfbufferedsourcesink.h
255 |
256 |
257 | FORMS += \
258 | fileinputdialog.ui \
259 | playbackdialog.ui \
260 | cameracontrolsdialog.ui \
261 |
262 |
263 | OTHER_FILES += \
264 | README.txt \
265 | gigaviewer.desktop \
266 | GigaViewer.svg \
267 | video-fmf.xml \
268 | CompilingLinux.txt \
269 | CompilingMac.txt \
270 | CompilingWindows.txt
271 |
272 | RESOURCES += \
273 | icons.qrc
274 |
275 | DISTFILES += \
276 | TODO.txt \
277 | FMF.py
278 |
--------------------------------------------------------------------------------
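The CONFIG feature switches in GigaViewer.pro can usually also be toggled when invoking qmake instead of editing the file; this is only a sketch, and whether a command-line assignment overrides the CONFIG += lines in the file depends on the qmake version and on passing the assignment after the project file (see qmake's -after option):

    qmake "CONFIG+=VIMBA" "CONFIG+=KAFKA" GigaViewer.pro
    make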
/playbackdialog.cpp:
--------------------------------------------------------------------------------
1 | #include "playbackdialog.h"
2 | #include "ui_playbackdialog.h"
3 | #include
4 |
5 | PlaybackDialog::PlaybackDialog(QWidget *parent) :
6 | QDialog(parent),
7 | ui(new Ui::PlaybackDialog),recording(false)
8 | {
9 | ui->setupUi(this);
10 | currentTimer=100;
11 | ui->RecFolder->setText(QDir::homePath());
12 | #ifndef ENABLE_HDF5
13 | ui->codecBox->removeItem(2);
14 | #endif
15 |
16 | connect(&timer1,SIGNAL(timeout()), this, SLOT (finishedFirstTimer()));
17 | connect(&timer2,SIGNAL(timeout()), this, SLOT (finishedSecondTimer()));
18 | }
19 |
20 | PlaybackDialog::~PlaybackDialog()
21 | {
22 | delete ui;
23 | }
24 |
25 | void PlaybackDialog::on_stopButton_clicked()
26 | {
27 | ui->playButton->setChecked(true);
28 | ui->recButton->setChecked(false);
29 | ui->fpsEdit->setText("100");
30 | recording=false;
31 | emit stopPlayback();
32 | }
33 |
34 | void PlaybackDialog::on_ffwdButton_clicked()
35 | {
36 | emit jumpFrames(true);
37 | }
38 |
39 | void PlaybackDialog::on_rwdButton_clicked()
40 | {
41 | emit jumpFrames(false);
42 | }
43 |
44 | void PlaybackDialog::on_playButton_toggled(bool checked)
45 | {
46 | if (checked) {
47 | emit newFps(currentTimer);
48 | } else {
49 | emit newFps(3600000); // such a long time that it appears paused
50 | }
51 | }
52 |
53 | void PlaybackDialog::on_recButton_toggled(bool checked)
54 | {
55 | recording=checked;
56 | QString recf=ui->RecFolder->text();
57 | QString cod=ui->codecBox->currentText();
58 | int recSk=ui->recSkippingEdit->text().toInt();
59 | emit recordNow(checked,recf,cod,recSk);
60 | if (recording) {
61 | ui->LeftStatus->setText("Recording");
62 | } else {
63 | ui->LeftStatus->setText("");
64 | }
65 | }
66 |
67 | void PlaybackDialog::on_RecSettings_clicked()
68 | {
69 | ui->stackedWidget->setCurrentIndex(1);
70 | }
71 |
72 | void PlaybackDialog::on_backButton_clicked()
73 | {
74 | ui->stackedWidget->setCurrentIndex(0);
75 | }
76 |
77 | void PlaybackDialog::on_toolButton_clicked()
78 | {
79 | QString old = ui->RecFolder->text();
80 | // if (old=="") {
81 | QString fold= QFileDialog::getExistingDirectory(nullptr,tr("Select recording folder"),QDir::homePath(),QFileDialog::ShowDirsOnly);
82 | // } else {
83 | // QString fold= QFileDialog::getExistingDirectory(this,tr("Select recording folder"),QDir::homePath(),QFileDialog::ShowDirsOnly);
84 | // }
85 | ui->RecFolder->setText(fold);
86 | }
87 |
88 | void PlaybackDialog::togglePlay()
89 | {
90 | ui->playButton->toggle();
91 | }
92 |
93 | void PlaybackDialog::reversePlay()
94 | {
95 | currentTimer=-abs(currentTimer);
96 | QString delayTxt=QString("%1").arg(currentTimer);
97 | ui->fpsEdit->setText(delayTxt);
98 | ui->playButton->setChecked(true);
99 | emit newFps(currentTimer);
100 | }
101 |
102 | void PlaybackDialog::forwardPlay()
103 | {
104 | currentTimer=abs(currentTimer);
105 | QString delayTxt=QString("%1").arg(currentTimer);
106 | ui->fpsEdit->setText(delayTxt);
107 | ui->playButton->setChecked(true);
108 | emit newFps(currentTimer);
109 | }
110 |
111 | void PlaybackDialog::on_fpsEdit_returnPressed()
112 | {
113 | QString valStr=ui->fpsEdit->text();
114 | if (valStr.contains("/")) {
115 | QStringList vl=valStr.split("/");
116 | if (vl.count()==2) {
117 | currentTimer=1000.0/vl[1].toInt();
118 | } else {
119 | qDebug()<<"Could not understand fps setting";
120 | }
121 | if (valStr.at(0)=='-') currentTimer=-currentTimer;
122 | } else {
123 | int tim1;
124 | double msecs1;
125 | if (parseInstruct(valStr,tim1,msecs1)) {
126 | currentTimer=tim1*1000.0; // shouldn't this be msecs1???
127 | } else {
128 | currentTimer=valStr.toDouble();
129 | }
130 | }
131 | // qInfo()<<"delay="<<currentTimer;
138 | ui->RightStatus->setText(frameTxt);
139 | }
140 |
141 | void PlaybackDialog::showNewFps(double msec) {
142 | double fps=1000.0/msec;
143 | QString fpstext="1/"+QString::number(fps);
144 | ui->fpsEdit->setText(fpstext);
145 | }
146 |
147 | void PlaybackDialog::on_horizontalSlider_valueChanged(int value)
148 | {
149 | // qInfo()<<"Fps val given: "<<value;
150 | double newFrameRate;
151 | if (value>10) {
152 | newFrameRate=value-9;
153 | } else {
154 | newFrameRate=value/10.0;
155 | }
156 | // qInfo()<<"Converted new value "<<newFrameRate;
157 | QString oldText=ui->fpsEdit->text();
158 | QString delayTxt;
159 | int newVal;
160 | if (oldText.at(0)=='-') {
161 | delayTxt=QString("-1/%1").arg(newFrameRate);
162 | newVal=static_cast(round(-1000/newFrameRate));
163 | } else {
164 | delayTxt=QString("1/%1").arg(newFrameRate);
165 | newVal=static_cast(round(1000/newFrameRate));
166 | }
167 | ui->fpsEdit->setText(delayTxt);
168 |
169 | currentTimer=newVal;
170 | emit newFps(currentTimer);
171 | ui->playButton->setChecked(true);
172 | }
173 |
174 |
175 | // should ask user how long he wants to record
176 | void PlaybackDialog::on_recTimedButton_toggled(bool checked)
177 | {
178 | recording=checked;
179 | QString instruct;
180 | bool ok=true;
181 | if (recording) {
182 | instruct=QInputDialog::getText(nullptr,"Shutdown timer instructions","Format ~ '2m10s@30fps/3h@0.5fps' : ");
183 | // check if format is ok
184 | QRegExp rx("(.+)/(.+)");
185 | int pos=0;
186 | pos=rx.indexIn(instruct);
187 | if (pos==-1) {
188 | config1=instruct;
189 | config2="";
190 | } else {
191 | config1=rx.cap(1);
192 | config2=rx.cap(2);
193 | }
194 |
195 | have2timers=false;
196 | int tim1;
197 | double msecs1;
198 | if (parseInstruct(config1,tim1,msecs1)) {
199 | // qInfo()<<"Got here with: "<<tim1<<" "<<msecs1;
200 | timer1.setInterval(1000*tim1);
201 | timer1.setSingleShot(true);
202 | if (msecs1>1) {
203 | ui->fpsEdit->setText(QString::number(msecs1));
204 | emit newFps(msecs1);
205 | }
206 | } else {
207 | ok=false;
208 | }
209 |
210 | int tim2;
211 | double msecs2;
212 | if (parseInstruct(config2,tim2,msecs2)) {
213 | // qInfo()<<"Got here also with: "<<tim2<<" "<<msecs2;
214 | if (msecs2>1) {
215 | have2timers=true;
216 | timer2.setInterval(1000*(tim1+tim2));
217 | timer2.setSingleShot(true);
218 | secondDelay=msecs2;
219 | } else {
220 | qDebug()<<"Not a correct use of second timer fps: "<<config2;
228 | ui->recTimedButton->toggle();
229 | return;
230 | }
231 |
232 | }
233 |
234 | QString recf=ui->RecFolder->text();
235 | QString cod=ui->codecBox->currentText();
236 | int recSk=ui->recSkippingEdit->text().toInt();
237 | emit recordNow(checked,recf,cod,recSk);
238 | if (recording) {
239 | ui->LeftStatus->setText("Recording");
240 | timer1.start();
241 | if (have2timers) timer2.start();
242 | } else {
243 | ui->LeftStatus->setText("");
244 | }
245 | }
246 |
247 | bool PlaybackDialog::parseInstruct(QString instruct, int& sec, double& msecdelay) {
248 | QRegExp secSearch("(\\d+)s");
249 | QRegExp minSearch("(\\d+)m");
250 | QRegExp hourSearch("(\\d+)h");
251 | QRegExp adsearch("(.+)@(.+)");
252 | QRegExp fpsSearch("(.+)fps");
253 | bool ok=false;
254 | sec=0;
255 | msecdelay=-1;
256 | int pos;
257 | pos=secSearch.indexIn(instruct);
258 | if (pos!=-1) {
259 | int newsec=secSearch.cap(1).toInt(&ok);
260 | if (ok) sec+=newsec;
261 | }
262 | pos=minSearch.indexIn(instruct);
263 | if (pos!=-1) {
264 | int newmin=minSearch.cap(1).toInt(&ok);
265 | if (ok) sec+=(newmin*60);
266 | }
267 | pos=hourSearch.indexIn(instruct);
268 | if (pos!=-1) {
269 | int newhour=hourSearch.cap(1).toInt(&ok);
270 | if (ok) sec+=(newhour*3600);
271 | }
272 |
273 | pos=adsearch.indexIn(instruct);
274 | if (pos!=-1) {
275 | QString fpsstring=adsearch.cap(2);
276 | pos=fpsSearch.indexIn(fpsstring);
277 | if (pos!=-1) {
278 | double fp=fpsSearch.cap(1).toDouble(&ok);
279 | if (ok) {
280 | msecdelay=1000.0/fp;
281 | }
282 | }
283 | }
284 |
285 | return (sec>0);
286 | }
287 |
288 | void PlaybackDialog::finishedFirstTimer() {
289 | if (recording) {
290 | if (have2timers) {
291 | // don't stop recording, just change fps
292 | ui->fpsEdit->setText(QString::number(secondDelay));
293 | emit newFps(secondDelay);
294 | } else {
295 | ui->recTimedButton->toggle();
296 | ui->stopButton->click();
297 | }
298 | }
299 | }
300 |
301 | void PlaybackDialog::finishedSecondTimer() {
302 | if (recording) {
303 | ui->recTimedButton->toggle();
304 | ui->stopButton->click();
305 | }
306 | }
307 |
308 | void PlaybackDialog::on_snapshotButton_clicked() {
309 | QString fold= QFileDialog::getSaveFileName(nullptr,QString("Where should I save the snapshot?"));
310 | //qInfo()<<"Gotten this output: "<<fold;
--------------------------------------------------------------------------------
/fmfbufferedsourcesink.cpp:
--------------------------------------------------------------------------------
106 | if (pos>-1) {
107 | basename=rx.cap(1)+"/"+rx.cap(2);
108 | } else {
109 | qDebug()<<"Recording Folder does not exist";
110 | QDateTime mom = QDateTime::currentDateTime();
111 | basename=mom.toString("yyyyMMdd-hhmmss");
112 | }
113 | // qInfo()<<"recFold"<<recFold;
168 | bytesperchunk=static_cast<uint64_t>(3*bitsperpixel*static_cast<uint64_t>(round(rows*cols/8))+sizeof(double));
169 | } else {
170 | bytesperchunk=static_cast<uint64_t>(bitsperpixel*static_cast<uint64_t>(round(rows*cols/8))+sizeof(double));
171 | }
172 | // bytes encoding a frame
173 | if(fwrite(&bytesperchunk,sizeof(uint64_t),1,fmfrec)<1){
174 | fprintf(stderr,"Error writing bytes per chunk to output fmf file.\n");
175 | return(QString(""));
176 | }
177 | // qInfo()<<"bytesperchunk will be: "<<bytesperchunk;
198 | uint64_t nWritten=static_cast<uint64_t>(frames.size());
199 | fseek(fmfrec,recNframespos,SEEK_SET);
200 | if (fwrite(&nWritten,sizeof(uint64_t),1,fmfrec)<1) qDebug()<<"Error writing number of frames to fmf file";
201 | //fclose(fmfrec);
202 |
203 | // now write out FMF
204 | for (int i = 0; i < frames.size(); ++i) {
205 | //qInfo()<<"Writing frame to file";
206 | if (fwrite(&timestamps[i],sizeof(double),1,fmfrec)==1) {
207 | //test here what the bitdepth of the source image is
208 | // if (source.image.depth()==2)
209 | cv::Mat temp=frames.at(i);//.clone();
210 | if (temp.channels()==3) {
211 | if (fwrite(temp.data,1,static_cast<size_t>(3*temp.rows*temp.cols),fmfrec)!=static_cast<size_t>(3*temp.rows*temp.cols)) {
212 | qWarning()<<"Issue with writing of frame";
213 | break;
214 | }
215 | } else {
216 | if (dataformat.contains("8")) {
217 | //qInfo()<<"Writing matrix";
218 | if (fwrite(temp.data,1,static_cast<size_t>(temp.rows*temp.cols),fmfrec)!=static_cast<size_t>(temp.rows*temp.cols)) {
219 | qWarning()<<"Issue with writing of frame";
220 | break;
221 | }
222 | } else if ((dataformat.contains("12"))||(dataformat.contains("14"))) {
223 | if (fwrite(temp.ptr(0),2,static_cast<size_t>(temp.rows*temp.cols),fmfrec)!=static_cast<size_t>(temp.rows*temp.cols)) {
224 | qWarning()<<"Issue with writing of frame";
225 | break;
226 | } else {
227 | qWarning()<<"Writing 12bit or 14bit frame unsuccessfull";
228 | break;
229 | }
230 | } else {
231 | qDebug()<<"Problem with dataformat: "<<dataformat;
248 | for (std::vector<double>::iterator iter = timestamps.begin(); iter != timestamps.end(); iter++){
249 | out << *iter<<"\n";
250 | }
251 | fileout.close();
252 | }
253 | #endif
254 | return true;
255 | }
256 |
257 | bool FmfBufferedSourceSink::IsOpened()
258 | {
259 | return true;
260 | }
261 |
262 | bool FmfBufferedSourceSink::SkipFrames(bool forward) {
263 | int skipping = 0;
264 | if (forward) {
265 | skipping=nFrames/10;
266 | } else {
267 | skipping=-nFrames/50;
268 | }
269 | // qInfo()<<"Will try to skip "<<skipping;
271 | if ((currPos+skipping >= nFrames-1)||(currPos+skipping <0)) {
272 | return true;
273 | }
274 |
275 | #ifdef Q_OS_WIN32
276 | _fseeki64(fmf,(skipping-1)*bytesperchunk,SEEK_CUR);
277 | #else
278 | fseek(fmf,(skipping-1)*static_cast<long>(bytesperchunk),SEEK_CUR);
279 | #endif
280 | currPos+=skipping;
281 | return true;
282 | }
283 |
284 | QDataStream &operator<<(QDataStream &out, const cv::Mat &matrix) {
285 | int totSize=(matrix.rows*matrix.cols);
286 | if (matrix.channels()==3) {
287 | totSize=totSize*3;
288 | }
289 | //uint bitspp=8;
290 | for (int i=0;i<totSize;i++) {
291 | out << matrix.at<uchar>(i);
292 | }
293 | return out;
294 | }
295 |
296 |
297 |
--------------------------------------------------------------------------------
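To make the chunk bookkeeping in FmfBufferedSourceSink concrete: each recorded FMF chunk is one double timestamp followed by the raw frame, so for a hypothetical 1024x1024 MONO8 recording bytesperchunk = 8*(1024*1024/8) + sizeof(double) = 1,048,584 bytes, and the 3-channel branch gives 3*8*(1024*1024/8) + 8 = 3,145,736 bytes; this fixed chunk size is what lets SkipFrames jump over whole frames with a single fseek on multiples of bytesperchunk.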
/kafkafrontend.cpp:
--------------------------------------------------------------------------------
1 | #include "kafkafrontend.h"
2 | #include
3 | //#include
4 |
5 | /* msgDeliveryCB: Is the delivery callback.
6 | * The delivery report callback will be called once for each message
7 | * accepted by rd_kafka_produce() with err set to indicate
8 | * the result of the produce request. An application must call rd_kafka_poll()
9 | * at regular intervals to serve queued delivery report callbacks.
10 | */
11 | static void msgDeliveryCB (rd_kafka_t *,
12 | const rd_kafka_message_t *rkmessage, void *) {
13 | if (rkmessage->err != RD_KAFKA_RESP_ERR_NO_ERROR) {
14 | qInfo("FAILURE: Message not delivered to partition.\n");
15 | qInfo("ERROR: %s", rd_kafka_err2str(rkmessage->err));
16 | } else {
17 | qInfo("Produced: %.*s\n",static_cast<int>(rkmessage->len), static_cast<char*>(rkmessage->payload));
18 | }
19 | }
20 |
21 | KafkaFrontend::KafkaFrontend(Coordinator *boss,QObject *parent) : QObject(parent),prodHndl(nullptr),cmdPointer(nullptr),cfgPointer(nullptr),skipLogging(5),timeout(100),isRecording(false) {
22 | connect(boss,SIGNAL(NewImageReady(ImagePacket)),this,SLOT(newImageReceived(ImagePacket)));
23 | connect(this,SIGNAL(startRecording(bool,QString,QString,int)),boss,SLOT(StartRecording(bool,QString,QString,int)));
24 | connect(this,SIGNAL(implementNewFps(double)),boss,SLOT(changeFps(double)));
25 | connect(this,SIGNAL(setShutter(int)),boss,SLOT(changeShutter(int)));
26 | connect(this,SIGNAL(setRoiRows(int)),boss,SLOT(setRoiRows(int)));
27 | connect(this,SIGNAL(setRoiCols(int)),boss,SLOT(setRoiCols(int)));
28 |
29 | qRegisterMetaType< QList<QJsonDocument> >( "QList<QJsonDocument>" );
30 |
31 | }
32 |
33 | // this slot is called for each new image that arrives
34 | void KafkaFrontend::newImageReceived(ImagePacket theMatrix) {
35 | if (prodHndl!=nullptr) {
36 | // qInfo("Was contacted for a new image!");
37 | QJsonDocument qjson=QJsonDocument::fromVariant(theMatrix.message);
38 | // qInfo("Want to publish %s",qjson.toJson().data());
39 | //qInfo("skipLogging=%i",skipLogging);
40 | if (theMatrix.seqNumber%skipLogging==0) {
41 | publishMsg(QString("GigaViewer"),qjson);
42 | }
43 | }
44 | }
45 |
46 | // This will publish a JSon to the connected topic
47 | void KafkaFrontend::publishMsg(QString key, QJsonDocument value) {
48 | QByteArray msgQ=value.toJson();
49 | char* payload=msgQ.data();
50 | char* kkey=key.toLocal8Bit().data();
51 | if (rd_kafka_produce(prodTopicHndl,
52 | RD_KAFKA_PARTITION_UA,
53 | RD_KAFKA_MSG_F_COPY,
54 | payload,
55 | strlen(payload),
56 | kkey,
57 | strlen(kkey),
58 | nullptr) == -1) {
59 | int errNum = errno;
60 | qInfo("Failed to produce to topic : %s\n", rd_kafka_topic_name(prodTopicHndl));
61 | qInfo("Error Number: %d ERROR NAME: %s\n"
62 | ,errNum, rd_kafka_err2str(rd_kafka_last_error()));
63 | }
64 | }
65 |
66 | // this will put the commands into effect - look at all commands
67 | void KafkaFrontend::actOnCommands(QList<QJsonDocument> commands) {
68 | if (commands.size()!=0) {
69 | for (int i=0;i<commands.size();i++) {
96 | void KafkaFrontend::actOnConfig(QList<QJsonDocument> configs) {
97 | if (configs.size()!=0) {
98 | QJsonObject qObj=configs[configs.size()-1].object(); //only apply last configuration
99 | QStringList kk=qObj.keys();
100 | if (kk.contains("exposure") and kk.contains("fps")) {
101 | changeParameters(qObj);
102 | } else {
103 | qInfo("Missing at least one config setting!");
104 | }
105 | }
106 | return;
107 | }
108 |
109 | // This will change the camera parameters
110 | void KafkaFrontend::changeParameters(QJsonObject newconfig) {
111 | //qInfo("Should change some camera parameters");
112 | int sshut=newconfig["exposure"].toInt();
113 | if (sshut==0) {
114 | //probably a double was given!
115 | sshut=static_cast<int>(newconfig["exposure"].toDouble());
116 | }
117 | qInfo("Want to set shutter to: %i",sshut);
118 | emit setShutter(sshut);
119 |
120 | //double fps=newconfig["fps"].toDouble();
121 | //if (fps>2) {
122 | // skipLogging=static_cast<int>(floor(fps/2)); // half of the framerate should lead to 2 kafka-logs per second..
123 | //} else {
124 | // skipLogging=1;
125 | //}
126 | //skipLogging=1;
127 |
128 | double newDelay=1000.0/newconfig["fps"].toDouble();
129 |
130 | int emitSkipped=static_cast<int>(floor(25.0/newDelay)+1); // recalculate this value
131 | if (newDelay>100) {
132 | skipLogging=emitSkipped;
133 | } else {
134 | skipLogging=emitSkipped*10;
135 | }
136 | qInfo("Found delay %f and emitSkipped %i setting skipLogging to %i",newDelay,emitSkipped,skipLogging);
137 |
138 | //qInfo("Want to set delay to: %f",newDelay);
139 | emit implementNewFps(newDelay);
140 | /*int ccols=newconfig["roicols"].toInt();
141 | emit setRoiCols(ccols);
142 | int rrows=newconfig["roirows"].toInt();
143 | emit setRoiRows(rrows);*/
144 | }
145 |
146 | // this will start/stop recording
147 | void KafkaFrontend::changeRecording(bool start,QJsonObject parameters) {
148 | // qInfo("Should change some recording parameters");
149 | // QJsonObject commParams=instructs["command_params"].toObject();
150 | if (start) {
151 | // qInfo("Will start recording");
152 | QString recdirname= parameters["dirname"].toString();
153 | QDir basedir(recdirname);
154 | if (not basedir.exists()) recdirname="";
155 | emit startRecording(true, recdirname,parameters["codec"].toString(),parameters["recskip"].toInt());
156 | } else {
157 | // qInfo("Will stop recording");
158 | emit startRecording(false, "","",0);
159 | // isRecording=false;
160 | // } else {
161 | // qInfo("Didn't understand the recording sub-command");
162 | }
163 | }
164 |
165 |
166 |
167 | // this will make the publisher and connect to the publishing topic
168 | void KafkaFrontend::makePublisher(QString topic,QString host) {
169 | // something to collect the error messages
170 | char errstr[1000];
171 |
172 | // producer configuration object
173 | rd_kafka_conf_t *prodCfg;
174 |
175 | prodCfg = rd_kafka_conf_new();
176 | if (prodCfg == nullptr) {
177 | qInfo("Failed to create conf\n");
178 | }
179 |
180 | if (rd_kafka_conf_set(prodCfg, "bootstrap.servers", host.toLocal8Bit(), errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) {
181 | qInfo("rd_kafka_conf_set() failed with error: %s\n", errstr);
182 | }
183 |
184 | // set the callback for missed messages
185 | rd_kafka_conf_set_dr_msg_cb(prodCfg, msgDeliveryCB);
186 |
187 | // Now that the configuration is ready, make the PRODUCER
188 | prodHndl = rd_kafka_new(RD_KAFKA_PRODUCER, prodCfg, errstr, sizeof(errstr));
189 | if (prodHndl == nullptr) {
190 | qInfo("Failed to create producer: %s\n", errstr);
191 | }
192 |
193 | // now do the same for the topic, first make configuration
194 | rd_kafka_topic_conf_t *prodTopicCfg;
195 | prodTopicCfg = rd_kafka_topic_conf_new();
196 | if (prodTopicCfg == nullptr) {
197 | qInfo("Failed to create new topic conf\n");
198 | }
199 |
200 | prodTopicHndl = rd_kafka_topic_new(prodHndl, topic.toLocal8Bit(), prodTopicCfg);
201 | if (prodTopicHndl == nullptr) {
202 | qInfo("Failed to create new topic handle\n");
203 | }
204 | prodTopicCfg = nullptr; /* Now owned by topic */
205 | }
206 |
207 | void KafkaFrontend::makeConsumers(QString cfgtopic,QString cmdtopic,QString groupId,QString host) {
208 | cfgPointer=new KafkaController(cfgtopic,groupId,host,"earliest",true,timeout);
209 | cfgThread=new QThread();
210 | cfgThread->start();
211 | cfgPointer->moveToThread(cfgThread);
212 |
213 | connect(this,SIGNAL(startTheTimer(int)),cfgPointer,SLOT(willStartTheTimer(int)));
214 | connect(this,SIGNAL(stopTheTimer()),cfgPointer,SLOT(willStopTheTimer()));
215 | connect(cfgPointer,SIGNAL(sendNewData(QList<QJsonDocument>)),this,SLOT(actOnConfig(QList<QJsonDocument>)));
216 |
217 | cmdPointer=new KafkaController(cmdtopic,groupId,host,"latest",true,timeout);
218 | cmdThread=new QThread();
219 | cmdThread->start();
220 | cmdPointer->moveToThread(cmdThread);
221 |
222 | connect(this,SIGNAL(startTheTimer(int)),cmdPointer,SLOT(willStartTheTimer(int)));
223 | connect(this,SIGNAL(stopTheTimer()),cmdPointer,SLOT(willStopTheTimer()));
224 | connect(cmdPointer,SIGNAL(sendNewData(QList<QJsonDocument>)),this,SLOT(actOnCommands(QList<QJsonDocument>)));
225 |
226 | emit startTheTimer(100); // both kafkacontrollers are listening to this signal to start their timers.
227 | }
228 |
--------------------------------------------------------------------------------
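As a concrete illustration of the consumer side of KafkaFrontend (the numbers are arbitrary): a message {"exposure": 2000, "fps": 25} on the configuration topic passes the key check in actOnConfig, and changeParameters then emits setShutter(2000), emits implementNewFps(40.0) (a 40 ms frame delay), and recomputes skipLogging to (floor(25.0/40)+1)*10 = 10, so roughly every tenth frame's metadata is published back on the log topic.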
/interferoplugindialog.cpp:
--------------------------------------------------------------------------------
1 | #include "interferoplugindialog.h"
2 | #include "ui_interferoplugindialog.h"
3 | #include
4 |
5 |
6 | InterferoPluginDialog::InterferoPluginDialog(QWidget *parent) :
7 | QDialog(parent),ssSliderPressed(false),skSliderPressed(false),newReference(true),rowPeak(-1),colPeak(-1),subsample(1),skip(0),fringePeriod(0),fringeAngle(0),
8 | ui(new Ui::InterferoPluginDialog)
9 | {
10 | ui->setupUi(this);
11 |
12 |
13 | extractData();
14 | }
15 |
16 | bool InterferoPluginDialog::extractData() {
17 | activated=ui->activateBox->isChecked();
18 | skip=ui->skipSlider->value();
19 | if (subsample!=ui->subsampleSlider->value()) {
20 | newReference=true;
21 | }
22 | subsample=ui->subsampleSlider->value();
23 | return true;
24 | }
25 |
26 | bool InterferoPluginDialog::processImage(ImagePacket& currIm) {
27 | if (activated) {
28 | // deal with skips here.
29 | if (frameCounter%(skip+1)!=0) {
30 | frameCounter++;
31 | return false;
32 | }
33 | frameCounter+=1;
34 |
35 | if (currIm.pixFormat=="RGB8") {
36 | cv::Mat grayIm;
37 | cv::cvtColor(currIm.image,grayIm,cv::COLOR_RGB2GRAY);
38 | currIm.image=grayIm.clone();
39 | currIm.pixFormat="MONO8"; // will make it fall through to the next part.
40 | }
41 | if (currIm.pixFormat=="MONO8") {
42 | // first deal with subsampling
43 | cv::Mat subImage;
44 | cv::resize(currIm.image,subImage,cv::Size(),1.0/subsample,1.0/subsample,cv::INTER_NEAREST);
45 |
46 | // now do the fft
47 | cv::Mat padded; //expand input image to optimal size
48 | int m = cv::getOptimalDFTSize( subImage.rows );
49 | int n = cv::getOptimalDFTSize( subImage.cols ); // on the border add zero values
50 | if (m
58 | std::vector<cv::Mat> chan;
59 | cv::split(result,chan);
60 | padded.convertTo(chan[0],CV_32F);
61 | cv::Scalar me=cv::mean(chan[0]);
62 | // qDebug()<<"Mean of image is"<<me[0];
81 | ui->fringeLabel->setText(newLabel);
82 | newReference=false;
83 | }
84 | filterDft(result,colPeak-colPeak/2,3*colPeak/2);
85 |
86 | bool recentrePeak=true;
87 |
88 | if (recentrePeak) {
89 | centrePeak(result,colPeak,rowPeak);
90 | }
91 |
92 |
93 | bool showPhase=true;
94 | if (showPhase) {
95 | cv::idft(result,result);
96 | cv::split(result, chan); // planes[0] = Re(DFT(I), planes[1] = Im(DFT(I))
97 | cv::Mat phaseI;
98 | cv::phase(chan[0], chan[1], phaseI);// planes[0] = magnitude
99 |
100 | cv::Mat phaseCrop=phaseI(cv::Rect(0,0,subImage.cols,subImage.rows));
101 |
102 | adaptForScreen(phaseCrop);
103 | currIm.image=phaseCrop.clone();
104 | }
105 | //currIm.image=padded.clone();
106 |
107 | bool showFourierPlane=false;
108 |
109 | if (showFourierPlane) {
110 | // compute the magnitude and switch to logarithmic scale
111 | // => log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
112 | cv::split(result, chan); // planes[0] = Re(DFT(I), planes[1] = Im(DFT(I))
113 | cv::Mat magI;
114 | cv::magnitude(chan[0], chan[1], magI);// planes[0] = magnitude
115 |
116 | magI = magI(cv::Rect(0, 0, magI.cols & -2, magI.rows & -2));
117 |
118 | fftshift(magI,true);
119 |
120 | cv::Mat logMagI;
121 | cv::log(magI,logMagI);
122 | adaptForScreen(logMagI);
123 | currIm.image=logMagI.clone();
124 | }
125 |
126 |
127 |
128 | /* code to extract properties - not used for now
129 | magI = magI(cv::Rect(0, 0, magI.cols & -2, magI.rows & -2));
130 | int cx = magI.cols/2;
131 | int cy = magI.rows/2;
132 |
133 | cv::Mat q0(magI, cv::Rect(0, 0, cx, cy)); // Top-Left - Create a ROI per quadrant
134 | // cv::Mat q1(magI, cv::Rect(cx, 0, cx, cy)); // Top-Right
135 | cv::Mat q2(magI, cv::Rect(0, cy, cx, cy)); // Bottom-Left
136 | // cv::Mat q3(magI, cv::Rect(cx, cy, cx, cy)); // Bottom-Right
137 |
138 | cv::Mat tmp; // swap quadrants (Keep only left part of original (right after swap)
139 | q0.copyTo(tmp);
140 | q2.copyTo(q0);
141 | tmp.copyTo(q2);
142 |
143 | int xoffset=10; // to avoid using the DC peak, implies a minimum period of n/10 => 1024x1024 => 102 pixel period minimum!!
144 | cv::Mat newTemp=magI(cv::Rect(xoffset,0,cx-xoffset,magI.rows));
145 |
146 | cv::Point maxInd;
147 | double maxMag;
148 | cv::minMaxLoc(newTemp,nullptr,&maxMag,nullptr,&maxInd);
149 | double period=m/(sqrt(pow((maxInd.x),2.)+pow((maxInd.y-cy),2.))+xoffset); //problem if m!=n !!!! // add a '.' after the 2 to prevent error with type conversion msvc10 bug...
150 | double angle=atan2((double)(cy)-maxInd.y,maxInd.x)*180.0/3.1415965359; // type conversion of cy to avoid an error with msvc10, again...
151 | qDebug()<<"Period= "<IntensLabel->setText("Max Intensity: "+QString::number(max)+" - Dominant period: "+QString::number(period,'f',2)+" at "+QString::number(angle,'f',2)+QChar(0xB0));
155 | */
156 | }
157 | }
158 | return true;
159 | }
160 |
161 | void InterferoPluginDialog::fftshift(cv::Mat& I,bool forward) {
162 | int cx = I.cols/2;
163 | int cy = I.rows/2;
164 |
165 | if (forward) {
166 | cv::Mat q0(I, cv::Rect(0, 0, cx, cy)); // Top-Left - Create a ROI per quadrant
167 | cv::Mat q1(I, cv::Rect(cx, 0, cx, cy)); // Top-Right
168 | cv::Mat q2(I, cv::Rect(0, cy, cx, cy)); // Bottom-Left
169 | cv::Mat q3(I, cv::Rect(cx, cy, cx, cy)); // Bottom-Right
170 |
171 | cv::Mat tmp;
172 | q0.copyTo(tmp);
173 | q3.copyTo(q0);
174 | tmp.copyTo(q3);
175 |
176 | q1.copyTo(tmp);
177 | q2.copyTo(q1);
178 | tmp.copyTo(q2);
179 | }
180 | }
181 |
182 | void InterferoPluginDialog::filterDft(cv::Mat& I,int startC,int stopC) {
183 | cv::Mat filterMatrix=cv::Mat(I.rows,I.cols,CV_32FC2,cv::Scalar(0.0,0.0));
184 | cv::Mat on=cv::Mat(filterMatrix,cv::Rect(startC,0,stopC-startC,I.rows));
185 | on=cv::Scalar(1.0,1.0);
186 | GaussianBlur(filterMatrix,filterMatrix,cv::Size(21,1),0);
187 | cv::multiply(I, filterMatrix,I);
188 | }
189 |
190 | void InterferoPluginDialog::PeakFinder(cv::Mat input,int* rowPos,int* colPos) {
191 | cv::Mat result2;
192 | std::vector<cv::Mat> chan;
193 | cv::split(input,chan);
194 | cv::magnitude(chan[0],chan[1],result2);
195 | int xshift=20;
196 | cv::Mat subresult=cv::Mat(result2,cv::Rect(xshift,0,(result2.cols/2)-2*xshift,result2.rows));
197 |
198 | double min,max;
199 | cv::Point minLoc,maxLoc;
200 | minMaxLoc(subresult,&min,&max,&minLoc,&maxLoc);
201 | *rowPos=maxLoc.y;
202 | *colPos=maxLoc.x+xshift;
203 | }
204 |
205 | void InterferoPluginDialog::centrePeak(cv::Mat matrix,int peakx,int peaky) {
206 | int R=matrix.rows;
207 | int C=matrix.cols;
208 |
209 | cv::Mat topright=cv::Mat(matrix,cv::Rect(peakx,0,C-peakx,peaky));
210 | cv::Mat bottomright=cv::Mat(matrix,cv::Rect(peakx,peaky,C-peakx,R-peaky));
211 | cv::Mat topleft=cv::Mat(matrix,cv::Rect(0,0,peakx,peaky));
212 | cv::Mat bottomleft=cv::Mat(matrix,cv::Rect(0,peaky,peakx,R-peaky));
213 |
214 |
215 | cv::Mat dummy=cv::Mat(R,C,matrix.type(),cv::Scalar(0.0,0.0));
216 | cv::Mat Dbottomleft=cv::Mat(dummy,cv::Rect(0,R-peaky,C-peakx,peaky));
217 | cv::Mat Dtopleft=cv::Mat(dummy,cv::Rect(0,0,C-peakx,R-peaky));
218 | cv::Mat Dtopright=cv::Mat(dummy,cv::Rect(C-peakx,0,peakx,R-peaky));
219 | cv::Mat Dbottomright=cv::Mat(dummy,cv::Rect(C-peakx,R-peaky,peakx,peaky));
220 |
221 | topright.copyTo(Dbottomleft);
222 | bottomright.copyTo(Dtopleft);
223 | bottomleft.copyTo(Dtopright);
224 | topleft.copyTo(Dbottomright);
225 |
226 | dummy.copyTo(matrix);
227 | }
228 |
229 |
230 | void InterferoPluginDialog::adaptForScreen(cv::Mat& I) {
231 | double min,max;
232 | cv::minMaxLoc(I,&min,&max);
233 | double stretch=255.0/(max-min);
234 | double shift=-min*stretch;
235 | cv::Mat temp;
236 | I.convertTo(temp,CV_8U,stretch,shift);
237 | I=temp;
238 | }
239 |
240 |
241 |
242 | InterferoPluginDialog::~InterferoPluginDialog()
243 | {
244 | delete ui;
245 | }
246 |
247 | void InterferoPluginDialog::on_activateBox_stateChanged(int)
248 | {
249 | extractData();
250 | }
251 |
252 | void InterferoPluginDialog::on_newReferenceButton_clicked()
253 | {
254 | newReference=true;
255 | extractData();
256 | }
257 |
258 |
259 |
260 | void InterferoPluginDialog::on_subsampleSlider_sliderPressed()
261 | {
262 | ssSliderPressed=true;
263 | }
264 |
265 | void InterferoPluginDialog::on_subsampleSlider_sliderReleased()
266 | {
267 | ssSliderPressed=false;
268 | extractData();
269 | }
270 |
271 | void InterferoPluginDialog::on_subsampleSlider_valueChanged(int)
272 | {
273 | if (!ssSliderPressed) {
274 | extractData();
275 | }
276 | }
277 |
278 | void InterferoPluginDialog::on_skipSlider_sliderPressed()
279 | {
280 | skSliderPressed=true;
281 | }
282 |
283 | void InterferoPluginDialog::on_skipSlider_sliderReleased()
284 | {
285 | skSliderPressed=false;
286 | extractData();
287 | }
288 |
289 | void InterferoPluginDialog::on_skipSlider_valueChanged(int)
290 | {
291 | if (!skSliderPressed) {
292 | extractData();
293 | }
294 | }
295 |
296 |
--------------------------------------------------------------------------------
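A worked example of the Fourier-plane filtering in InterferoPluginDialog::processImage (the peak position is hypothetical): if PeakFinder locates the carrier fringe at column 100 of the padded spectrum, filterDft keeps roughly columns 50 to 150 (a hard window smoothed by a 21-pixel-wide Gaussian), centrePeak then circularly shifts the spectrum so that peak moves to the origin, and the inverse DFT followed by cv::phase yields the wrapped phase map, which is cropped back to the subsampled image size and stretched to 8 bit by adaptForScreen.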
/maingui.cpp:
--------------------------------------------------------------------------------
1 | #include "maingui.h"
2 | #include
3 |
4 | MainGui::MainGui(Coordinator *boss,QWidget *parent) :
5 | QGraphicsView(parent)
6 | {
7 | setWindowTitle(tr("Gige Viewer"));
8 | // view.setViewport(new QGLWidget(QGLFormat(QGL::SampleBuffers))); //anti-aliased
9 |
10 | /* QOpenGLWidget *widget = new QOpenGLWidget(parent);
11 | QSurfaceFormat format;
12 | format.setDepthBufferSize(24);
13 | format.setStencilBufferSize(8);
14 | format.setVersion(3, 3);
15 | format.setProfile(QSurfaceFormat::CompatibilityProfile);
16 | widget->setFormat(format);
17 | setViewport(widget);
18 | */
19 | setViewport(new QOpenGLWidget());
20 | setViewportUpdateMode(QGraphicsView::FullViewportUpdate);
21 |
22 | fileDialog = new FileInputDialog;
23 | playDialog = new PlaybackDialog;
24 | camDialog = new CameraControlsDialog;
25 |
26 | QList<QDialog*> controlDialogs;
27 | controlDialogs.append(fileDialog);
28 | controlDialogs.append(playDialog);
29 | controlDialogs.append(camDialog);
30 | #ifdef ELLIPSE
31 | ellipseDialog = new EllipseDetectionDialog;
32 | controlDialogs.append(ellipseDialog);
33 | #endif
34 | #ifdef INTERFERO
35 | interferoDialog = new InterferoPluginDialog;
36 | controlDialogs.append(interferoDialog);
37 | #endif
38 | #ifdef AMPLI
39 | ampliDialog = new AmpliPluginDialog;
40 | controlDialogs.append(ampliDialog);
41 | #endif
42 |
43 | // const QSize rect =parent->frameSize();
44 | // qDebug()<<"Frame size: "<)),boss,SLOT(newPluginSettingsReceived(QMap)));
108 | #endif
109 |
110 | setScene(theScene);
111 | getNewSample=false;
112 | recordSnapshot=false;
113 | }
114 |
115 | void MainGui::returnToStart()
116 | {
117 | showPlaybackControls(false);
118 | showCameraControls(false);
119 | showInputControls(true);
120 | showPluginDialogs(false);
121 | }
122 |
123 | void MainGui::resizeEvent(QResizeEvent *event)
124 | {
125 | if (scene())
126 | scene()->setSceneRect(QRect(QPoint(0, 0), event->size()));
127 | QGraphicsView::resizeEvent(event);
128 | }
129 |
130 | void MainGui::newImageReceived(ImagePacket theMatrix)
131 | {
132 | // frontend plugins will only act on the images at this stage!
133 | // backend plugins will act in cambackend; need to emit signals from gui-settings to backend
134 | bool pluginNoSkip=true;
135 | bool updatedFrameNr=false;
136 | #ifdef INTERFERO
137 | pluginNoSkip=interferoDialog->processImage(theMatrix);
138 | if (not pluginNoSkip) {
139 | if (not updatedFrameNr) emit newFrameNrShowing(theMatrix.seqNumber);
140 | updatedFrameNr=true;
141 | return; // if plugin returns false => skip image
142 | }
143 | #endif
144 | #ifdef AMPLI
145 | pluginNoSkip=ampliDialog->processImage(theMatrix);
146 | if (not pluginNoSkip) {
147 | if (not updatedFrameNr) emit newFrameNrShowing(theMatrix.seqNumber);
148 | updatedFrameNr=true;
149 | return; // if plugin returns false => skip image
150 | }
151 | #endif
152 |
153 |
154 | theScene->imageBuff=theMatrix.image;
155 | theScene->update();
156 | if (not updatedFrameNr) emit newFrameNrShowing(theMatrix.seqNumber);
157 | updatedFrameNr=true;
158 | if (getNewSample) {
159 | emit newSampleReady(theMatrix);
160 | getNewSample=false;
161 | }
162 | if (recordSnapshot) {
163 | bool succ=saveSnapshot(theMatrix);
164 | if (!succ) {
165 | qDebug()<<"Snapshot failed";
166 | }
167 | }
168 | }
169 |
170 | void MainGui::openCvFeedPressed()
171 | {
172 | showPlaybackControls(true);
173 | showInputControls(false);
174 | showPluginDialogs(true);
175 | emit newOpencvFeedNeeded(true);
176 | this->parentWidget()->setWindowTitle("OpenCV Feed");
177 | }
178 |
179 | void MainGui::stopButtonPressed()
180 | {
181 | emit newOpencvFeedNeeded(false);
182 | showPlaybackControls(false);
183 | showCameraControls(false);
184 | showInputControls(true);
185 | showPluginDialogs(false);
186 | this->parentWidget()->setWindowTitle("GigaViewer");
187 | }
188 |
189 | void MainGui::gotNewFps(double fps)
190 | {
191 | emit implementNewFps(fps);
192 | }
193 |
194 | void MainGui::gotNewFpsFromBackend(double fps)
195 | {
196 | emit showNewFps(fps);
197 | }
198 |
199 | void MainGui::gotNewShutSpeed(int shut)
200 | {
201 | camDialog->GotNewShutterSpeed(shut);
202 | }
203 |
204 | void MainGui::newMoviePressed(QString theString)
205 | {
206 | if (theString!="") {
207 | emit newMovieNeeded(theString);
208 | showPlaybackControls(true);
209 | showInputControls(false);
210 | if (theString.contains("IDS") | theString.contains("VIMBA")) {
211 | showCameraControls(true);
212 | }
213 | showPluginDialogs(true);
214 | this->parentWidget()->setWindowTitle(theString);
215 | }
216 | }
217 |
218 | void MainGui::showPlaybackControls(bool visible)
219 | {
220 | foreach (QGraphicsItem *item,theScene->items()) {
221 | if (item->data(0)=="PLAYBACK") {
222 | item->setVisible(visible);
223 | }
224 | }
225 | }
226 |
227 | void MainGui::showInputControls(bool visible)
228 | {
229 | foreach (QGraphicsItem *item,theScene->items()) {
230 | if (item->data(0)=="INPUT") {
231 | item->setVisible(visible);
232 | }
233 | }
234 | }
235 |
236 | void MainGui::showCameraControls(bool visible)
237 | {
238 | foreach (QGraphicsItem *item,theScene->items()) {
239 | if (item->data(0)=="CAMERA") {
240 | item->setVisible(visible);
241 | }
242 | }
243 | }
244 |
245 | void MainGui::showPluginDialogs(bool visible) {
246 | foreach (QGraphicsItem *item,theScene->items()) {
247 | if (item->data(0)=="PLUGIN") {
248 | item->setVisible(visible);
249 | }
250 | }
251 | }
252 |
253 | void MainGui::AVTFeedPressed()
254 | {
255 | showPlaybackControls(true);
256 | showInputControls(false);
257 | showCameraControls(true);
258 | showPluginDialogs(true);
259 | emit newAvtFeedNeeded(true);
260 | this->parentWidget()->setWindowTitle("AVT Live Camera Feed");
261 | }
262 |
263 | void MainGui::VimbaFeedPressed()
264 | {
265 | emit newVimbaFeedNeeded(true); // if this fails, the controls will be reset but this will be 'undone' by next commands
266 | showPlaybackControls(true);
267 | showInputControls(false);
268 | showCameraControls(true);
269 | showPluginDialogs(true);
270 | this->parentWidget()->setWindowTitle("Vimba Live Camera Feed");
271 | }
272 |
273 | void MainGui::IdsFeedPressed()
274 | {
275 | emit newIdsFeedNeeded(true); // if this fails, the controls will be reset but this will be 'undone' by next commands
276 | showPlaybackControls(true);
277 | showInputControls(false);
278 | showCameraControls(true);
279 | showPluginDialogs(true);
280 | this->parentWidget()->setWindowTitle("Ids Live Camera Feed");
281 | }
282 |
283 | void MainGui::needNewSample()
284 | {
285 | getNewSample=true;
286 | }
287 |
288 | void MainGui::getSnapshot(QString location) {
289 | if (location.endsWith(".png")) {
290 | // qDebug()<<"Ends in png";
291 | } else if (location.endsWith(".bmp")) {
292 | // qDebug()<<"Ends in bmp";
293 | } else if (location.endsWith(".jpg")) {
294 | // qDebug()<<"Ends in jpg";
295 | } else {
296 | location=location+".png";
297 | }
298 | snapshotLocation=location;
299 | recordSnapshot=true;
300 | }
301 |
302 | bool MainGui::saveSnapshot(ImagePacket theImage) {
303 | bool succ=cv::imwrite(snapshotLocation.toStdString().c_str(),theImage.image);
304 | recordSnapshot=false;
305 | return succ;
306 | }
307 |
308 |
309 |
310 |
--------------------------------------------------------------------------------