├── library
├── export.txt
├── jna.jar
└── gst1-java-core-1.4.0.jar
├── .github
└── FUNDING.yml
├── resources
├── code
│ ├── ExampleTaglet.class
│ ├── ant-contrib-1.0b3.jar
│ ├── doc.sh
│ └── ExampleTaglet.java
└── stylesheet.css
├── examples
├── Movie
│ ├── Loop
│ │ ├── data
│ │ │ └── launch2.mp4
│ │ └── Loop.pde
│ ├── Speed
│ │ ├── data
│ │ │ └── launch2.mp4
│ │ └── Speed.pde
│ ├── Frames
│ │ ├── data
│ │ │ └── launch2.mp4
│ │ └── Frames.pde
│ ├── Pixelate
│ │ ├── data
│ │ │ └── launch2.mp4
│ │ └── Pixelate.pde
│ ├── Reverse
│ │ ├── data
│ │ │ └── launch2.mp4
│ │ └── Reverse.pde
│ └── Scratch
│   ├── data
│   │   └── launch2.mp4
│   └── Scratch.pde
└── Capture
│ ├── AsciiVideo
│ ├── data
│ │ └── UniversLTStd-Light-48.vlw
│ └── AsciiVideo.pde
│ ├── ColorSorting
│ ├── Tuple.pde
│ └── ColorSorting.pde
│ ├── HsvSpace
│ ├── Tuple.pde
│ └── HsvSpace.pde
│ ├── CustomPipeline
│ └── CustomPipeline.pde
│ ├── LivePocky
│ └── LivePocky.pde
│ ├── GettingStartedCapture
│ └── GettingStartedCapture.pde
│ ├── SlitScan
│ └── SlitScan.pde
│ ├── Framingham
│ └── Framingham.pde
│ ├── Mirror2
│ └── Mirror2.pde
│ ├── BrightnessTracking
│ └── BrightnessTracking.pde
│ ├── RadialPocky
│ └── RadialPocky.pde
│ ├── BrightnessThresholding
│ └── BrightnessThresholding.pde
│ ├── Mirror
│ └── Mirror.pde
│ ├── TimeDisplacement
│ └── TimeDisplacement.pde
│ ├── FrameDifferencing
│ └── FrameDifferencing.pde
│ ├── BackgroundSubtraction
│ └── BackgroundSubtraction.pde
│ └── Spatiotemporal
│ └── Spatiotemporal.pde
├── .gitignore
├── library.properties
├── .project
├── scripts
├── pack_windows_libs.bat
├── macosx_gstreamer_install.sh
├── pack_linux_libs.sh
├── pack_macosx_libs.sh
├── macosx_relocator.py
└── macosx_remove_extra_libs.py
├── .classpath
├── processing-video.iml
├── ffs.entitlements
├── .settings
├── org.eclipse.jdt.core.prefs.txt
└── org.eclipse.jdt.core.prefs
├── README.md
└── src
└── processing
└── video
├── Environment.java
├── Video.java
├── LibraryLoader.java
├── Movie.java
└── Capture.java
/library/export.txt:
--------------------------------------------------------------------------------
1 | name = Video
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: processing
2 | custom: https://processingfoundation.org/
3 |
--------------------------------------------------------------------------------
/library/jna.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/library/jna.jar
--------------------------------------------------------------------------------
/library/gst1-java-core-1.4.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/library/gst1-java-core-1.4.0.jar
--------------------------------------------------------------------------------
/resources/code/ExampleTaglet.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/resources/code/ExampleTaglet.class
--------------------------------------------------------------------------------
/examples/Movie/Loop/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Loop/data/launch2.mp4
--------------------------------------------------------------------------------
/examples/Movie/Speed/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Speed/data/launch2.mp4
--------------------------------------------------------------------------------
/resources/code/ant-contrib-1.0b3.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/resources/code/ant-contrib-1.0b3.jar
--------------------------------------------------------------------------------
/examples/Movie/Frames/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Frames/data/launch2.mp4
--------------------------------------------------------------------------------
/examples/Movie/Pixelate/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Pixelate/data/launch2.mp4
--------------------------------------------------------------------------------
/examples/Movie/Reverse/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Reverse/data/launch2.mp4
--------------------------------------------------------------------------------
/examples/Movie/Scratch/data/launch2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Movie/Scratch/data/launch2.mp4
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | *~
3 |
4 | bin
5 | /dist
6 | /library/macos*
7 | /library/windows*
8 | /library/linux*
9 |
10 | /library/video.jar
11 |
12 | local.properties
13 |
--------------------------------------------------------------------------------
/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/processing-video/HEAD/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw
--------------------------------------------------------------------------------
/library.properties:
--------------------------------------------------------------------------------
1 | name = Video Library for Processing 4
2 | authorList = The Processing Foundation
3 | url = http://processing.org/reference/libraries/video/index.html
4 | category = Video & Vision
5 | sentence = GStreamer-based video library for Processing.
6 | paragraph =
7 | version = 12
8 | prettyVersion = 2.2.2
9 | minRevision = 1281
10 | maxRevision = 0
11 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | processing-video
4 |
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 |
15 | org.eclipse.jdt.core.javanature
16 |
17 |
18 |
--------------------------------------------------------------------------------
/scripts/pack_windows_libs.bat:
--------------------------------------------------------------------------------
1 | SET gst_folder=C:\gstreamer
2 | SET gst_toolchain=msvc
3 | SET lib_folder=..\library
4 |
5 | echo "Copying base gstreamer libs..."
6 | md %lib_folder%\windows-amd64
7 | copy %gst_folder%\1.0\%gst_toolchain%_x86_64\bin\*.dll %lib_folder%\windows-amd64
8 |
9 | echo "Copying gstreamer plugins..."
10 | md %lib_folder%\windows-amd64\gstreamer-1.0
11 | copy %gst_folder%\1.0\%gst_toolchain%_x86_64\lib\gstreamer-1.0\*.dll %lib_folder%\windows-amd64\gstreamer-1.0
12 |
--------------------------------------------------------------------------------
/resources/code/doc.sh:
--------------------------------------------------------------------------------
1 | # a shell script to create a java documentation
2 | # for a processing library.
3 | #
4 | # make changes to the variables below so they
5 | # fit the structure of your library
6 |
7 | # the package name of your library
8 | package=template;
9 |
10 | # source folder location
11 | src=../src;
12 |
13 | # the destination folder of your documentation
14 | dest=../documentation;
15 |
16 |
17 | # compile the java documentation
18 | javadoc -d $dest -stylesheetfile ./stylesheet.css -sourcepath ${src} ${package}
19 |
--------------------------------------------------------------------------------
/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/examples/Movie/Loop/Loop.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Loop.
3 | *
4 | * Shows how to load and play a movie file in a loop.
5 | *
6 | */
7 |
8 | import processing.video.*;
9 |
10 | Movie movie;
11 |
12 | void setup() {
13 | size(560, 406);
14 | background(0);
15 | // Load and play the video in a loop
16 | movie = new Movie(this, "launch2.mp4");
17 | movie.loop();
18 | }
19 |
20 | void movieEvent(Movie m) {
21 | m.read();
22 | }
23 |
24 | void draw() {
25 | //if (movie.available() == true) {
26 | // movie.read();
27 | //}
28 | image(movie, 0, 0, width, height);
29 | }
--------------------------------------------------------------------------------
/processing-video.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/examples/Movie/Speed/Speed.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Speed.
3 | *
4 | * Use the Movie.speed() method to change
5 | * the playback speed.
6 | *
7 | */
8 |
9 | import processing.video.*;
10 |
11 | Movie mov;
12 |
13 | void setup() {
14 | size(560, 406);
15 | background(0);
16 | mov = new Movie(this, "launch2.mp4");
17 | mov.loop();
18 | }
19 |
20 | void movieEvent(Movie movie) {
21 | movie.read(); // Read the frame passed to the event (the global 'mov' was used before, shadow-style; same object, but this matches the other examples)
22 | }
23 |
24 | void draw() {
25 | image(mov, 0, 0);
26 |
27 | float newSpeed = map(mouseX, 0, width, 0.1, 2);
28 | mov.speed(newSpeed);
29 |
30 | fill(255);
31 | text(nfc(newSpeed, 2) + "X", 10, 30);
32 | }
--------------------------------------------------------------------------------
/ffs.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | com.apple.security.cs.allow-jit
6 |
7 | com.apple.security.cs.allow-unsigned-executable-memory
8 |
9 | com.apple.security.cs.disable-executable-page-protection
10 |
11 | com.apple.security.cs.disable-library-validation
12 |
13 | com.apple.security.cs.allow-dyld-environment-variables
14 |
15 |
16 |
--------------------------------------------------------------------------------
/examples/Capture/ColorSorting/Tuple.pde:
--------------------------------------------------------------------------------
1 | // Simple vector class that holds an x,y,z position.
2 |
3 | class Tuple {
4 | float x, y, z;
5 |
6 | Tuple() { }
7 |
8 | Tuple(float x, float y, float z) {
9 | set(x, y, z);
10 | }
11 |
12 | void set(float x, float y, float z) {
13 | this.x = x;
14 | this.y = y;
15 | this.z = z;
16 | }
17 |
18 | void target(Tuple another, float amount) {
19 | float amount1 = 1.0 - amount;
20 | x = x*amount1 + another.x*amount;
21 | y = y*amount1 + another.y*amount;
22 | z = z*amount1 + another.z*amount;
23 | }
24 |
25 | void phil() {
26 | fill(x, y, z);
27 | }
28 | }
29 |
30 |
--------------------------------------------------------------------------------
/.settings/org.eclipse.jdt.core.prefs.txt:
--------------------------------------------------------------------------------
1 | #Sat Nov 12 10:54:16 CST 2011
2 | eclipse.preferences.version=1
3 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
4 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
5 | org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
6 | org.eclipse.jdt.core.compiler.compliance=1.6
7 | org.eclipse.jdt.core.compiler.debug.lineNumber=generate
8 | org.eclipse.jdt.core.compiler.debug.localVariable=generate
9 | org.eclipse.jdt.core.compiler.debug.sourceFile=generate
10 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
11 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
12 | org.eclipse.jdt.core.compiler.source=1.6
13 |
--------------------------------------------------------------------------------
/examples/Capture/HsvSpace/Tuple.pde:
--------------------------------------------------------------------------------
1 | // Simple vector class that holds an x,y,z position.
2 |
3 | class Tuple {
4 | float x, y, z;
5 |
6 | Tuple() { }
7 |
8 | Tuple(float x, float y, float z) {
9 | set(x, y, z);
10 | }
11 |
12 | void set(float x, float y, float z) {
13 | this.x = x;
14 | this.y = y;
15 | this.z = z;
16 | }
17 |
18 | void target(Tuple another, float amount) {
19 | float amount1 = 1.0 - amount;
20 | x = x*amount1 + another.x*amount;
21 | y = y*amount1 + another.y*amount;
22 | z = z*amount1 + another.z*amount;
23 | }
24 |
25 | void phil() {
26 | fill(x, y, z);
27 | }
28 |
29 | void tran() {
30 | translate(x, y, z);
31 | }
32 | }
33 |
34 |
--------------------------------------------------------------------------------
/scripts/macosx_gstreamer_install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script downloads the latest GStreamer universal runtime package
4 |
5 | GST_VERSION=${1:-1.20.3}
6 | GST_PKG_URL="https://gstreamer.freedesktop.org/data/pkg/osx"
7 | DOWNLOAD_PATH="."
8 | TARGET_PATH="/"
9 | CURRENT_PATH=`pwd`
10 |
11 | echo "FULL INSTALL..."
12 |
13 | SRC_FILE="$GST_PKG_URL/$GST_VERSION/gstreamer-1.0-$GST_VERSION-universal.pkg"
14 | DEST_FILE="$DOWNLOAD_PATH/gstreamer-1.0-$GST_VERSION-universal.pkg"
15 |
16 | curl $SRC_FILE --output $DEST_FILE
17 |
18 | sudo installer -pkg $DOWNLOAD_PATH/gstreamer-1.0-$GST_VERSION-universal.pkg -target $TARGET_PATH
19 |
20 | rm $DOWNLOAD_PATH/gstreamer-1.0-$GST_VERSION-universal.pkg
21 |
22 | echo "DONE..."
--------------------------------------------------------------------------------
/.settings/org.eclipse.jdt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
3 | org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate
4 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
5 | org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
6 | org.eclipse.jdt.core.compiler.compliance=1.8
7 | org.eclipse.jdt.core.compiler.debug.lineNumber=generate
8 | org.eclipse.jdt.core.compiler.debug.localVariable=generate
9 | org.eclipse.jdt.core.compiler.debug.sourceFile=generate
10 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
11 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
12 | org.eclipse.jdt.core.compiler.source=1.8
13 |
--------------------------------------------------------------------------------
/scripts/pack_linux_libs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Set the appropriate build dist folder in this env variable
4 | meson_build_folder=/home/andres/code/gstreamer/build-1.20/lib/x86_64-linux-gnu
5 |
6 | # Copy the build to the native library folder for linux
7 | mkdir ../library/linux-amd64
8 | cp -a ${meson_build_folder}/* ../library/linux-amd64
9 |
10 | # Remove static .a libs
11 | rm -r ../library/linux-amd64/*.a
12 |
13 | # Remove unnecessary folders
14 | rm -r ../library/linux-amd64/cairo
15 | rm -r ../library/linux-amd64/cmake
16 | rm -r ../library/linux-amd64/gio
17 | rm -r ../library/linux-amd64/glib-2.0
18 | rm -r ../library/linux-amd64/gst-validate-launcher
19 | rm -r ../library/linux-amd64/pkgconfig
20 | rm -r ../library/linux-amd64/gstreamer-1.0/validate
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Processing Video
2 | ================
3 |
4 | Repository of the video library for Processing.
5 |
6 | This library comprises classes for movie playback and video capture. It is based on the [gstreamer multimedia framework](http://gstreamer.freedesktop.org/), and uses the [gst1-java-core](https://github.com/gstreamer-java/gst1-java-core) bindings to interface gstreamer from Java.
7 |
8 | The version in master currently uses GStreamer 1.x, thanks to the amazing work by [Gottfried Haider](https://github.com/gohai).
9 |
10 | Please submit all your bug reports and pull requests related to the video library here. Your contribution is really important to keep the Processing project moving forward!
11 |
12 | The video library developers
13 |
14 | 6 January 2018
15 |
--------------------------------------------------------------------------------
/examples/Capture/CustomPipeline/CustomPipeline.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * CustomPipeline
3 | * by Andres Colubri.
4 | *
5 | * Create a Capture object with a pipeline description to
6 | * get video from non-standard sources.
7 | */
8 |
9 | import processing.video.*;
10 |
11 | Capture cam;
12 |
13 | void setup() {
14 | size(640, 480);
15 |
16 | // Start the pipeline description with the "pipeline:" prefix,
17 | // the rest could be any regular GStreamer pipeline as passed to gst-launch:
18 | // https://gstreamer.freedesktop.org/documentation/tools/gst-launch.html?gi-language=c#pipeline-description
19 | cam = new Capture(this, 640, 480, "pipeline:videotestsrc");
20 | cam.start();
21 | }
22 |
23 | void draw() {
24 | if (cam.available() == true) {
25 | cam.read();
26 | }
27 | image(cam, 0, 0, width, height);
28 | }
--------------------------------------------------------------------------------
/examples/Movie/Scratch/Scratch.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Scratch
3 | * by Andres Colubri.
4 | *
5 | * Move the cursor horizontally across the screen to set
6 | * the position in the movie file.
7 | */
8 |
9 | import processing.video.*;
10 |
11 | Movie mov;
12 |
13 | void setup() {
14 | size(560, 406);
15 | background(0);
16 |
17 | mov = new Movie(this, "launch2.mp4");
18 |
19 | // Pausing the video at the first frame.
20 | mov.play();
21 | mov.jump(0);
22 | mov.pause();
23 | }
24 |
25 | void draw() {
26 |
27 | if (mov.available()) {
28 | mov.read();
29 | // A new time position is calculated using the current mouse location:
30 | float f = map(mouseX, 0, width, 0, 1);
31 | float t = mov.duration() * f;
32 | mov.play();
33 | mov.jump(t);
34 | mov.pause();
35 | }
36 |
37 | image(mov, 0, 0);
38 | }
--------------------------------------------------------------------------------
/examples/Capture/LivePocky/LivePocky.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Live Pocky
3 | * by Ben Fry.
4 | *
5 | * Unwrap each frame of live video into a single line of pixels.
6 | */
7 |
8 | import processing.video.*;
9 |
10 | Capture video;
11 | int count;
12 | int writeRow;
13 | int maxRows;
14 | int topRow;
15 | int buffer[];
16 |
17 |
18 | void setup() {
19 | size(600, 400);
20 |
21 | // This is the default video input; see the GettingStartedCapture
22 | // example if it creates an error
23 | video = new Capture(this, 320, 240);
24 |
25 | // Start capturing the images from the camera
26 | video.start();
27 |
28 | maxRows = height * 2;
29 | buffer = new int[width * maxRows];
30 | writeRow = height - 1;
31 | topRow = 0;
32 |
33 | background(0);
34 | loadPixels();
35 | }
36 |
37 |
38 | void draw() {
39 | video.loadPixels();
40 | arraycopy(video.pixels, 0, buffer, writeRow * width, width);
41 | writeRow++;
42 | if (writeRow == maxRows) {
43 | writeRow = 0;
44 | }
45 | topRow++;
46 |
47 | for (int y = 0; y < height; y++) {
48 | int row = (topRow + y) % maxRows;
49 | arraycopy(buffer, row * width, g.pixels, y*width, width);
50 | }
51 | updatePixels();
52 | }
53 |
54 |
55 | void captureEvent(Capture c) {
56 | c.read();
57 | }
58 |
--------------------------------------------------------------------------------
/examples/Movie/Pixelate/Pixelate.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Pixelate
3 | * by Hernando Barragan.
4 | *
5 | * Load a movie file and display the video signal
6 | * using rectangles as pixels by reading the values stored
7 | * in the current video frame pixels array.
8 | */
9 |
10 | import processing.video.*;
11 |
12 | int numPixelsWide, numPixelsHigh;
13 | int blockSize = 10;
14 | Movie mov;
15 | color movColors[];
16 |
17 | void setup() {
18 | size(560, 406);
19 | noStroke();
20 | mov = new Movie(this, "launch2.mp4");
21 | mov.loop();
22 | numPixelsWide = width / blockSize;
23 | numPixelsHigh = height / blockSize;
24 | println(numPixelsWide);
25 | movColors = new color[numPixelsWide * numPixelsHigh];
26 | }
27 |
28 | // Display values from movie
29 | void draw() {
30 | if (mov.available() == true) {
31 | mov.read();
32 | mov.loadPixels();
33 | int count = 0;
34 | for (int j = 0; j < numPixelsHigh; j++) {
35 | for (int i = 0; i < numPixelsWide; i++) {
36 | movColors[count] = mov.get(i*blockSize, j*blockSize);
37 | count++;
38 | }
39 | }
40 | }
41 |
42 | background(255);
43 | for (int j = 0; j < numPixelsHigh; j++) {
44 | for (int i = 0; i < numPixelsWide; i++) {
45 | fill(movColors[j*numPixelsWide + i]);
46 | rect(i*blockSize, j*blockSize, blockSize, blockSize);
47 | }
48 | }
49 |
50 | }
--------------------------------------------------------------------------------
/examples/Movie/Reverse/Reverse.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Reverse playback example.
3 | *
4 | * The Movie.speed() method allows you to change the playback speed.
5 | * Use negative values for backwards playback. Note that not all
6 | * video formats support backwards playback. This depends on the
7 | * underlying GStreamer plugins used by the video library. For example,
8 | * the Theora codec supports backward playback, but the H264 codec
9 | * does not, at least in its current version.
10 | *
11 | */
12 |
13 | import processing.video.*;
14 |
15 | Movie mov;
16 | boolean speedSet = false;
17 | boolean once = true;
18 |
19 | void setup() {
20 | size(560, 406);
21 | background(0);
22 | mov = new Movie(this, "launch2.mp4");
23 | mov.play();
24 | }
25 |
26 | void movieEvent(Movie m) {
27 | m.read();
28 | if (speedSet == true) {
29 | speedSet = false;
30 | }
31 | }
32 |
33 | void draw() {
34 | if (speedSet == false && once == true) {
35 | // Setting the speed should be done only once,
36 | // this is the reason for the if statement.
37 | speedSet = true;
38 | once = false;
39 | mov.jump(mov.duration());
40 | // -1 means backward playback at normal speed.
41 | mov.speed(-1.0);
42 | // Setting to play again, since the movie stop
43 | // playback once it reached the end.
44 | mov.play();
45 | }
46 | image(mov, 0, 0, width, height);
47 | }
--------------------------------------------------------------------------------
/examples/Capture/GettingStartedCapture/GettingStartedCapture.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Getting Started with Capture.
3 | *
4 | * Reading and displaying an image from an attached Capture device.
5 | */
6 |
7 | import processing.video.*;
8 |
9 | Capture cam;
10 |
11 | void setup() {
12 | size(640, 480);
13 |
14 | String[] cameras = Capture.list();
15 |
16 | if (cameras == null) {
17 | println("Failed to retrieve the list of available cameras, will try the default...");
18 | cam = new Capture(this, 640, 480);
19 | } else if (cameras.length == 0) {
20 | println("There are no cameras available for capture.");
21 | exit();
22 | } else {
23 | println("Available cameras:");
24 | printArray(cameras);
25 |
26 | // The camera can be initialized directly using an element
27 | // from the array returned by list():
28 | cam = new Capture(this, cameras[0]);
29 |
30 | // Or, the camera name can be retrieved from the list (you need
31 | // to enter a valid width, height, and frame rate for the camera).
32 | //cam = new Capture(this, 640, 480, "FaceTime HD Camera (Built-in)", 30);
33 | }
34 |
35 | // Start capturing the images from the camera
36 | cam.start();
37 | }
38 |
39 | void draw() {
40 | if (cam.available() == true) {
41 | cam.read();
42 | }
43 | image(cam, 0, 0, width, height);
44 | // The following does the same as the above image() line, but
45 | // is faster when just drawing the image without any additional
46 | // resizing, transformations, or tint.
47 | //set(0, 0, cam);
48 | }
49 |
--------------------------------------------------------------------------------
/examples/Capture/SlitScan/SlitScan.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Simple Real-Time Slit-Scan Program.
3 | * By Golan Levin.
4 | *
5 | * This demonstration depends on the canvas height being equal
6 | * to the video capture height. If you would prefer otherwise,
7 | * consider using the image copy() function rather than the
8 | * direct pixel-accessing approach I have used here.
9 | */
10 |
11 |
12 | import processing.video.*;
13 |
14 | Capture video;
15 |
16 | int videoSliceX;
17 | int drawPositionX;
18 |
19 | void setup() {
20 | size(600, 240);
21 |
22 | // This is the default video input; see the GettingStartedCapture
23 | // example if it creates an error
24 | video = new Capture(this,320, 240);
25 |
26 | // Start capturing the images from the camera
27 | video.start();
28 |
29 | videoSliceX = video.width / 2;
30 | drawPositionX = width - 1;
31 | background(0);
32 | }
33 |
34 |
35 | void draw() {
36 | if (video.available()) {
37 | video.read();
38 | video.loadPixels();
39 |
40 | // Copy a column of pixels from the middle of the video
41 | // To a location moving slowly across the canvas.
42 | loadPixels();
43 | for (int y = 0; y < video.height; y++){
44 | int setPixelIndex = y*width + drawPositionX;
45 | int getPixelIndex = y*video.width + videoSliceX;
46 | pixels[setPixelIndex] = video.pixels[getPixelIndex];
47 | }
48 | updatePixels();
49 |
50 | drawPositionX--;
51 | // Wrap the position back to the beginning if necessary.
52 | if (drawPositionX < 0) {
53 | drawPositionX = width - 1;
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/examples/Capture/Framingham/Framingham.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Framingham
3 | * by Ben Fry.
4 | *
5 | * Show subsequent frames from video input as a grid. Also fun with movie files.
6 | */
7 |
8 |
9 | import processing.video.*;
10 |
11 | Capture video;
12 | int column;
13 | int columnCount;
14 | int lastRow;
15 |
16 | // Buffer used to move all the pixels up
17 | int[] scoot;
18 |
19 |
20 | void setup() {
21 | size(640, 480);
22 |
23 | // This is the default video input; see the GettingStartedCapture
24 | // example if it creates an error
25 | video = new Capture(this, 160, 120);
26 |
27 | // Start capturing the images from the camera
28 | video.start();
29 |
30 | column = 0;
31 | columnCount = width / video.width;
32 | int rowCount = height / video.height;
33 | lastRow = rowCount - 1;
34 |
35 | scoot = new int[lastRow*video.height * width];
36 | background(0);
37 | }
38 |
39 |
40 | void draw() {
41 | // By using video.available, only the frame rate need be set inside setup()
42 | if (video.available()) {
43 | video.read();
44 | video.loadPixels();
45 | image(video, video.width*column, video.height*lastRow);
46 | column++;
47 | if (column == columnCount) {
48 | loadPixels();
49 |
50 | // Scoot everybody up one row
51 | arrayCopy(pixels, video.height*width, scoot, 0, scoot.length);
52 | arrayCopy(scoot, 0, pixels, 0, scoot.length);
53 |
54 | // Set the moved row to black
55 | for (int i = scoot.length; i < width*height; i++) {
56 | pixels[i] = #000000;
57 | }
58 | column = 0;
59 | updatePixels();
60 | }
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/examples/Capture/Mirror2/Mirror2.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Mirror 2
3 | * by Daniel Shiffman.
4 | *
5 | * Each pixel from the video source is drawn as a rectangle with size based on brightness.
6 | */
7 |
8 | import processing.video.*;
9 |
10 | // Size of each cell in the grid
11 | int cellSize = 15;
12 | // Number of columns and rows in our system
13 | int cols, rows;
14 | // Variable for capture device
15 | Capture video;
16 |
17 |
18 | void setup() {
19 | size(640, 480);
20 | // Set up columns and rows
21 | cols = width / cellSize;
22 | rows = height / cellSize;
23 | colorMode(RGB, 255, 255, 255, 100);
24 | rectMode(CENTER);
25 |
26 | // This is the default video input; see the GettingStartedCapture
27 | // example if it creates an error
28 | video = new Capture(this, width, height);
29 |
30 | // Start capturing the images from the camera
31 | video.start();
32 |
33 | background(0);
34 | }
35 |
36 |
37 | void draw() {
38 | if (video.available()) {
39 | video.read();
40 | video.loadPixels();
41 |
42 | background(0, 0, 255);
43 |
44 | // Begin loop for columns
45 | for (int i = 0; i < cols;i++) {
46 | // Begin loop for rows
47 | for (int j = 0; j < rows;j++) {
48 |
49 | // Where are we, pixel-wise?
50 | int x = i * cellSize;
51 | int y = j * cellSize;
52 | int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
53 |
54 | // Each rect is colored white with a size determined by brightness
55 | color c = video.pixels[loc];
56 | float sz = (brightness(c) / 255.0) * cellSize;
57 | fill(255);
58 | noStroke();
59 | rect(x + cellSize/2, y + cellSize/2, sz, sz);
60 | }
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/examples/Capture/BrightnessTracking/BrightnessTracking.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Brightness Tracking
3 | * by Golan Levin.
4 | *
5 | * Tracks the brightest pixel in a live video signal.
6 | */
7 |
8 |
9 | import processing.video.*;
10 |
11 | Capture video;
12 |
13 | void setup() {
14 | size(640, 480);
15 | // Uses the default video input, see the reference if this causes an error
16 | video = new Capture(this, width, height);
17 | video.start();
18 | noStroke();
19 | smooth();
20 | }
21 |
22 | void draw() {
23 | if (video.available()) {
24 | video.read();
25 | image(video, 0, 0, width, height); // Draw the webcam video onto the screen
26 | int brightestX = 0; // X-coordinate of the brightest video pixel
27 | int brightestY = 0; // Y-coordinate of the brightest video pixel
28 | float brightestValue = 0; // Brightness of the brightest video pixel
29 | // Search for the brightest pixel: For each row of pixels in the video image and
30 | // for each pixel in the yth row, compute each pixel's index in the video
31 | video.loadPixels();
32 | int index = 0;
33 | for (int y = 0; y < video.height; y++) {
34 | for (int x = 0; x < video.width; x++) {
35 | // Get the color stored in the pixel
36 | int pixelValue = video.pixels[index];
37 | // Determine the brightness of the pixel
38 | float pixelBrightness = brightness(pixelValue);
39 | // If that value is brighter than any previous, then store the
40 | // brightness of that pixel, as well as its (x,y) location
41 | if (pixelBrightness > brightestValue) {
42 | brightestValue = pixelBrightness;
43 | brightestY = y;
44 | brightestX = x;
45 | }
46 | index++;
47 | }
48 | }
49 | // Draw a large, yellow circle at the brightest pixel
50 | fill(255, 204, 0, 128);
51 | ellipse(brightestX, brightestY, 200, 200);
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/examples/Movie/Frames/Frames.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Frames
3 | * by Andres Colubri.
4 | *
5 | * Moves through the video one frame at the time by using the
6 | * arrow keys. It estimates the frame counts using the framerate
7 | * of the movie file, so it might not be exact in some cases.
8 | */
9 |
10 | import processing.video.*;
11 |
12 | Movie mov;
13 | int newFrame = 0;
14 |
15 | void setup() {
16 | size(560, 406);
17 | background(0);
18 | // Load and set the video to play. Setting the video
19 | // in play mode is needed so at least one frame is read
20 | // and we can get duration, size and other information from
21 | // the video stream.
22 | mov = new Movie(this, "launch2.mp4");
23 |
24 | // Pausing the video at the first frame.
25 | mov.play();
26 | mov.jump(0);
27 | mov.pause();
28 | }
29 |
30 | void movieEvent(Movie m) {
31 | m.read();
32 | }
33 |
34 | void draw() {
35 | background(0);
36 | image(mov, 0, 0, width, height);
37 | fill(0);
38 | text(getFrame() + " / " + (getLength() - 1), 10, 30);
39 | }
40 |
41 | void keyPressed() {
42 | if (key == CODED) {
43 | if (keyCode == LEFT) {
44 | if (0 < newFrame) newFrame--;
45 | } else if (keyCode == RIGHT) {
46 | if (newFrame < getLength() - 1) newFrame++;
47 | }
48 | }
49 | setFrame(newFrame);
50 | }
51 |
52 | int getFrame() {
53 | return ceil(mov.time() * 30) - 1;
54 | }
55 |
56 | void setFrame(int n) {
57 | mov.play();
58 |
59 | // The duration of a single frame:
60 | float frameDuration = 1.0 / mov.frameRate;
61 |
62 | // We move to the middle of the frame by adding 0.5:
63 | float where = (n + 0.5) * frameDuration;
64 |
65 | // Taking into account border effects:
66 | float diff = mov.duration() - where;
67 | if (diff < 0) {
68 | where += diff - 0.25 * frameDuration;
69 | }
70 |
71 | mov.jump(where);
72 | mov.pause();
73 | }
74 |
75 | int getLength() {
76 | return int(mov.duration() * mov.frameRate);
77 | }
--------------------------------------------------------------------------------
/examples/Capture/RadialPocky/RadialPocky.pde:
--------------------------------------------------------------------------------
/**
 * Radial Pocky
 * by Ben Fry.
 *
 * Unwrap each frame of live video into a single line of pixels along a circle
 */

import processing.video.*;

Capture video;
int videoCount;        // total number of pixels in one captured video frame
int currentAngle;      // the angular slice being painted during this frame
int pixelCount;        // total number of pixels in the sketch window
int angleCount = 200; // how many divisions

// Per-screen-pixel lookup tables, precomputed once in setup():
// radii[i]  - distance of screen pixel i from the window center; used
//             directly as a LINEAR index into video.pixels (this is the
//             "single line of pixels" being unwrapped along the circle)
// angles[i] - which of the angleCount angular slices pixel i belongs to
int radii[];
int angles[];


void setup() {
  // size must be set to video.width*video.height*2 in both directions
  // NOTE(review): the comment above does not match the size actually used
  // below — presumably left over from an earlier version of the sketch.
  size(600, 600);

  // This the default video input, see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);

  // Start capturing the images from the camera
  video.start();

  videoCount = video.width * video.height;

  pixelCount = width*height;
  int centerX = width / 2;
  int centerY = height / 2;
  radii = new int[pixelCount];
  angles = new int[pixelCount];

  // Precompute, for each screen pixel, its polar coordinates relative
  // to the window center.
  int offset = 0;
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      int dx = centerX - x;
      int dy = centerY - y;

      // Map the angle [0, TWO_PI) onto a slice index [0, angleCount)
      float angle = atan2(dy, dx);
      if (angle < 0) angle += TWO_PI;
      angles[offset] = (int) (angleCount * (angle / TWO_PI));

      int radius = (int) mag(dx, dy);
      // Disable pixels whose radius would index past the video frame.
      // NOTE(review): at 600x600 the maximum radius (~424) is far below
      // videoCount (160*120), so this guard never fires here — it matters
      // only for much larger windows or smaller captures.
      if (radius >= videoCount) {
        radius = -1;
        angles[offset] = -1;
      }
      radii[offset] = radius;

      offset++;
    }
  }
  background(0);
}


void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();

    // Paint only the screen pixels belonging to the current angular slice,
    // each taking its color from the video pixel at its radius. Slices are
    // filled one per captured frame, sweeping around the circle over time.
    loadPixels();
    for (int i = 0; i < pixelCount; i++) {
      if (angles[i] == currentAngle) {
        pixels[i] = video.pixels[radii[i]];
      }
    }
    updatePixels();

    // Advance to the next slice, wrapping around after a full revolution
    currentAngle++;
    if (currentAngle == angleCount) {
      currentAngle = 0;
    }
  }
}
--------------------------------------------------------------------------------
/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Brightness Thresholding
3 | * by Golan Levin.
4 | *
5 | * Determines whether a test location (such as the cursor) is contained within
6 | * the silhouette of a dark object.
7 | */
8 |
9 |
10 | import processing.video.*;
11 |
12 | color black = color(0);
13 | color white = color(255);
14 | int numPixels;
15 | Capture video;
16 |
17 | void setup() {
18 | size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
19 | strokeWeight(5);
20 |
21 | // This the default video input, see the GettingStartedCapture
22 | // example if it creates an error
23 | video = new Capture(this, width, height);
24 |
25 | // Start capturing the images from the camera
26 | video.start();
27 |
28 | numPixels = video.width * video.height;
29 | noCursor();
30 | smooth();
31 | }
32 |
33 | void draw() {
34 | if (video.available()) {
35 | video.read();
36 | video.loadPixels();
37 | int threshold = 127; // Set the threshold value
38 | float pixelBrightness; // Declare variable to store a pixel's color
39 | // Turn each pixel in the video frame black or white depending on its brightness
40 | loadPixels();
41 | for (int i = 0; i < numPixels; i++) {
42 | pixelBrightness = brightness(video.pixels[i]);
43 | if (pixelBrightness > threshold) { // If the pixel is brighter than the
44 | pixels[i] = white; // threshold value, make it white
45 | }
46 | else { // Otherwise,
47 | pixels[i] = black; // make it black
48 | }
49 | }
50 | updatePixels();
51 | // Test a location to see where it is contained. Fetch the pixel at the test
52 | // location (the cursor), and compute its brightness
53 | int testValue = get(mouseX, mouseY);
54 | float testBrightness = brightness(testValue);
55 | if (testBrightness > threshold) { // If the test location is brighter than
56 | fill(black); // the threshold set the fill to black
57 | }
58 | else { // Otherwise,
59 | fill(white); // set the fill to white
60 | }
61 | ellipse(mouseX, mouseY, 20, 20);
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/examples/Capture/Mirror/Mirror.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Mirror
3 | * by Daniel Shiffman.
4 | *
5 | * Each pixel from the video source is drawn as a rectangle with rotation based on brightness.
6 | */
7 |
8 | import processing.video.*;
9 |
10 |
11 | // Size of each cell in the grid
12 | int cellSize = 20;
13 | // Number of columns and rows in our system
14 | int cols, rows;
15 | // Variable for capture device
16 | Capture video;
17 |
18 |
19 | void setup() {
20 | size(640, 480);
21 | frameRate(30);
22 | cols = width / cellSize;
23 | rows = height / cellSize;
24 | colorMode(RGB, 255, 255, 255, 100);
25 |
26 | // This the default video input, see the GettingStartedCapture
27 | // example if it creates an error
28 | video = new Capture(this, width, height);
29 |
30 | // Start capturing the images from the camera
31 | video.start();
32 |
33 | background(0);
34 | }
35 |
36 |
37 | void draw() {
38 | if (video.available()) {
39 | video.read();
40 | video.loadPixels();
41 |
42 | // Begin loop for columns
43 | for (int i = 0; i < cols; i++) {
44 | // Begin loop for rows
45 | for (int j = 0; j < rows; j++) {
46 |
47 | // Where are we, pixel-wise?
48 | int x = i*cellSize;
49 | int y = j*cellSize;
50 | int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
51 |
52 | float r = red(video.pixels[loc]);
53 | float g = green(video.pixels[loc]);
54 | float b = blue(video.pixels[loc]);
55 | // Make a new color with an alpha component
56 | color c = color(r, g, b, 75);
57 |
58 | // Code for drawing a single rect
59 | // Using translate in order for rotation to work properly
60 | pushMatrix();
61 | translate(x+cellSize/2, y+cellSize/2);
62 | // Rotation formula based on brightness
63 | rotate((2 * PI * brightness(c) / 255.0));
64 | rectMode(CENTER);
65 | fill(c);
66 | noStroke();
67 | // Rects are larger than the cell for some overlap
68 | rect(0, 0, cellSize+6, cellSize+6);
69 | popMatrix();
70 | }
71 | }
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/examples/Capture/TimeDisplacement/TimeDisplacement.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Time Displacement
3 | * by David Muth
4 | *
5 | * Keeps a buffer of video frames in memory and displays pixel rows
6 | * taken from consecutive frames distributed over the y-axis
7 | */
8 |
9 | import processing.video.*;
10 |
11 | Capture video;
12 | int signal = 0;
13 |
14 | //the buffer for storing video frames
15 | ArrayList frames = new ArrayList();
16 |
17 | void setup() {
18 | size(640, 480);
19 |
20 | // This the default video input, see the GettingStartedCapture
21 | // example if it creates an error
22 | video = new Capture(this, width, height);
23 |
24 | // Start capturing the images from the camera
25 | video.start();
26 | }
27 |
28 | void captureEvent(Capture camera) {
29 | camera.read();
30 |
31 | // Copy the current video frame into an image, so it can be stored in the buffer
32 | PImage img = createImage(width, height, RGB);
33 | video.loadPixels();
34 | arrayCopy(video.pixels, img.pixels);
35 |
36 | frames.add(img);
37 |
38 | // Once there are enough frames, remove the oldest one when adding a new one
39 | if (frames.size() > height/4) {
40 | frames.remove(0);
41 | }
42 | }
43 |
44 | void draw() {
45 | // Set the image counter to 0
46 | int currentImage = 0;
47 |
48 | loadPixels();
49 |
50 | // Begin a loop for displaying pixel rows of 4 pixels height
51 | for (int y = 0; y < video.height; y+=4) {
52 | // Go through the frame buffer and pick an image, starting with the oldest one
53 | if (currentImage < frames.size()) {
54 | PImage img = (PImage)frames.get(currentImage);
55 |
56 | if (img != null) {
57 | img.loadPixels();
58 |
59 | // Put 4 rows of pixels on the screen
60 | for (int x = 0; x < video.width; x++) {
61 | pixels[x + y * width] = img.pixels[x + y * video.width];
62 | pixels[x + (y + 1) * width] = img.pixels[x + (y + 1) * video.width];
63 | pixels[x + (y + 2) * width] = img.pixels[x + (y + 2) * video.width];
64 | pixels[x + (y + 3) * width] = img.pixels[x + (y + 3) * video.width];
65 | }
66 | }
67 |
68 | // Increase the image counter
69 | currentImage++;
70 |
71 | } else {
72 | break;
73 | }
74 | }
75 |
76 | updatePixels();
77 |
78 | // For recording an image sequence
79 | //saveFrame("frame-####.jpg");
80 | }
81 |
82 |
83 |
84 |
85 |
--------------------------------------------------------------------------------
/examples/Capture/FrameDifferencing/FrameDifferencing.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Frame Differencing
3 | * by Golan Levin.
4 | *
5 | * Quantify the amount of movement in the video frame using frame-differencing.
6 | */
7 |
8 |
9 | import processing.video.*;
10 |
11 | int numPixels;
12 | int[] previousFrame;
13 | Capture video;
14 |
15 | void setup() {
16 | size(640, 480);
17 |
18 | // This the default video input, see the GettingStartedCapture
19 | // example if it creates an error
20 | video = new Capture(this, width, height);
21 |
22 | // Start capturing the images from the camera
23 | video.start();
24 |
25 | numPixels = video.width * video.height;
26 | // Create an array to store the previously captured frame
27 | previousFrame = new int[numPixels];
28 | loadPixels();
29 | }
30 |
31 | void draw() {
32 | if (video.available()) {
33 | // When using video to manipulate the screen, use video.available() and
34 | // video.read() inside the draw() method so that it's safe to draw to the screen
35 | video.read(); // Read the new frame from the camera
36 | video.loadPixels(); // Make its pixels[] array available
37 |
38 | int movementSum = 0; // Amount of movement in the frame
39 | for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
40 | color currColor = video.pixels[i];
41 | color prevColor = previousFrame[i];
42 | // Extract the red, green, and blue components from current pixel
43 | int currR = (currColor >> 16) & 0xFF; // Like red(), but faster
44 | int currG = (currColor >> 8) & 0xFF;
45 | int currB = currColor & 0xFF;
46 | // Extract red, green, and blue components from previous pixel
47 | int prevR = (prevColor >> 16) & 0xFF;
48 | int prevG = (prevColor >> 8) & 0xFF;
49 | int prevB = prevColor & 0xFF;
50 | // Compute the difference of the red, green, and blue values
51 | int diffR = abs(currR - prevR);
52 | int diffG = abs(currG - prevG);
53 | int diffB = abs(currB - prevB);
54 | // Add these differences to the running tally
55 | movementSum += diffR + diffG + diffB;
56 | // Render the difference image to the screen
57 | pixels[i] = color(diffR, diffG, diffB);
58 | // The following line is much faster, but more confusing to read
59 | //pixels[i] = 0xff000000 | (diffR << 16) | (diffG << 8) | diffB;
60 | // Save the current color into the 'previous' buffer
61 | previousFrame[i] = currColor;
62 | }
63 | // To prevent flicker from frames that are all black (no movement),
64 | // only update the screen if the image has changed.
65 | if (movementSum > 0) {
66 | updatePixels();
67 | println(movementSum); // Print the total amount of movement to the console
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/src/processing/video/Environment.java:
--------------------------------------------------------------------------------
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */

/*
  Part of the Processing project - http://processing.org

  Copyright (c) 2012-22 The Processing Foundation
  Copyright (c) 2004-12 Ben Fry and Casey Reas

  Based on from:
  http://blog.quirk.es/2009/11/setting-environment-variables-in-java.html

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public
  License as published by the Free Software Foundation; either
  version 2.1 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General
  Public License along with this library; if not, write to the
  Free Software Foundation, Inc., 59 Temple Place, Suite 330,
  Boston, MA 02111-1307 USA
*/

package processing.video;

import com.sun.jna.Library;
import com.sun.jna.Native;

/**
 * Access to the C runtime's environment-variable functions through JNA,
 * so environment variables of the current process can be modified
 * (Java's own System.getenv() view is read-only).
 */
public class Environment {

  /** JNA mapping of the Windows C runtime (msvcrt) environment functions. */
  public interface WinLibC extends Library {
    // _putenv takes a single "NAME=value" string; "NAME=" removes the variable.
    public int _putenv(String name);
    public String _getenv(String name);
  }

  /** JNA mapping of the POSIX libc environment functions. */
  public interface UnixLibC extends Library {
    public int setenv(String name, String value, int overwrite);
    public int unsetenv(String name);
    public String getenv(String name);
  }

  /**
   * Platform-independent wrapper that dispatches each call to the
   * appropriate native binding, loaded once at class initialization.
   */
  static public class POSIX {
    // Holds either a WinLibC or a UnixLibC proxy, chosen by OS at load time.
    static Object libc;
    static {
      if (System.getProperty("os.name").contains("Windows")) {
        libc = Native.load("msvcrt", WinLibC.class);
      } else {
        libc = Native.load("c", UnixLibC.class);
      }
    }

    /**
     * Sets an environment variable for the current process.
     * @param overwrite whether an existing value may be replaced.
     *        NOTE(review): on Windows the _putenv path ignores this flag
     *        and always overwrites — confirm this asymmetry is intended.
     * @return 0 on success, nonzero on failure (per the C functions)
     */
    public int setenv(String name, String value, boolean overwrite) {
      if (libc instanceof UnixLibC) {
        return ((UnixLibC)libc).setenv(name, value, overwrite?1:0);
      }
      else {
        return ((WinLibC)libc)._putenv(name + "=" + value);
      }
    }

    /** Removes a variable (on Windows via the "NAME=" form of _putenv). */
    public int unsetenv(String name) {
      if (libc instanceof UnixLibC) {
        return ((UnixLibC)libc).unsetenv(name);
      }
      else {
        return ((WinLibC)libc)._putenv(name + "=");
      }
    }

    /** Returns the variable's value, or null when it is unset (per C getenv). */
    public String getenv(String name) {
      if (libc instanceof UnixLibC) {
        return ((UnixLibC)libc).getenv(name);
      }
      else {
        return ((WinLibC)libc)._getenv(name);
      }
    }
  }

  // Ready-to-use instance for callers of this class.
  static POSIX libc = new POSIX();
}
--------------------------------------------------------------------------------
/scripts/pack_macosx_libs.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# This script bundles the binaries from a GStreamer system installation on Mac
# into the Processing video library. It relocates the binaries so they can be
# used from Processing without requiring a GStreamer installation on the users'
# computers, as detailed in the GStreamer documentation:
# https://gstreamer.freedesktop.org/documentation/deploying/mac-osx.html?gi-language=c#relocation-of-gstreamer-in-os-x

# Original version by Gottfried Haider.
# https://github.com/gohai/processing-glvideo/blob/master/src/native/Makefile

# Abort on the first failing command so a partial copy/relocation is not
# silently packaged.
set -e

# GStreamer minor version being packaged (1.x); defaults to 1.16.
gst_minor_ver=${1:-16}

# GStreamer >= 1.16 ships libraries with @rpath-relative install names;
# older versions use absolute framework paths.
if [ "${gst_minor_ver}" -gt 14 ]
then
  dep_path="@rpath/lib/"
else
  dep_path="/Library/Frameworks/GStreamer.framework/Versions/1.0/lib/"
fi

gst_folder=/Library/Frameworks/GStreamer.framework/Versions/1.0/lib
lib_folder_univ=../library/macos-universal
lib_folder_x86_64=../library/macos-x86_64
lib_folder_aarch64=../library/macos-aarch64

echo "Copying base gstreamer libs..."
mkdir -p "${lib_folder_univ}"
cp "${gst_folder}"/*.dylib "${lib_folder_univ}"

echo "Relocating dependencies in base libs..."
./macosx_relocator.py "${lib_folder_univ}" "${dep_path}" "@loader_path/"

echo "Copying gstreamer plugins..."
mkdir -p "${lib_folder_univ}/gstreamer-1.0"
cp "${gst_folder}"/gstreamer-1.0/* "${lib_folder_univ}/gstreamer-1.0"

# Remove plugins that give runtime errors:
rm -f "${lib_folder_univ}/gstreamer-1.0/libgstsrt.dylib"
rm -f "${lib_folder_univ}/gstreamer-1.0/libgstsrtp.dylib"

# These seem okay now (with GStreamer 1.20.x)
# rm -f ${lib_folder_univ}/gstreamer-1.0/libgsthls.so
# rm -f ${lib_folder_univ}/gstreamer-1.0/libgstopenjpeg.so

echo "Relocating dependencies in gstreamer plugins..."
./macosx_relocator.py "${lib_folder_univ}/gstreamer-1.0" "${dep_path}" "@loader_path/../"

echo "Removing unused dependencies..."
./macosx_remove_extra_libs.py

echo "Extracting x86_64 and aarch64 native libraries..."

# Split every universal dylib into per-architecture copies.
# All expansions are quoted so paths containing spaces survive word splitting.
mkdir -p "${lib_folder_x86_64}"
mkdir -p "${lib_folder_aarch64}"
for file in "${lib_folder_univ}"/*.dylib; do
  fn="$(basename "${file}")"
  lipo "${file}" -thin x86_64 -output "${lib_folder_x86_64}/${fn}"
  lipo "${file}" -thin arm64 -output "${lib_folder_aarch64}/${fn}"
done

mkdir -p "${lib_folder_x86_64}/gstreamer-1.0"
mkdir -p "${lib_folder_aarch64}/gstreamer-1.0"
for file in "${lib_folder_univ}"/gstreamer-1.0/*.dylib; do
  fn="$(basename "${file}")"
  lipo "${file}" -thin x86_64 -output "${lib_folder_x86_64}/gstreamer-1.0/${fn}"
  lipo "${file}" -thin arm64 -output "${lib_folder_aarch64}/gstreamer-1.0/${fn}"
done

echo "Removing universal native libraries..."
rm -rf "${lib_folder_univ}"

echo "Done."
/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Background Subtraction
3 | * by Golan Levin.
4 | *
5 | * Detect the presence of people and objects in the frame using a simple
6 | * background-subtraction technique. To initialize the background, press a key.
7 | */
8 |
9 |
10 | import processing.video.*;
11 |
12 | int numPixels;
13 | int[] backgroundPixels;
14 | Capture video;
15 |
16 | void setup() {
17 | size(640, 480);
18 |
19 | // This the default video input, see the GettingStartedCapture
20 | // example if it creates an error
21 | //video = new Capture(this, 160, 120);
22 | video = new Capture(this, width, height);
23 |
24 | // Start capturing the images from the camera
25 | video.start();
26 |
27 | numPixels = video.width * video.height;
28 | // Create array to store the background image
29 | backgroundPixels = new int[numPixels];
30 | // Make the pixels[] array available for direct manipulation
31 | loadPixels();
32 | }
33 |
34 | void draw() {
35 | if (video.available()) {
36 | video.read(); // Read a new video frame
37 | video.loadPixels(); // Make the pixels of video available
38 | // Difference between the current frame and the stored background
39 | int presenceSum = 0;
40 | for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
41 | // Fetch the current color in that location, and also the color
42 | // of the background in that spot
43 | color currColor = video.pixels[i];
44 | color bkgdColor = backgroundPixels[i];
45 | // Extract the red, green, and blue components of the current pixel's color
46 | int currR = (currColor >> 16) & 0xFF;
47 | int currG = (currColor >> 8) & 0xFF;
48 | int currB = currColor & 0xFF;
49 | // Extract the red, green, and blue components of the background pixel's color
50 | int bkgdR = (bkgdColor >> 16) & 0xFF;
51 | int bkgdG = (bkgdColor >> 8) & 0xFF;
52 | int bkgdB = bkgdColor & 0xFF;
53 | // Compute the difference of the red, green, and blue values
54 | int diffR = abs(currR - bkgdR);
55 | int diffG = abs(currG - bkgdG);
56 | int diffB = abs(currB - bkgdB);
57 | // Add these differences to the running tally
58 | presenceSum += diffR + diffG + diffB;
59 | // Render the difference image to the screen
60 | pixels[i] = color(diffR, diffG, diffB);
61 | // The following line does the same thing much faster, but is more technical
62 | //pixels[i] = 0xFF000000 | (diffR << 16) | (diffG << 8) | diffB;
63 | }
64 | updatePixels(); // Notify that the pixels[] array has changed
65 | println(presenceSum); // Print out the total amount of movement
66 | }
67 | }
68 |
69 | // When a key is pressed, capture the background image into the backgroundPixels
70 | // buffer, by copying each of the current frame's pixels into it.
71 | void keyPressed() {
72 | video.loadPixels();
73 | arraycopy(video.pixels, backgroundPixels);
74 | }
75 |
--------------------------------------------------------------------------------
/examples/Capture/ColorSorting/ColorSorting.pde:
--------------------------------------------------------------------------------
/**
 * Color Sorting
 * by Ben Fry.
 *
 * Example that sorts all colors from the incoming video
 * and arranges them into vertical bars.
 */


import processing.video.*;

Capture video;
boolean cheatScreen;   // when true, the raw camera image is overlaid on screen

// Parallel arrays, one entry per sampled video pixel:
// captureColors - colors sampled from the current frame
// drawColors    - colors actually drawn, eased toward captureColors
// bright        - squared color magnitude, used as the sort key
// (Tuple is defined in Tuple.pde alongside this sketch.)
Tuple[] captureColors;
Tuple[] drawColors;
int[] bright;

// How many pixels to skip in either direction
int increment = 5;

void setup() {
  size(800, 600);

  // This is the default video input, see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);

  // Start capturing the images from the camera
  video.start();

  // One sample per increment x increment cell of the video frame
  int count = (video.width * video.height) / (increment * increment);
  bright = new int[count];
  captureColors = new Tuple[count];
  drawColors = new Tuple[count];
  for (int i = 0; i < count; i++) {
    captureColors[i] = new Tuple();
    // Start the displayed colors at mid gray
    drawColors[i] = new Tuple(0.5, 0.5, 0.5);
  }
}


void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();

    background(0);
    noStroke();

    // Sample the frame on an increment-spaced grid
    int index = 0;
    for (int j = 0; j < video.height; j += increment) {
      for (int i = 0; i < video.width; i += increment) {
        int pixelColor = video.pixels[j*video.width + i];

        // Unpack the channels with shifts/masks (faster than red()/green()/blue())
        int r = (pixelColor >> 16) & 0xff;
        int g = (pixelColor >> 8) & 0xff;
        int b = pixelColor & 0xff;

        // Technically would be sqrt of the following, but no need to do
        // sqrt before comparing the elements since we're only ordering
        bright[index] = r*r + g*g + b*b;
        captureColors[index].set(r, g, b);

        index++;
      }
    }
    // Order the sampled colors by their brightness key
    sort(index, bright, captureColors);

    // Draw the sorted colors as vertical bars spanning the window height
    beginShape(QUAD_STRIP);
    for (int i = 0; i < index; i++) {
      // Ease each displayed color 10% of the way toward its target
      drawColors[i].target(captureColors[i], 0.1);
      // Presumably applies the tuple as the current fill color —
      // defined in Tuple.pde; confirm there.
      drawColors[i].phil();

      float x = map(i, 0, index, 0, width);
      vertex(x, 0);
      vertex(x, height);
    }
    endShape();

    if (cheatScreen) {
      //image(video, 0, height - video.height);
      // Faster method of displaying pixels array on screen
      set(0, height - video.height, video);
    }
  }
}


// 'g' saves a frame to disk; 'c' toggles the camera-image overlay
void keyPressed() {
  if (key == 'g') {
    saveFrame();
  } else if (key == 'c') {
    cheatScreen = !cheatScreen;
  }
}


// Functions to handle sorting the color data


// Sorts the first 'length' entries of a ascending, permuting stuff in parallel
// so each color stays paired with its brightness key.
void sort(int length, int[] a, Tuple[] stuff) {
  sortSub(a, stuff, 0, length - 1);
}


// Swaps entries i and j in both parallel arrays.
void sortSwap(int[] a, Tuple[] stuff, int i, int j) {
  int T = a[i];
  a[i] = a[j];
  a[j] = T;

  Tuple v = stuff[i];
  stuff[i] = stuff[j];
  stuff[j] = v;
}


// Recursive quicksort over a[lo0..hi0], using the middle element as pivot
// and mirroring every swap into the parallel stuff array.
void sortSub(int[] a, Tuple[] stuff, int lo0, int hi0) {
  int lo = lo0;
  int hi = hi0;
  int mid;

  if (hi0 > lo0) {
    mid = a[(lo0 + hi0) / 2];

    // Partition: advance the cursors past elements already on the
    // correct side of the pivot, swapping misplaced pairs
    while (lo <= hi) {
      while ((lo < hi0) && (a[lo] < mid)) {
        ++lo;
      }
      while ((hi > lo0) && (a[hi] > mid)) {
        --hi;
      }
      if (lo <= hi) {
        sortSwap(a, stuff, lo, hi);
        ++lo;
        --hi;
      }
    }

    // Recurse into the two partitions
    if (lo0 < hi)
      sortSub(a, stuff, lo0, hi);

    if (lo < hi0)
      sortSub(a, stuff, lo, hi0);
  }
}
--------------------------------------------------------------------------------
/examples/Capture/Spatiotemporal/Spatiotemporal.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * Spatiotemporal
3 | * by David Muth
4 | *
5 | * Records a number of video frames into memory, then plays back the video
6 | * buffer by turning the time axis into the x-axis and vice versa
7 | */
8 |
9 | import processing.video.*;
10 |
11 | Capture video;
12 | int signal = 0;
13 |
14 | //the buffer for storing video frames
15 | ArrayList frames;
16 |
17 | //different program modes for recording and playback
18 | int mode = 0;
19 | int MODE_NEWBUFFER = 0;
20 | int MODE_RECORDING = 1;
21 | int MODE_PLAYBACK = 2;
22 |
23 | int currentX = 0;
24 |
25 | void setup() {
26 | size(640, 480);
27 |
28 | // This the default video input, see the GettingStartedCapture
29 | // example if it creates an error
30 | video = new Capture(this, width, height);
31 |
32 | // Start capturing the images from the camera
33 | video.start();
34 | }
35 |
36 | void captureEvent(Capture c) {
37 | c.read();
38 |
39 | //create a new buffer in case one is needed
40 | if (mode == MODE_NEWBUFFER) {
41 | frames = new ArrayList();
42 | mode = MODE_RECORDING;
43 | }
44 |
45 | //record into the buffer until there are enough frames
46 | if (mode == MODE_RECORDING) {
47 | //copy the current video frame into an image, so it can be stored in the buffer
48 | PImage img = createImage(width, height, RGB);
49 | video.loadPixels();
50 | arrayCopy(video.pixels, img.pixels);
51 |
52 | frames.add(img);
53 |
54 | //in case enough frames have been recorded, switch to playback mode
55 | if (frames.size() >= width) {
56 | mode = MODE_PLAYBACK;
57 | }
58 | }
59 | }
60 |
61 | void draw() {
62 | loadPixels();
63 |
64 | //code for the recording mode
65 | if (mode == MODE_RECORDING) {
66 | //set the image counter to 0
67 | int currentImage = 0;
68 |
69 | //begin a loop for displaying pixel columns
70 | for (int x = 0; x < video.width; x++) {
71 | //go through the frame buffer and pick an image using the image counter
72 | if (currentImage < frames.size()) {
73 | PImage img = (PImage)frames.get(currentImage);
74 |
75 | //display a pixel column of the current image
76 | if (img != null) {
77 | img.loadPixels();
78 |
79 | for (int y = 0; y < video.height; y++) {
80 | pixels[x + y * width] = img.pixels[x + y * video.width];
81 | }
82 | }
83 |
84 | //increase the image counter
85 | currentImage++;
86 |
87 | }
88 | else {
89 | break;
90 | }
91 | }
92 | }
93 |
94 | //code for displaying the spatiotemporal transformation
95 | if (mode == MODE_PLAYBACK) {
96 |
97 | //begin a loop for displaying pixel columns
98 | for (int x = 0; x < video.width; x++) {
99 | //get an image from the buffer using loopcounter x as the index
100 | PImage img = (PImage)frames.get(x);
101 |
102 | if (img != null) {
103 | img.loadPixels();
104 |
105 | //pick the same column from each image for display,
106 | //then distribute the columns over the x-axis on the screen
107 | for(int y = 0; y < video.height; y++) {
108 | pixels[x + y * width] = img.pixels[currentX + y * video.width];
109 | }
110 | }
111 | }
112 |
113 | //a different column shall be used next time draw() is being called
114 | currentX++;
115 |
116 | //if the end of the buffer is reached
117 | if(currentX >= video.width) {
118 | //create a new buffer when the next video frame arrives
119 | mode = MODE_NEWBUFFER;
120 | //reset the column counter
121 | currentX = 0;
122 | }
123 | }
124 |
125 | updatePixels();
126 | }
127 |
128 |
129 |
130 |
131 |
132 |
--------------------------------------------------------------------------------
/examples/Capture/AsciiVideo/AsciiVideo.pde:
--------------------------------------------------------------------------------
/**
 * ASCII Video
 * by Ben Fry.
 *
 *
 * Text characters have been used to represent images since the earliest computers.
 * This sketch is a simple homage that re-interprets live video as ASCII text.
 * See the keyPressed() function for more options, like changing the font size.
 */

import processing.video.*;

Capture video;
boolean cheatScreen;  // when true, the raw camera image is overlaid on screen

// Characters sorted according to their visual density
String letterOrder =
  " .`-_':,;^=+/\"|)\\<>)iv%xclrs{*}I?!][1taeo7zjLu" +
  "nT#JCwfy325Fp6mqSghVd4EgXPGZbYkOA&8U$@KHDBWNMR0Q";
// Lookup table mapping a brightness level 0..255 to a character
char[] letters;

// Smoothed brightness per video pixel (damped to reduce flicker)
float[] bright;

PFont font;
float fontSize = 1.5;


void setup() {
  size(640, 480);

  // This the default video input, see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);

  // Start capturing the images from the camera
  video.start();

  int count = video.width * video.height;
  //println(count);

  font = loadFont("UniversLTStd-Light-48.vlw");

  // for the 256 levels of brightness, distribute the letters across
  // the an array of 256 elements to use for the lookup
  letters = new char[256];
  for (int i = 0; i < 256; i++) {
    int index = int(map(i, 0, 256, 0, letterOrder.length()));
    letters[i] = letterOrder.charAt(index);
  }

  // current brightness for each point
  bright = new float[count];
  for (int i = 0; i < count; i++) {
    // set each brightness at the midpoint to start
    bright[i] = 128;
  }
}


// Called by the video library whenever a new frame is available.
void captureEvent(Capture c) {
  c.read();
}


void draw() {
  background(0);

  pushMatrix();

  // Screen distance covered by one video pixel, horizontally and vertically
  float hgap = width / float(video.width);
  float vgap = height / float(video.height);

  // Scale so one character cell maps to one video pixel on screen
  scale(max(hgap, vgap) * fontSize);
  textFont(font, fontSize);

  int index = 0;
  video.loadPixels();
  // NOTE(review): y starts at 1, so the body runs video.height-1 times and
  // the last row of video pixels is never drawn — presumably intentional,
  // since the downward translate happens before each row; confirm.
  for (int y = 1; y < video.height; y++) {

    // Move down for next line
    translate(0, 1.0 / fontSize);

    pushMatrix();
    for (int x = 0; x < video.width; x++) {
      int pixelColor = video.pixels[index];
      // Faster method of calculating r, g, b than red(), green(), blue()
      int r = (pixelColor >> 16) & 0xff;
      int g = (pixelColor >> 8) & 0xff;
      int b = pixelColor & 0xff;

      // Another option would be to properly calculate brightness as luminance:
      // luminance = 0.3*red + 0.59*green + 0.11*blue
      // Or you could instead red + green + blue, and make the the values[] array
      // 256*3 elements long instead of just 256.
      int pixelBright = max(r, g, b);

      // The 0.1 value is used to damp the changes so that letters flicker less
      float diff = pixelBright - bright[index];
      bright[index] += diff * 0.1;

      // Draw the character for this (damped) brightness in the pixel's color
      fill(pixelColor);
      int num = int(bright[index]);
      text(letters[num], 0, 0);

      // Move to the next pixel
      index++;

      // Move over for next character
      translate(1.0 / fontSize, 0);
    }
    popMatrix();
  }
  popMatrix();

  if (cheatScreen) {
    //image(video, 0, height - video.height);
    // set() is faster than image() when drawing untransformed images
    set(0, height - video.height, video);
  }
}


/**
 * Handle key presses:
 * 'c' toggles the cheat screen that shows the original image in the corner
 * 'g' grabs an image and saves the frame to a tiff image
 * 'f' and 'F' increase and decrease the font size
 */
void keyPressed() {
  switch (key) {
    case 'g': saveFrame(); break;
    case 'c': cheatScreen = !cheatScreen; break;
    case 'f': fontSize *= 1.1; break;
    case 'F': fontSize *= 0.9; break;
  }
}
--------------------------------------------------------------------------------
/resources/stylesheet.css:
--------------------------------------------------------------------------------
1 | /* Javadoc style sheet */
2 | /* Define colors, fonts and other style attributes here to override the defaults */
3 | /* processingLibs style by andreas schlegel, sojamo */
4 |
5 |
6 | body {
7 | margin : 0;
8 | padding : 0;
9 | padding-left : 10px;
10 | padding-right : 8px;
11 | background-color : #FFFFFF;
12 | font-family : Verdana, Geneva, Arial, Helvetica, sans-serif;
13 | font-size : 100%;
14 | font-size : 0.7em;
15 | font-weight : normal;
16 | line-height : normal;
17 | margin-bottom:30px;
18 | }
19 |
20 |
21 |
22 |
23 | /* Headings */
24 | h1, h2, h3, h4, h5, th {
25 | font-family :Arial, Helvetica, sans-serif;
26 | font-size:1.2em;
27 | }
28 |
29 |
30 | p {
31 | font-size : 1em;
32 | width:80%;
33 | }
34 |
35 | pre, code {
36 | font-family : "Courier New", Courier, monospace;
37 | font-size : 12px;
38 | line-height : normal;
39 | }
40 |
41 |
42 |
43 | table {
44 | border:0;
45 | margin-bottom:10px;
46 | margin-top:10px;
47 | }
48 |
49 |
50 | tr, td {
51 | border-top: 0px solid;
52 | border-left: 0px solid;
53 | padding-top:8px;
54 | padding-bottom:8px;
55 | }
56 |
57 |
58 |
59 | hr {
60 | border:0;
61 | height:1px;
62 | padding:0;
63 | margin:0;
64 | margin-bottom:4px;
65 |
66 | }
67 |
68 |
69 |
70 | dd, th, td, font {
71 | font-size:1.0em;
72 | line-height:1.0em;
73 | }
74 |
75 |
76 |
77 | dt {
78 | margin-bottom:0px;
79 | }
80 |
81 |
82 |
83 | dd {
84 | margin-top:2px;
85 | margin-bottom:4px;
86 | }
87 |
88 |
89 |
90 | a {
91 | text-decoration: underline;
92 | font-weight: normal;
93 | }
94 |
95 | a:hover,
96 | a:active {
97 | text-decoration: underline;
98 | font-weight: normal;
99 | }
100 |
101 | a:visited,
102 | a:link:visited {
103 | text-decoration: underline;
104 | font-weight: normal;
105 | }
106 |
107 |
108 | img {
109 | border: 0px solid #000000;
110 | }
111 |
112 |
113 |
114 | /* Navigation bar fonts */
115 | .NavBarCell1 {
116 | border:0;
117 | }
118 |
119 | .NavBarCell1Rev {
120 | border:0;
121 | }
122 |
123 | .NavBarFont1 {
124 | font-family: Arial, Helvetica, sans-serif;
125 | font-size:1.1em;
126 | }
127 |
128 |
129 | .NavBarFont1 b {
130 | font-weight:normal;
131 | }
132 |
133 |
134 |
135 | .NavBarFont1:after, .NavBarFont1Rev:after {
136 | font-weight:normal;
137 | content: " \\";
138 | }
139 |
140 |
141 | .NavBarFont1Rev {
142 | font-family: Arial, Helvetica, sans-serif;
143 | font-size:1.1em;
144 | }
145 |
146 | .NavBarFont1Rev b {
147 | font-family: Arial, Helvetica, sans-serif;
148 | font-size:1.1em;
149 | font-weight:normal;
150 | }
151 |
152 | .NavBarCell2 {
153 | font-family: Arial, Helvetica, sans-serif;
154 | }
155 |
156 | .NavBarCell3 {
157 | font-family: Arial, Helvetica, sans-serif;
158 | }
159 |
160 |
161 |
162 | font.FrameItemFont {
163 | font-family: Helvetica, Arial, sans-serif;
164 | font-size:1.1em;
165 | line-height:1.1em;
166 | }
167 |
168 | font.FrameHeadingFont {
169 | font-family: Helvetica, Arial, sans-serif;
170 | line-height:32px;
171 | }
172 |
173 | /* Font used in left-hand frame lists */
174 | .FrameTitleFont {
175 | font-family: Helvetica, Arial, sans-serif
176 | }
177 |
178 |
179 | .toggleList {
180 | padding:0;
181 | margin:0;
182 | margin-top:12px;
183 | }
184 |
185 | .toggleList dt {
186 | font-weight:bold;
187 | font-size:12px;
188 | font-family:arial,sans-serif;
189 | padding:0px;
190 | margin:10px 0px 10px 0px;
191 | }
192 |
193 | .toggleList dt span {
194 | font-family: monospace;
195 | padding:0;
196 | margin:0;
197 | }
198 |
199 |
200 | .toggleList dd {
201 | margin:0;
202 | padding:0;
203 | }
204 |
205 | html.isjs .toggleList dd {
206 | display: none;
207 | }
208 |
209 | .toggleList pre {
210 | padding: 4px 4px 4px 4px;
211 | }
212 |
213 |
214 |
215 |
216 |
217 | /* COLORS */
218 |
219 | pre, code {
220 | color: #000000;
221 | }
222 |
223 |
224 | body {
225 | color : #333333;
226 | background-color :#FFFFFF;
227 | }
228 |
229 |
230 | h1, h2, h3, h4, h5, h6 {
231 | color:#555;
232 | }
233 |
234 | a,
235 | .toggleList dt {
236 | color: #1a7eb0;
237 | }
238 |
239 | a:hover,
240 | a:active {
241 | color: #1a7eb0;
242 | }
243 |
244 | a:visited,
245 | a:link:visited {
246 | color: #1a7eb0;
247 | }
248 |
249 | td,tr {
250 | border-color: #999999;
251 | }
252 |
253 | hr {
254 | color:#999999;
255 | background:#999999;
256 | }
257 |
258 |
259 | .TableHeadingColor {
260 | background: #dcdcdc;
261 | color: #555;
262 | }
263 |
264 |
265 | .TableSubHeadingColor {
266 | background: #EEEEFF
267 | }
268 |
269 | .TableRowColor {
270 | background: #FFFFFF
271 | }
272 |
273 |
274 | .NavBarCell1 {
275 | background-color:#dcdcdc;
276 | color:#000;
277 | }
278 |
279 | .NavBarCell1 a {
280 | color:#333;
281 | }
282 |
283 |
284 | .NavBarCell1Rev {
285 | background-color:transparent;
286 | }
287 |
288 | .NavBarFont1 {
289 | color:#333;
290 | }
291 |
292 |
293 | .NavBarFont1Rev {
294 | color:#fff;
295 | }
296 |
297 | .NavBarCell2 {
298 | background-color:#999;
299 | }
300 |
301 | .NavBarCell2 a {
302 | color:#fff;
303 | }
304 |
305 |
306 |
307 | .NavBarCell3 {
308 | background-color:#dcdcdc;
309 | }
310 |
311 |
--------------------------------------------------------------------------------
/examples/Capture/HsvSpace/HsvSpace.pde:
--------------------------------------------------------------------------------
1 | /**
2 | * HSV Space
3 | * by Ben Fry.
4 | *
5 | * Arrange the pixels from live video into the HSV Color Cone.
6 | */
7 |
8 | import processing.video.*;
9 | import java.awt.Color;
10 |
11 | Capture video;
12 | int count;
13 | boolean cheatScreen = true;
14 |
15 | static final float BOX_SIZE = 0.75;
16 | static final float CONE_HEIGHT = 1.2;
17 | static final float MAX_RADIUS = 10;
18 | static final float ROT_INCREMENT = 3.0;
19 | static final float TRANS_INCREMENT = 1;
20 | static final float STEP_AMOUNT = 0.1;
21 |
22 | Tuple[] farbe;
23 | Tuple[] trans;
24 |
25 | float[] hsb = new float[3];
26 |
27 | float leftRightAngle;
28 | float upDownAngle;
29 | float fwdBackTrans;
30 | float upDownTrans;
31 | float leftRightTrans;
32 | boolean motion;
33 |
34 | boolean blobby = false;
35 |
36 |
/**
 * Set up the 3D scene, open the default camera, and allocate the
 * per-pixel smoothed color/position tuples.
 */
void setup() {
  size(640, 480, P3D);

  // This the default video input, see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);

  // Start capturing the images from the camera
  video.start();

  // One cone point per video pixel.
  count = video.width * video.height;

  sphereDetail(60);

  upDownTrans = 0;
  leftRightTrans = 0;
  motion = false;

  // Hand-tuned starting camera pose that frames the cone nicely.
  leftRightAngle = 101.501297;
  upDownAngle = -180.098694;
  fwdBackTrans = 14.800003;

  // farbe ("color" in German) holds the eased RGB of each pixel;
  // trans holds its eased XYZ position inside the HSV cone.
  farbe = new Tuple[count];
  trans = new Tuple[count];
  for (int i = 0; i < count; i++) {
    farbe[i] = new Tuple();
    trans[i] = new Tuple();
  }
}
66 |
67 |
/**
 * Render one frame: place a small box (or sphere, in "blobby" mode) for
 * every video pixel at its position inside the HSV color cone, easing
 * both position and color toward the latest frame for smooth motion.
 */
void draw() {
  background(0);

  if (!blobby) {
    lights();
  }

  pushMatrix();
  // Center the scene and normalize scale to the window size.
  translate(width/2, height/2);
  scale(min(width, height) / 10.0);

  // Apply the user-controlled camera pose (see mouseDragged/keyPressed).
  translate(0, 0, -20 + fwdBackTrans);
  rotateY(radians(36 + leftRightAngle)); //, 0, 1, 0);
  rotateX(radians(-228 + upDownAngle)); //, 1, 0, 0);

  // Reference wireframe of the HSV cone.
  strokeWeight(0.1);
  if (blobby) {
    stroke(0.35, 0.35, 0.25, 0.15);
    wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 18, 18);
  }
  else {
    stroke(0.35, 0.35, 0.25, 0.25);
    wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 180, 18);
  }

  noStroke();
  video.loadPixels();
  for (int i = 0; i < count; i++) {
    int pixelColor = video.pixels[i];
    int r = (pixelColor >> 16) & 0xff;
    int g = (pixelColor >> 8) & 0xff;
    int b = pixelColor & 0xff;
    Color.RGBtoHSB(r, g, b, hsb);

    // Cylindrical HSV mapping: hue -> angle, saturation*value -> radius,
    // value -> height along the cone axis.
    float radius = hsb[1] * hsb[2];
    float angle = hsb[0] * 360.0 * DEG_TO_RAD;
    float nx = MAX_RADIUS * radius * cos(angle);
    float ny = MAX_RADIUS * radius * sin(angle);
    float nz = hsb[2] * MAX_RADIUS * CONE_HEIGHT;

    // Ease the stored position/color toward the target by STEP_AMOUNT.
    trans[i].set(trans[i].x - (trans[i].x - nx)*STEP_AMOUNT,
                 trans[i].y - (trans[i].y - ny)*STEP_AMOUNT,
                 trans[i].z - (trans[i].z - nz)*STEP_AMOUNT);

    farbe[i].set(farbe[i].x - (farbe[i].x - r)*STEP_AMOUNT,
                 farbe[i].y - (farbe[i].y - g)*STEP_AMOUNT,
                 farbe[i].z - (farbe[i].z - b)*STEP_AMOUNT);

    pushMatrix();
    // phil()/tran() are defined in Tuple.pde (not shown here); they
    // presumably apply the fill color and translation — verify there.
    farbe[i].phil();
    trans[i].tran();

    rotate(radians(45), 1, 1, 0);
    if (blobby) {
      sphere(BOX_SIZE * 2); //, 20, 20);
    } else {
      box(BOX_SIZE);
    }

    popMatrix();
  }
  popMatrix();

  // Slow automatic orbit, toggled with the 'm' key.
  if (motion) {
    upDownAngle--;
    leftRightAngle--;
  }

  if (cheatScreen) {
    image(video, 0, height - video.height);
  }
}
140 |
141 |
142 | void captureEvent(Capture c) {
143 | c.read();
144 | }
145 |
146 |
147 | void keyPressed() {
148 | switch (key) {
149 | case 'g':
150 | saveFrame();
151 | break;
152 | case 'c':
153 | cheatScreen = !cheatScreen;
154 | break;
155 |
156 | case 'm':
157 | motion = !motion;
158 | break;
159 | case '=':
160 | fwdBackTrans += TRANS_INCREMENT;
161 | break;
162 | case '-':
163 | fwdBackTrans -= TRANS_INCREMENT;
164 | break;
165 | case 'b':
166 | blobby = !blobby;
167 | break;
168 | }
169 | }
170 |
171 |
172 | void mouseDragged() {
173 | float dX, dY;
174 |
175 | switch (mouseButton) {
176 | case LEFT: // left right up down
177 | dX = pmouseX - mouseX;
178 | dY = pmouseY - mouseY;
179 | leftRightAngle -= dX * 0.2;
180 | upDownAngle += dY * 0.4;
181 | break;
182 |
183 | case CENTER:
184 | dX = pmouseX - mouseX;
185 | dY = pmouseY - mouseY;
186 | leftRightTrans -= TRANS_INCREMENT * dX;
187 | upDownTrans -= TRANS_INCREMENT * dY;
188 | break;
189 |
190 | case RIGHT: // in and out
191 | dY = (float) (pmouseY - mouseY);
192 | fwdBackTrans -= TRANS_INCREMENT * dY;
193 | break;
194 | }
195 | }
196 |
197 |
// Draw a wireframe cone: straight lines from points on the base rim
// (at z = height) up to the apex at the origin, plus the rim circle.
// NOTE(review): the stepX/stepY parameters are accepted but ignored —
// the line count is hard-coded to 10 below. Callers pass 18/180, so
// confirm whether stepX was meant to drive the tessellation before
// wiring it up (doing so would change the rendered output).
// Also note: the 'height' parameter shadows Processing's global height.
void wireCone(float radius, float height, int stepX, int stepY) {
  int steps = 10;
  stroke(40);
  for (int i = 0; i < steps; i++) {
    float angle = map(i, 0, steps, 0, TWO_PI);
    float x = radius * cos(angle);
    float y = radius * sin(angle);
    line(x, y, height, 0, 0, 0);
  }
  noFill();
  pushMatrix();
  translate(0, 0, height);
  ellipseMode(CENTER);
  ellipse(0, 0, radius, radius);
  popMatrix();
}
214 |
--------------------------------------------------------------------------------
/scripts/macosx_relocator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Adapted from osxrelocator, https://github.com/GStreamer/cerbero/blob/master/cerbero/tools/osxrelocator.py
3 | # Part of cerbero - a multi-platform build system for Open Source software
4 | # Copyright (C) 2012 Andoni Morales Alastruey
5 | #
6 | # This library is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Library General Public
8 | # License as published by the Free Software Foundation; either
9 | # version 2 of the License, or (at your option) any later version.
10 | #
11 | # This library is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 | # Library General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Library General Public
17 | # License along with this library; if not, write to the
18 | # Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 | # Boston, MA 02111-1307, USA.
20 |
21 | from __future__ import print_function
22 | import os
23 | import sys
24 | import subprocess
25 |
26 |
27 | INT_CMD = 'install_name_tool'
28 | OTOOL_CMD = 'otool'
29 |
30 |
def shell_call(cmd, cmd_dir='.', fail=True):
    """Run *cmd* in directory *cmd_dir* and return its exit status.

    If the command fails, raise SystemError when *fail* is True;
    otherwise swallow the failure and return 0.
    """
    try:
        status = subprocess.check_call(cmd, cwd=cmd_dir,
                                       env=os.environ.copy())
    except subprocess.CalledProcessError:
        if not fail:
            return 0
        raise SystemError("Error running command: {}".format(cmd))
    return status
42 |
43 |
def shell_check_call(cmd):
    """Run *cmd* and return its captured stdout.

    On Python 3 the captured bytes are decoded to str; on Python 2 the
    raw output is returned unchanged. Raises SystemError if the process
    cannot be spawned.
    """
    try:
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output = proc.communicate()[0]
    except Exception:
        raise SystemError("Error running command: {}".format(cmd))
    if sys.version_info > (3, 0):
        output = output.decode()
    return output
55 |
class OSXRelocator(object):
    '''
    Wrapper for OS X's install_name_tool and otool commands to help
    relocating shared libraries.

    It parses lib/ /libexec and bin/ directories, changes the prefix path of
    the shared libraries that an object file uses and changes it's library
    ID if the file is a shared library.
    '''

    def __init__(self, root, lib_prefix, new_lib_prefix, recursive):
        # root: directory tree to scan; every dependency path starting
        # with lib_prefix is rewritten to start with new_lib_prefix.
        self.root = root
        self.lib_prefix = self._fix_path(lib_prefix)
        self.new_lib_prefix = self._fix_path(new_lib_prefix)
        self.recursive = recursive

    def relocate(self):
        # The empty-string filter also admits extension-less binaries.
        self.parse_dir(self.root, filters=['', '.dylib', '.so'])

    def relocate_file(self, object_file, id=None):
        # Rewrite the dependency entries first, then the library's own ID.
        self.change_libs_path(object_file)
        self.change_id(object_file, id)

    def change_id(self, object_file, id=None):
        # Default new ID: the old path with the prefix substituted.
        id = id or object_file.replace(self.lib_prefix, self.new_lib_prefix)
        filename = os.path.basename(object_file)
        # Only shared libraries carry an install ID; skip other files.
        if not (filename.endswith('so') or filename.endswith('dylib')):
            return
        # fail=False: tolerate files that reject -id instead of aborting.
        cmd = [INT_CMD, "-id", id, object_file]
        shell_call(cmd, fail=False)

    def change_libs_path(self, object_file):
        # Rewrite every dependency entry that contains the old prefix.
        for lib in self.list_shared_libraries(object_file):
            if self.lib_prefix in lib:
                new_lib = lib.replace(self.lib_prefix, self.new_lib_prefix)
                cmd = [INT_CMD, "-change", lib, new_lib, object_file]
                shell_call(cmd)

    def parse_dir(self, dir_path, filters=None):
        # Walk the tree, relocating every regular file whose extension
        # passes the filter; symlinks are skipped so each real file is
        # processed exactly once.
        for dirpath, dirnames, filenames in os.walk(dir_path):
            for f in filenames:
                if filters is not None and \
                        os.path.splitext(f)[1] not in filters:
                    continue
                fn = os.path.join(dirpath, f)
                if os.path.islink(fn):
                    continue
                if not os.path.isfile(fn):
                    continue
                self.relocate_file(fn)
            # os.walk yields the top directory first, so breaking after
            # one iteration gives a non-recursive scan.
            if not self.recursive:
                break

    @staticmethod
    def list_shared_libraries(object_file):
        # Parse `otool -L` output into a list of dependency paths.
        cmd = [OTOOL_CMD, "-L", object_file]
        res = shell_check_call(cmd).split('\n')
        # We don't use the first line
        libs = res[1:]
        # Remove the first character tabulation
        libs = [x[1:] for x in libs]
        # Remove the version info
        libs = [x.split(' ', 1)[0] for x in libs]
        return libs

    @staticmethod
    def library_id_name(object_file):
        # Return the install ID reported by `otool -D`.
        cmd = [OTOOL_CMD, "-D", object_file]
        res = shell_check_call(cmd).split('\n')[0]
        # the library name ends with ':'
        lib_name = res[:-1]
        return lib_name

    def _fix_path(self, path):
        # Currently a no-op; trailing-slash stripping was disabled.
        # if path.endswith('/'):
        #     return path[:-1]
        return path
133 |
134 |
class Main(object):
    """Command-line driver: parse arguments and run the relocator."""

    def run(self):
        # We use OptionParser instead of ArgumentsParse because this script
        # might be run in OS X 10.6 or older, which do not provide the argparse
        # module
        import optparse
        usage = "usage: %prog [options] directory old_prefix new_prefix"
        description = 'Rellocates object files changing the dependant '\
            ' dynamic libraries location path with a new one'
        parser = optparse.OptionParser(usage=usage, description=description)
        parser.add_option('-r', '--recursive', action='store_true',
                          default=False, dest='recursive',
                          help='Scan directories recursively')

        options, args = parser.parse_args()
        # Exactly three positional arguments are required:
        # directory, old_prefix, new_prefix.
        if len(args) != 3:
            parser.print_usage()
            exit(1)

        if not os.path.exists(args[0]):
            print('Error: directory', args[0], 'does not exist')
            exit(1)

        relocator = OSXRelocator(args[0], args[1], args[2], options.recursive)
        relocator.relocate()
        exit(0)
162 |
def main():
    """Script entry point: build the CLI driver and run it."""
    Main().run()
166 |
167 | if __name__ == "__main__":
168 | main()
169 |
--------------------------------------------------------------------------------
/scripts/macosx_remove_extra_libs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # This script removes base libraries that are not present as the dependency of any other library
4 | # in the distribution
5 |
6 | # Original version by Gottfried Haider:
7 | # https://github.com/gohai/processing-glvideo/blob/master/src/native/macosx_remove_extra_libs.py
8 |
9 | from __future__ import print_function
10 | import os
11 | import sys
12 | import subprocess
13 | import re
14 |
# Root of the bundled native libraries for the macOS universal build,
# relative to the scripts/ directory this file lives in.
lib_folder = '../library/macos-universal'

# First, remove libraries from packages we don't bundle with the video library:
# gstreamer-1.0-codecs-gpl
# gstreamer-1.0-codecs-restricted
# gstreamer-1.0-net-restricted
# gstreamer-1.0-effects
# gstreamer-1.0-encoding
# gstreamer-1.0-visualizers
# gstreamer-1.0-devtools
# gstreamer-1.0-dvd

# Base dylibs (and their unversioned symlink names) to delete.
libs_to_remove = [
    "liba52.0.dylib",
    "liba52.dylib",
    "libass.9.dylib",
    "libass.dylib",
    "libdca.0.dylib",
    "libdca.dylib",
    "libdvdnav.4.dylib",
    "libdvdnav.dylib",
    "libdvdread.4.dylib",
    "libdvdread.dylib",
    "libmms.0.dylib",
    "libmms.dylib",
    "libopencore-amrnb.0.dylib",
    "libopencore-amrnb.dylib",
    "libopencore-amrwb.0.dylib",
    "libopencore-amrwb.dylib",
    "librtmp.1.dylib",
    "librtmp.dylib",
    "libSoundTouch.1.dylib",
    "libSoundTouch.dylib",
    "libvisual-0.4.0.dylib",
    "libvisual-0.4.dylib",
    "libvo-aacenc.0.dylib",
    "libvo-aacenc.dylib",
    "libwebrtc_audio_processing.0.dylib",
    "libwebrtc_audio_processing.dylib",
    "libx264.148.dylib",
    "libx264.dylib"]

# GStreamer plugin dylibs (under gstreamer-1.0/) to delete.
plugins_to_remove = [
    "libgsta52dec.dylib",
    "libgstaccurip.dylib",
    "libgstaiff.dylib",
    "libgstalpha.dylib",
    "libgstalphacolor.dylib",
    "libgstamrnb.dylib",
    "libgstamrwbdec.dylib",
    "libgstasf.dylib",
    "libgstasfmux.dylib",
    "libgstassrender.dylib",
    "libgstaudiobuffersplit.dylib",
    "libgstaudiofx.dylib",
    "libgstaudiofxbad.dylib",
    "libgstaudiolatency.dylib",
    "libgstaudiovisualizers.dylib",
    "libgstautoconvert.dylib",
    "libgstbayer.dylib",
    "libgstcairo.dylib",
    "libgstclosedcaption.dylib",
    "libgstcoloreffects.dylib",
    "libgstcutter.dylib",
    "libgstdebug.dylib",
    "libgstdebugutilsbad.dylib",
    "libgstdeinterlace.dylib",
    "libgstdtmf.dylib",
    "libgstdtsdec.dylib",
    "libgstdvdlpcmdec.dylib",
    "libgstdvdread.dylib",
    "libgstdvdsub.dylib",
    "libgsteffectv.dylib",
    "libgstencoding.dylib",
    "libgstequalizer.dylib",
    "libgstfieldanalysis.dylib",
    "libgstfreeverb.dylib",
    "libgstfrei0r.dylib",
    "libgstgaudieffects.dylib",
    "libgstgdkpixbuf.dylib",
    "libgstgeometrictransform.dylib",
    "libgstgoom.dylib",
    "libgstgoom2k1.dylib",
    "libgstimagefreeze.dylib",
    "libgstinter.dylib",
    "libgstinterlace.dylib",
    "libgstinterleave.dylib",
    "libgstivtc.dylib",
    "libgstladspa.dylib",
    "libgstlegacyrawparse.dylib",
    "libgstlevel.dylib",
    "libgstlibvisual.dylib",
    "libgstmms.dylib",
    "libgstmpegpsdemux.dylib",
    "libgstmpegpsmux.dylib",
    "libgstmpegtsdemux.dylib",
    "libgstmpegtsmux.dylib",
    "libgstmultifile.dylib",
    "libgstproxy.dylib",
    "libgstrealmedia.dylib",
    "libgstremovesilence.dylib",
    "libgstreplaygain.dylib",
    "libgstresindvd.dylib",
    "libgstrtmp.dylib",
    "libgstsegmentclip.dylib",
    "libgstshapewipe.dylib",
    "libgstsmooth.dylib",
    "libgstsmpte.dylib",
    "libgstsoundtouch.dylib",
    "libgstspectrum.dylib",
    "libgstspeed.dylib",
    "libgstvideobox.dylib",
    "libgstvideocrop.dylib",
    "libgstvideofiltersbad.dylib",
    "libgstvideomixer.dylib",
    "libgstvoaacenc.dylib",
    "libgstwebrtcdsp.dylib",
    "libgstx264.dylib",
    "libgstxingmux.dylib"]
134 |
# Delete the blacklisted base libraries from the distribution folder.
for name in libs_to_remove:
    fn = lib_folder + '/' + name
    if os.path.exists(fn):
        try:
            print('Removing extra ' + fn + ' ... ', end='')
            os.remove(fn)
            print('Done')
        except OSError:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit and
            # programming errors propagate instead of printing 'Fail'.
            print('Fail')
    else:
        print("Library", name, "does not exist")

# Delete the blacklisted GStreamer plugins from the plugin subfolder.
for name in plugins_to_remove:
    fn = lib_folder + '/gstreamer-1.0/' + name
    if os.path.exists(fn):
        try:
            print('Removing extra ' + fn + ' ... ', end='')
            os.remove(fn)
            print('Done')
        except OSError:
            print('Fail')
    else:
        print("Plugin", name, "does not exist")
158 |
# Removing duplicated files...

# Versioned libraries that must be kept even though a shorter sibling
# sharing the same name prefix exists.
exclude = ["libavcodec.58.dylib",
           "libavfilter.7.dylib",
           "libavformat.58.dylib",
           "libavutil.56.dylib",
           "libhogweed.4.dylib",
           "libnettle.6.dylib",
           "libopenh264.4.dylib",
           "libopenjp2.7.dylib",
           "libsrtp.1.dylib",
           "libsrt.1.dylib",
           "libswresample.3.dylib",
           "libtag.1.dylib",
           "libz.1.dylib"]

files = []

# add all modules
for fn in os.listdir(lib_folder):
    if fn.endswith('.so') or fn.endswith('.dylib'):
        files.append(fn)

# For each library, remove any *shorter* sibling that shares its name
# prefix (e.g. the unversioned "libz.dylib" next to "libz.1.dylib"),
# keeping anything listed in `exclude`.
# NOTE(review): `prefix in fn1` is a substring test, not a prefix test,
# so it can over-match (e.g. "libz" inside "xlibz.dylib"); confirm this
# is acceptable before tightening it to fn1.startswith(prefix).
for fn in files:
    p = fn.find('.')
    prefix = fn[0:p]
    for fn1 in files:
        if len(fn) <= len(fn1): continue
        if prefix in fn1 and os.path.exists(lib_folder + '/' + fn1) and not fn1 in exclude:
            try:
                print('Removing duplicate ' + fn1 + ' ... ', end='')
                os.remove(lib_folder + '/' + fn1)
                print('Done')
            except OSError:
                # Narrowed from a bare `except:` — only file-system
                # errors should be reported as 'Fail'.
                print('Fail')
194 |
# Now, removing libraries that are not depended upon...

pattern = re.compile('@loader_path/([^ ]+) ')

required = []
to_check = []

# add all modules
for fn in os.listdir(lib_folder):
    if fn.endswith('.so') or fn.endswith('.dylib'):
        to_check.append(fn)

for fn in os.listdir(lib_folder + '/gstreamer-1.0'):
    if fn.endswith('.so') or fn.endswith('.dylib'):
        to_check.append('gstreamer-1.0/' + fn)

# Every library reachable through an @loader_path reference from the
# initial set is considered required.
while 0 < len(to_check):
    tested = to_check.pop()
    required.append(tested)
    out = subprocess.check_output('otool -L ' + lib_folder + '/' + tested, shell=True)
    if sys.version_info > (3, 0):
        out = out.decode()

    deps = pattern.findall(out)
    for dep in deps:
        # we're in the module directory, remove any trailing ../
        if '/' in tested and dep[0:3] == '../':
            dep = dep[3:]
        if dep not in required:
            required.append(dep)

required.sort()

# remove unneeded libs
for fn in os.listdir(lib_folder):
    # BUG FIX: the original condition was
    #     fn.endswith('.so') or fn.endswith('.dylib') and fn not in required
    # Since `and` binds tighter than `or`, that parsed as
    #     fn.endswith('.so') or (fn.endswith('.dylib') and fn not in required)
    # and removed EVERY .so file regardless of whether it was required.
    if (fn.endswith('.so') or fn.endswith('.dylib')) and fn not in required:
        try:
            print('Removing unused ' + fn + ' ... ', end='')
            os.remove(lib_folder + '/' + fn)
            print('Done')
        except OSError:
            # Narrowed from a bare `except:`.
            print('Fail')
237 |
--------------------------------------------------------------------------------
/resources/code/ExampleTaglet.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2002 Sun Microsystems, Inc. All Rights Reserved.
3 | *
4 | * Redistribution and use in source and binary forms, with or
5 | * without modification, are permitted provided that the following
6 | * conditions are met:
7 | *
8 | * -Redistributions of source code must retain the above copyright
9 | * notice, this list of conditions and the following disclaimer.
10 | *
11 | * -Redistribution in binary form must reproduce the above copyright
12 | * notice, this list of conditions and the following disclaimer in
13 | * the documentation and/or other materials provided with the
14 | * distribution.
15 | *
16 | * Neither the name of Sun Microsystems, Inc. or the names of
17 | * contributors may be used to endorse or promote products derived
18 | * from this software without specific prior written permission.
19 | *
20 | * This software is provided "AS IS," without a warranty of any
21 | * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
22 | * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
23 | * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
24 | * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY
25 | * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR
26 | * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THE SOFTWARE OR
27 | * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE
28 | * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT,
29 | * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
30 | * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF
31 | * THE USE OF OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN
32 | * ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
33 | *
34 | * You acknowledge that Software is not designed, licensed or
35 | * intended for use in the design, construction, operation or
36 | * maintenance of any nuclear facility.
37 | */
38 |
39 | import com.sun.tools.doclets.Taglet;
40 | import com.sun.javadoc.*;
41 | import java.util.Map;
42 | import java.io.*;
43 | /**
44 | * A sample Taglet representing @example. This tag can be used in any kind of
45 | * {@link com.sun.javadoc.Doc}. It is not an inline tag. The text is displayed
46 | * in yellow to remind the developer to perform a task. For
47 | * example, "@example Hello" would be shown as:
48 | *
49 | * -
50 | * To Do:
51 | *
53 | *
54 | *
55 | * @author Jamie Ho
56 | * @since 1.4
57 | */
58 |
// Custom javadoc taglet that expands an @example tag into the source of
// the named example sketch, read from the examples/ directory.
public class ExampleTaglet implements Taglet {

  // The tag name as written in javadoc comments: @example
  private static final String NAME = "example";
  // NOTE(review): HEADER is never referenced in the visible code —
  // confirm it is dead before deleting it.
  private static final String HEADER = "example To Do:";

  /**
   * Return the name of this custom tag.
   */
  public String getName() {
    return NAME;
  }

  /**
   * Will return true since @example
   * can be used in field documentation.
   * @return true since @example
   * can be used in field documentation and false
   * otherwise.
   */
  public boolean inField() {
    return true;
  }

  /**
   * Will return true since @example
   * can be used in constructor documentation.
   * @return true since @example
   * can be used in constructor documentation and false
   * otherwise.
   */
  public boolean inConstructor() {
    return true;
  }

  /**
   * Will return true since @example
   * can be used in method documentation.
   * @return true since @example
   * can be used in method documentation and false
   * otherwise.
   */
  public boolean inMethod() {
    return true;
  }

  /**
   * Will return true since @example
   * can be used in overview documentation.
   * @return true since @example
   * can be used in overview documentation and false
   * otherwise.
   */
  public boolean inOverview() {
    return true;
  }

  /**
   * Will return true since @example
   * can be used in package documentation.
   * @return true since @example
   * can be used in package documentation and false
   * otherwise.
   */
  public boolean inPackage() {
    return true;
  }

  /**
   * Will return true since @example
   * can be used in type documentation (classes or interfaces).
   * @return true since @example
   * can be used in type documentation and false
   * otherwise.
   */
  public boolean inType() {
    return true;
  }

  /**
   * Will return false since @example
   * is not an inline tag.
   * @return false since @example
   * is not an inline tag.
   */

  public boolean isInlineTag() {
    return false;
  }

  /**
   * Register this Taglet.
   * @param tagletMap the map to register this tag to.
   */
  public static void register(Map tagletMap) {
    ExampleTaglet tag = new ExampleTaglet();
    Taglet t = (Taglet) tagletMap.get(tag.getName());
    // Replace any previously registered taglet with the same name.
    if (t != null) {
      tagletMap.remove(tag.getName());
    }
    tagletMap.put(tag.getName(), tag);
  }

  /**
   * Given the Tag representation of this custom
   * tag, return its string representation.
   * @param tag the Tag representation of this custom tag.
   */
  public String toString(Tag tag) {
    return createHTML(readFile(tag.text()));
  }


  /**
   * Given an array of Tags representing this custom
   * tag, return its string representation.
   * @param tags the array of Tags representing of this custom tag.
   */
  public String toString(Tag[] tags) {
    // Only the first @example tag is rendered.
    if (tags.length == 0) {
      return null;
    }
    return createHTML(readFile(tags[0].text()));
  }



  // NOTE(review): the HTML string literals in this method are corrupted
  // in this copy of the file — several original lines (188-193) are
  // missing and the remaining literals are unterminated. The code below
  // is preserved exactly as found; restore the method body from the
  // upstream source before attempting to compile.
  String createHTML(String theString) {
  if(theString!=null) {
  String dd = "";

  return dd+"\n" +
  "
- +Example
" +
  ""+theString+"
" +
  "
";
  }
  return "";
  }


  /**
   * check if the examples directory exists and return the example as given in the tag.
   * @param theExample the name of the example
   */
  String readFile(String theExample) {
    String record = "";
    String myResult = "";
    int recCount = 0;
    // Fall back to ./examples when running from the repository root.
    String myDir = "../examples";
    File file=new File(myDir);
    if(file.exists()==false) {
      myDir = "./examples";
    }
    try {
      FileReader fr = new FileReader(myDir+"/"+theExample+"/"+theExample+".pde");
      BufferedReader br = new BufferedReader(fr);
      record = new String();
      while ((record = br.readLine()) != null) {
        myResult += record+"\n";
      }
    } catch (IOException e) {
      System.out.println(e);
      return null;
    }
    return myResult;
  }
}
230 |
231 |
232 |
--------------------------------------------------------------------------------
/src/processing/video/Video.java:
--------------------------------------------------------------------------------
1 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
2 |
3 | /*
4 | Part of the Processing project - http://processing.org
5 |
6 | Copyright (c) 2012-22 The Processing Foundation
7 | Copyright (c) 2011-12 Ben Fry and Casey Reas
8 | GStreamer implementation ported from GSVideo library by Andres Colubri
9 |
10 | This library is free software; you can redistribute it and/or
11 | modify it under the terms of the GNU Lesser General Public
12 | License as published by the Free Software Foundation; either
13 | version 2.1 of the License, or (at your option) any later version.
14 |
15 | This library is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 | Lesser General Public License for more details.
19 |
20 | You should have received a copy of the GNU Lesser General
21 | Public License along with this library; if not, write to the
22 | Free Software Foundation, Inc., 59 Temple Place, Suite 330,
23 | Boston, MA 02111-1307 USA
24 | */
25 |
26 | package processing.video;
27 |
28 | import org.freedesktop.gstreamer.*;
29 | import processing.core.PApplet;
30 | import processing.core.PConstants;
31 |
32 | import java.io.File;
33 | import java.nio.ByteOrder;
34 | import java.nio.file.Paths;
35 | import java.util.List;
36 |
37 | /**
38 | * This class contains some basic functions used by the rest of the classes in
39 | * this library.
40 | */
41 | public class Video implements PConstants {
42 | // Allows to set the amount of desired debug output from GStreamer, according to the following table:
43 | // https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html?gi-language=c#printing-debug-information
44 | public static int DEBUG_LEVEL = 1;
45 |
46 | // Path that the video library will use to load the GStreamer base libraries
47 | // and plugins from. They can be passed from the application using the
48 | // gstreamer.library.path and gstreamer.plugin.path system variables (see
49 | // comments in initImpl() below).
50 | public static String gstreamerLibPath = "";
51 | public static String gstreamerPluginPath = "";
52 |
53 | protected static boolean usingGStreamerSystemInstall = false;
54 |
55 | // OpenGL texture used as buffer sink by default, when the renderer is
56 | // GL-based. This can improve performance significantly, since the video
57 | // frames are automatically copied into the texture without passing through
58 | // the pixels arrays, as well as having the color conversion into RGBA handled
59 | // natively by GStreamer.
60 | protected static boolean useGLBufferSink = true;
61 |
62 | protected static boolean defaultGLibContext = false;
63 |
64 | protected static long INSTANCES_COUNT = 0;
65 |
66 | protected static int bitsJVM;
67 | static {
68 | bitsJVM = PApplet.parseInt(System.getProperty("sun.arch.data.model"));
69 | }
70 |
71 |
72 | static protected void init() {
73 | if (INSTANCES_COUNT == 0) {
74 | initImpl();
75 | }
76 | INSTANCES_COUNT++;
77 | }
78 |
79 |
80 | static protected void restart() {
81 | removePlugins();
82 | Gst.deinit();
83 | initImpl();
84 | }
85 |
86 |
87 | static protected void initImpl() {
88 | // The video library loads the GStreamer libraries according to the following
89 | // priority:
90 | // 1) If the VM argument "gstreamer.library.path" exists, it will use it as the
91 | // root location of the libraries. This is typically the case when running
92 | // the library from Eclipse.
93 | // 2) If the environmental variable is GSTREAMER_1_0_ROOT_(MINGW/MSVC)_64 is defined then
94 | // will try to use its contents as the root path of the system install of GStreamer.
95 | // 3) The bundled version of GStreamer will be used, if present.
96 | // 4) If none of the above works, then will try to use default install locations of GStreamer
97 | // on Windows and Mac, if they exist.
98 | // In this way, priority is given to the system installation of GStreamer only if set in the
99 | // environmental variables, otherwise will try to load the bundled GStreamer, and if it does not
100 | // exist it will look for GStreamer in the system-wide locations. This gives the user the option
101 | // to remove the bundled GStreamer libs to default to the system-wide installation.
102 | String libPath = System.getProperty("gstreamer.library.path");
103 | int winBuildType = 0; // 0: default build, 1: mingw, 2: msvc
104 | if (libPath != null) {
105 | gstreamerLibPath = libPath;
106 |
107 | // If the GStreamer installation referred by gstreamer.library.path is not
108 | // a system installation, then the path containing the plugins needs to be
109 | // specified separately, otherwise the plugins will be automatically
110 | // loaded from the default location. The system property for the plugin
111 | // path is "gstreamer.plugin.path"
112 | String pluginPath = System.getProperty("gstreamer.plugin.path");
113 | if (pluginPath != null) {
114 | gstreamerPluginPath = pluginPath;
115 | }
116 |
117 | usingGStreamerSystemInstall = false;
118 | } else {
119 | String rootPath = "";
120 | if (bitsJVM == 64) {
121 | // Get 64-bit root of GStreamer install
122 | if (System.getenv("GSTREAMER_1_0_ROOT_X86_64") != null) {
123 | winBuildType = 0;
124 | rootPath = System.getenv("GSTREAMER_1_0_ROOT_X86_64");
125 | } else if (System.getenv("GSTREAMER_1_0_ROOT_MINGW_X86_64") != null) {
126 | winBuildType = 1;
127 | rootPath = System.getenv("GSTREAMER_1_0_ROOT_MINGW_X86_64");
128 | } else if (System.getenv("GSTREAMER_1_0_ROOT_MSVC_X86_64") != null) {
129 | winBuildType = 2;
130 | rootPath = System.getenv("GSTREAMER_1_0_ROOT_MSVC_X86_64");
131 | }
132 | }
133 |
134 | if (!rootPath.equals("")) {
135 | if (PApplet.platform == MACOS) {
136 | gstreamerLibPath = Paths.get(rootPath, "lib").toString();
137 | } else {
138 | gstreamerLibPath = Paths.get(rootPath, "bin").toString();
139 | }
140 | File path = new File(gstreamerLibPath);
141 | if (path.exists()) {
142 | // We have a system install of GStreamer
143 | usingGStreamerSystemInstall = true;
144 | buildSystemPaths(rootPath);
145 | } else {
146 | // The environmental variables contain invalid paths...
147 | gstreamerLibPath = "";
148 | }
149 | }
150 | }
151 |
152 | if (libPath == null && !usingGStreamerSystemInstall) {
153 | // No GStreamer path in the VM arguments, and not system-wide install in environmental variables,
154 | // will try searching for the bundled GStreamer libs.
155 | if (buildBundldedPaths()) {
156 | // Found bundled GStreamer libs, which in version 2.2 of the library are MSVC-built:
157 | winBuildType = 2;
158 | }
159 | }
160 |
161 | if (gstreamerLibPath.equals("")) {
162 | // Finally, no environmental variables defined and did not find bundled gstreamer,
163 | // will try some default system-wide locations.
164 | String rootPath = "";
165 | if (PApplet.platform == MACOS) {
166 | rootPath = "/Library/Frameworks/GStreamer.framework/Versions/1.0";
167 | gstreamerLibPath = Paths.get(rootPath, "lib").toString();
168 | } else if (PApplet.platform == WINDOWS) {
169 | if (bitsJVM == 64) {
170 | if (new File("C:\\gstreamer\\1.0\\x86_64").exists()) {
171 | winBuildType = 0;
172 | rootPath = "C:\\gstreamer\\1.0\\x86_64";
173 | } else if (new File("C:\\gstreamer\\1.0\\mingw_x86_64").exists()) {
174 | winBuildType = 1;
175 | rootPath = "C:\\gstreamer\\1.0\\mingw_x86_64";
176 | } else if (new File("C:\\gstreamer\\1.0\\msvc_x86_64").exists()) {
177 | winBuildType = 2;
178 | rootPath = "C:\\gstreamer\\1.0\\msvc_x86_64";
179 | }
180 | gstreamerLibPath = Paths.get(rootPath, "bin").toString();
181 | }
182 | } else if (PApplet.platform == LINUX) {
183 | if (bitsJVM == 64) {
184 | rootPath = "/lib/x86_64-linux-gnu";
185 | } else {
186 | rootPath = "/lib/x86-linux-gnu";
187 | }
188 | File gstlib = new File(rootPath, "libgstreamer-1.0.so.0");
189 | if (gstlib.exists()) {
190 | gstreamerLibPath = Paths.get(rootPath).toString();
191 | }
192 | }
193 |
194 | File path = new File(gstreamerLibPath);
195 | if (path.exists()) {
196 | // We have a system install of GStreamer
197 | if (bitsJVM == 64) {
198 | if (winBuildType == 0) {
199 | Environment.libc.setenv("GSTREAMER_1_0_ROOT_X86_64", gstreamerLibPath, true);
200 | } else if (winBuildType == 1) {
201 | Environment.libc.setenv("GSTREAMER_1_0_ROOT_MINGW_X86_64", gstreamerLibPath, true);
202 | } else if (winBuildType == 2) {
203 | Environment.libc.setenv("GSTREAMER_1_0_ROOT_MSVC_X86_64", gstreamerLibPath, true);
204 | }
205 | }
206 | buildSystemPaths(rootPath);
207 | } else {
208 | System.err.println("We could not find a system-wide or bundled installation of GStreamer, but video might still work if GStreamer was placed somewhere else");
209 | }
210 | usingGStreamerSystemInstall = true;
211 | }
212 |
213 | if (!gstreamerLibPath.equals("")) {
214 | // Should be safe because this is setting the jna.library.path,
215 | // not java.library.path, and JNA is being provided by the video library.
216 | // This will need to change if JNA is ever moved into more of a shared
217 | // location (i.e. part of core) because this would overwrite the prop.
218 | System.setProperty("jna.library.path", gstreamerLibPath);
219 | }
220 |
221 | Environment.libc.setenv("GST_DEBUG", String.valueOf(DEBUG_LEVEL), true);
222 |
223 | if (!usingGStreamerSystemInstall) {
224 | // Disable the use of gst-plugin-scanner on environments where we're
225 | // not using the host system's installation of GStreamer
226 | // the problem with gst-plugin-scanner is that the library expects it
227 | // to exist at a specific location determined at build time
228 | Environment.libc.setenv("GST_REGISTRY_FORK", "no", true);
229 |
230 | // Prevent globally installed libraries from being used on platforms
231 | // where we ship GStreamer
232 | if (!gstreamerPluginPath.equals("")) {
233 | Environment.libc.setenv("GST_PLUGIN_SYSTEM_PATH_1_0", "", true);
234 | }
235 | }
236 |
237 | if (PApplet.platform == WINDOWS || (!usingGStreamerSystemInstall && PApplet.platform == LINUX)) {
238 | // Pre-loading base GStreamer libraries on Windows and Linux,
239 | // otherwise dynamic dependencies cannot be resolved.
240 | LibraryLoader loader = LibraryLoader.getInstance(winBuildType);
241 | if (loader == null) {
242 | System.err.println("Cannot load GStreamer libraries.");
243 | }
244 | }
245 |
246 | String[] args = { "" };
247 | Gst.setUseDefaultContext(defaultGLibContext);
248 | Gst.init("Processing core video", args);
249 |
250 | // Output GStreamer version, lib path, plugin path
251 | // and whether a system install is being used
252 | printGStreamerInfo();
253 |
254 | if (!usingGStreamerSystemInstall) {
255 | // Plugins are scanned explicitly from the bindings if using the
256 | // local GStreamer
257 | scanPlugins();
258 | }
259 | }
260 |
261 | static protected void printGStreamerInfo() {
262 | String locInfo = "";
263 | if (usingGStreamerSystemInstall) locInfo = "system-wide";
264 | else locInfo = "bundled";
265 | System.out.println("Processing video library using " + locInfo + " GStreamer " + Gst.getVersion());
266 | }
267 |
268 |
269 | static protected void scanPlugins() {
270 | if (!gstreamerPluginPath.equals("")) {
271 | Registry reg = Registry.get();
272 | boolean res;
273 | System.out.print("Scanning GStreamer plugins...");
274 | res = reg.scanPath(gstreamerPluginPath);
275 | if (res) {
276 | System.out.println(" Done.");
277 | } else {
278 | System.err.println("Cannot load GStreamer plugins from " + gstreamerPluginPath);
279 | }
280 | }
281 | }
282 |
283 |
284 | static protected void removePlugins() {
285 | Registry reg = Registry.get();
286 | List list = reg.getPluginList();
287 | for (Plugin plg : list) {
288 | reg.removePlugin(plg);
289 | }
290 | }
291 |
292 |
293 | /**
294 | * Search for an item by checking folders listed in java.library.path
295 | * for a specific name.
296 | */
297 | @SuppressWarnings("SameParameterValue")
298 | static private String searchLibraryPath(String what) {
299 | String libraryPath = System.getProperty("java.library.path");
300 | // Should not be null, but cannot assume
301 | if (libraryPath != null) {
302 | String[] folders = PApplet.split(libraryPath, File.pathSeparatorChar);
303 | // Usually, the most relevant paths will be at the front of the list,
304 | // so hopefully this will not walk several entries.
305 | for (String folder : folders) {
306 | // Skip /lib and /usr/lib folders because they contain the system-wide GStreamer on Linux
307 | // and they are on the Java library path.
308 | if (folder.startsWith("/lib/") || folder.startsWith("/usr/lib/")) continue;
309 | File file = new File(folder, what);
310 | if (file.exists()) {
311 | return file.getAbsolutePath();
312 | }
313 | }
314 | }
315 | return null;
316 | }
317 |
318 |
319 | /**
320 | * Search for an item by checking folders listed in java.class.path
321 | * for a specific name.
322 | */
323 | @SuppressWarnings("SameParameterValue")
324 | static private String searchClassPath(String what) {
325 | String classPath = System.getProperty("java.class.path");
326 | // Should not be null, but cannot assume
327 | if (classPath != null) {
328 | String[] entries = PApplet.split(classPath, File.pathSeparatorChar);
329 | // Usually, the most relevant paths will be at the front of the list,
330 | // so hopefully this will not walk several entries.
331 | for (String entry : entries) {
332 | File dir = new File(entry);
333 | // If it's a .jar file, get its parent folder. This will lead to some
334 | // double-checking of the same folder, but probably almost as expensive
335 | // to keep track of folders we've already seen.
336 | if (dir.isFile()) {
337 | dir = dir.getParentFile();
338 | }
339 | File file = new File(dir, what);
340 | if (file.exists()) {
341 | return file.getAbsolutePath();
342 | }
343 | }
344 | }
345 | return null;
346 | }
347 |
348 | static protected void buildSystemPaths(String rootPath) {
349 | if (System.getenv("GST_PLUGIN_SYSTEM_PATH") != null) {
350 | gstreamerPluginPath = System.getenv("GST_PLUGIN_SYSTEM_PATH");
351 | } else {
352 | if (PApplet.platform == WINDOWS) {
353 | gstreamerPluginPath = Paths.get(rootPath, "lib", "gstreamer-1.0").toString();
354 | } else {
355 | gstreamerPluginPath = Paths.get(gstreamerLibPath, "gstreamer-1.0").toString(); }
356 | }
357 | File path = new File(gstreamerPluginPath);
358 | if (!path.exists()) {
359 | gstreamerPluginPath = "";
360 | }
361 | }
362 |
363 | static protected boolean buildBundldedPaths() {
364 | // look for the gstreamer-1.0 folder in the native library path
365 | // (there are natives adjacent to it, so this will work)
366 | gstreamerPluginPath = searchLibraryPath("gstreamer-1.0");
367 | if (gstreamerPluginPath == null) {
368 | gstreamerPluginPath = searchClassPath("gstreamer-1.0");
369 | }
370 |
371 | if (gstreamerPluginPath == null) {
372 | gstreamerPluginPath = "";
373 | gstreamerLibPath = "";
374 | usingGStreamerSystemInstall = true;
375 | return false;
376 | } else {
377 | File gstreamerLibDir = new File(gstreamerPluginPath).getParentFile();
378 | gstreamerLibPath = gstreamerLibDir.getAbsolutePath();
379 | return true;
380 | }
381 | }
382 |
383 |
384 | static protected float nanoSecToSecFrac(long nanosec) {
385 | return (float)(nanosec / 1E9);
386 | }
387 |
388 |
389 | static protected long secToNanoLong(float sec) {
390 | Double f = Double.valueOf(sec * 1E9);
391 | return f.longValue();
392 | }
393 |
394 |
395 | /**
396 | * Reorders an OpenGL pixel array (RGBA) into ARGB. The array must be
397 | * of size width * height.
398 | * @param pixels int[]
399 | */
400 | static protected void convertToARGB(int[] pixels, int width, int height) {
401 | int t = 0;
402 | int p = 0;
403 | if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN) {
404 | // RGBA to ARGB conversion: shifting RGB 8 bits to the right,
405 | // and placing A 24 bits to the left.
406 | for (int y = 0; y < height; y++) {
407 | for (int x = 0; x < width; x++) {
408 | int pixel = pixels[p++];
409 | pixels[t++] = (pixel >>> 8) | ((pixel << 24) & 0xFF000000);
410 | }
411 | }
412 | } else {
413 | // We have to convert ABGR into ARGB, so R and B must be swapped,
414 | // A and G just brought back in.
415 | for (int y = 0; y < height; y++) {
416 | for (int x = 0; x < width; x++) {
417 | int pixel = pixels[p++];
418 | pixels[t++] = ((pixel & 0xFF) << 16) | ((pixel & 0xFF0000) >> 16) |
419 | (pixel & 0xFF00FF00);
420 | }
421 | }
422 | }
423 | }
424 | }
425 |
--------------------------------------------------------------------------------
/src/processing/video/LibraryLoader.java:
--------------------------------------------------------------------------------
1 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
2 |
3 | /*
4 | Part of the Processing project - http://processing.org
5 |
6 | Copyright (c) 2012-22 The Processing Foundation
7 | Copyright (c) 2011-12 Ben Fry and Casey Reas
8 | GStreamer implementation ported from GSVideo library by Andres Colubri
9 | Library loader based on code by Tal Shalif
10 |
11 | This library is free software; you can redistribute it and/or
12 | modify it under the terms of the GNU Lesser General Public
13 | License as published by the Free Software Foundation; either
14 | version 2.1 of the License, or (at your option) any later version.
15 |
16 | This library is distributed in the hope that it will be useful,
17 | but WITHOUT ANY WARRANTY; without even the implied warranty of
18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 | Lesser General Public License for more details.
20 |
21 | You should have received a copy of the GNU Lesser General
22 | Public License along with this library; if not, write to the
23 | Free Software Foundation, Inc., 59 Temple Place, Suite 330,
24 | Boston, MA 02111-1307 USA
25 | */
26 |
27 | package processing.video;
28 |
29 | import java.util.HashMap;
30 | import java.util.Map;
31 |
32 | import com.sun.jna.Library;
33 | import com.sun.jna.Native;
34 | import com.sun.jna.Platform;
35 |
36 | /**
37 | * This class loads the gstreamer native libraries.
38 | *
39 | */
40 | public class LibraryLoader {
41 |
  /**
   * Empty JNA Library sub-interface. NOTE(review): presumably used only to
   * force each native library to be loaded for its side effects (making its
   * symbols resolvable), since no methods are mapped — confirm against the
   * loading code later in this file.
   */
  public interface DummyLibrary extends Library {
  }

  // Singleton; presumably assigned by getInstance(int) further down this file
  // (called from Video.initImpl()) — not visible in this chunk.
  private static LibraryLoader instance;
46 |
  // Each entry is { String libraryBaseName, String[] dependencies, boolean flag }.
  // The dependency arrays are all empty here, and the semantics of the boolean
  // flag are defined by the loading code further down this file (not visible in
  // this chunk) — presumably whether the library is required; confirm there.
  static final Object[][] WINDOWS_MINGW_DEPENDENCIES = {
    // Base GStreamer native libraries for a COMPLETE MINGW installation
    { "avcodec-58", new String[] {}, false },
    { "avfilter-7", new String[] {}, false },
    { "avformat-58", new String[] {}, false },
    { "avutil-56", new String[] {}, false },
    { "libass-9", new String[] {}, false },
    { "libbz2", new String[] {}, false },
    { "libcairo-2", new String[] {}, false },
    { "libcairo-gobject-2", new String[] {}, false },
    { "libcairo-script-interpreter-2", new String[] {}, false },
    { "libcharset-1", new String[] {}, false },
    { "libcroco-0.6-3", new String[] {}, false },
    { "libcrypto-1_1-x64", new String[] {}, false },
    { "libdca-0", new String[] {}, false },
    { "libdv-4", new String[] {}, false },
    { "libexpat-1", new String[] {}, false },
    { "libffi-7", new String[] {}, false },
    { "libFLAC-8", new String[] {}, false },
    { "libfontconfig-1", new String[] {}, false },
    { "libfreetype-6", new String[] {}, false },
    { "libfribidi-0", new String[] {}, false },
    { "libgcc_s_seh-1", new String[] {}, false },
    { "libgdk_pixbuf-2.0-0", new String[] {}, false },
    { "libges-1.0-0", new String[] {}, false },
    { "libgio-2.0-0", new String[] {}, false },
    { "libglib-2.0-0", new String[] {}, false },
    { "libgmodule-2.0-0", new String[] {}, false },
    { "libgobject-2.0-0", new String[] {}, false },
    { "libgraphene-1.0-0", new String[] {}, false },
    { "libgstadaptivedemux-1.0-0", new String[] {}, false },
    { "libgstallocators-1.0-0", new String[] {}, false },
    { "libgstapp-1.0-0", new String[] {}, false },
    { "libgstaudio-1.0-0", new String[] {}, false },
    { "libgstbadaudio-1.0-0", new String[] {}, false },
    { "libgstbase-1.0-0", new String[] {}, false },
    { "libgstbasecamerabinsrc-1.0-0", new String[] {}, false },
    { "libgstcheck-1.0-0", new String[] {}, false },
    { "libgstcodecparsers-1.0-0", new String[] {}, false },
    { "libgstcodecs-1.0-0", new String[] {}, false },
    { "libgstcontroller-1.0-0", new String[] {}, false },
    { "libgstd3d11-1.0-0", new String[] {}, false },
    { "libgstfft-1.0-0", new String[] {}, false },
    { "libgstgl-1.0-0", new String[] {}, false },
    { "libgstinsertbin-1.0-0", new String[] {}, false },
    { "libgstisoff-1.0-0", new String[] {}, false },
    { "libgstmpegts-1.0-0", new String[] {}, false },
    { "libgstnet-1.0-0", new String[] {}, false },
    { "libgstpbutils-1.0-0", new String[] {}, false },
    { "libgstphotography-1.0-0", new String[] {}, false },
    { "libgstplay-1.0-0", new String[] {}, false },
    { "libgstplayer-1.0-0", new String[] {}, false },
    { "libgstreamer-1.0-0", new String[] {}, false },
    { "libgstriff-1.0-0", new String[] {}, false },
    { "libgstrtp-1.0-0", new String[] {}, false },
    { "libgstrtsp-1.0-0", new String[] {}, false },
    { "libgstrtspserver-1.0-0", new String[] {}, false },
    { "libgstsctp-1.0-0", new String[] {}, false },
    { "libgstsdp-1.0-0", new String[] {}, false },
    { "libgsttag-1.0-0", new String[] {}, false },
    { "libgsttranscoder-1.0-0", new String[] {}, false },
    { "libgsturidownloader-1.0-0", new String[] {}, false },
    { "libgstvalidate-1.0-0", new String[] {}, false },
    { "libgstvideo-1.0-0", new String[] {}, false },
    { "libgstwebrtc-1.0-0", new String[] {}, false },
    { "libgthread-2.0-0", new String[] {}, false },
    { "libharfbuzz-0", new String[] {}, false },
    { "libiconv-2", new String[] {}, false },
    { "libintl-8", new String[] {}, false },
    { "libjpeg-8", new String[] {}, false },
    { "libjson-glib-1.0-0", new String[] {}, false },
    { "libkate-1", new String[] {}, false },
    { "libmp3lame-0", new String[] {}, false },
    { "libmpg123-0", new String[] {}, false },
    { "libnice-10", new String[] {}, false },
    { "libogg-0", new String[] {}, false },
    { "liboggkate-1", new String[] {}, false },
    { "libopencore-amrnb-0", new String[] {}, false },
    { "libopencore-amrwb-0", new String[] {}, false },
    { "libopenh264-6", new String[] {}, false },
    { "libopenjp2", new String[] {}, false },
    { "libopus-0", new String[] {}, false },
    { "liborc-0.4-0", new String[] {}, false },
    { "liborc-test-0.4-0", new String[] {}, false },
    { "libpango-1.0-0", new String[] {}, false },
    { "libpangocairo-1.0-0", new String[] {}, false },
    { "libpangoft2-1.0-0", new String[] {}, false },
    { "libpangowin32-1.0-0", new String[] {}, false },
    { "libpixman-1-0", new String[] {}, false },
    { "libpng16-16", new String[] {}, false },
    { "libpsl-5", new String[] {}, false },
    { "librsvg-2-2", new String[] {}, false },
    { "librtmp-1", new String[] {}, false },
    { "libsbc-1", new String[] {}, false },
    { "libSoundTouch-1", new String[] {}, false },
    { "libsoup-2.4-1", new String[] {}, false },
    { "libspandsp-2", new String[] {}, false },
    { "libspeex-1", new String[] {}, false },
    { "libsqlite3-0", new String[] {}, false },
    { "libsrt", new String[] {}, false },
    { "libsrtp2-1", new String[] {}, false },
    { "libssl-1_1-x64", new String[] {}, false },
    { "libstdc++-6", new String[] {}, false },
    { "libtag", new String[] {}, false },
    { "libtheora-0", new String[] {}, false },
    { "libtheoradec-1", new String[] {}, false },
    { "libtheoraenc-1", new String[] {}, false },
    { "libtiff-5", new String[] {}, false },
    { "libturbojpeg-0", new String[] {}, false },
    { "libvo-aacenc-0", new String[] {}, false },
    { "libvorbis-0", new String[] {}, false },
    { "libvorbisenc-2", new String[] {}, false },
    { "libvorbisfile-3", new String[] {}, false },
    { "libwavpack", new String[] {}, false },
    { "libwebrtc_audio_processing-0", new String[] {}, false },
    { "libwinpthread-1", new String[] {}, false },
    { "libx264-157", new String[] {}, false },
    { "libxml2-2", new String[] {}, false },
    { "libz-1", new String[] {}, false },
    { "libzbar-0", new String[] {}, false },
    { "swresample-3", new String[] {}, false }
  };
169 |
  // Same entry format as WINDOWS_MINGW_DEPENDENCIES:
  // { String libraryBaseName, String[] dependencies, boolean flag }; all
  // dependency arrays are empty and the flag's meaning is defined by the
  // loading code further down this file (not visible in this chunk).
  // MSVC builds drop the "lib" prefix for most (but not all) DLL names.
  static final Object[][] WINDOWS_MSVC_DEPENDENCIES = {
    // Base GStreamer native libraries for a COMPLETE MSVC installation
    { "avcodec-58", new String[] {}, false },
    { "avfilter-7", new String[] {}, false },
    { "avformat-58", new String[] {}, false },
    { "avutil-56", new String[] {}, false },
    { "bz2", new String[] {}, false },
    { "cairo-2", new String[] {}, false },
    { "cairo-gobject-2", new String[] {}, false },
    { "cairo-script-interpreter-2", new String[] {}, false },
    { "dv-4", new String[] {}, false },
    { "ffi-7", new String[] {}, false },
    { "fontconfig-1", new String[] {}, false },
    { "fribidi-0", new String[] {}, false },
    { "gdk_pixbuf-2.0-0", new String[] {}, false },
    { "ges-1.0-0", new String[] {}, false },
    { "gio-2.0-0", new String[] {}, false },
    { "glib-2.0-0", new String[] {}, false },
    { "gmodule-2.0-0", new String[] {}, false },
    { "gobject-2.0-0", new String[] {}, false },
    { "graphene-1.0-0", new String[] {}, false },
    { "gstadaptivedemux-1.0-0", new String[] {}, false },
    { "gstallocators-1.0-0", new String[] {}, false },
    { "gstapp-1.0-0", new String[] {}, false },
    { "gstaudio-1.0-0", new String[] {}, false },
    { "gstbadaudio-1.0-0", new String[] {}, false },
    { "gstbase-1.0-0", new String[] {}, false },
    { "gstbasecamerabinsrc-1.0-0", new String[] {}, false },
    { "gstcheck-1.0-0", new String[] {}, false },
    { "gstcodecparsers-1.0-0", new String[] {}, false },
    { "gstcodecs-1.0-0", new String[] {}, false },
    { "gstcontroller-1.0-0", new String[] {}, false },
    { "gstd3d11-1.0-0", new String[] {}, false },
    { "gstfft-1.0-0", new String[] {}, false },
    { "gstgl-1.0-0", new String[] {}, false },
    { "gstinsertbin-1.0-0", new String[] {}, false },
    { "gstisoff-1.0-0", new String[] {}, false },
    { "gstmpegts-1.0-0", new String[] {}, false },
    { "gstnet-1.0-0", new String[] {}, false },
    { "gstpbutils-1.0-0", new String[] {}, false },
    { "gstphotography-1.0-0", new String[] {}, false },
    { "gstplay-1.0-0", new String[] {}, false },
    { "gstplayer-1.0-0", new String[] {}, false },
    { "gstreamer-1.0-0", new String[] {}, false },
    { "gstriff-1.0-0", new String[] {}, false },
    { "gstrtp-1.0-0", new String[] {}, false },
    { "gstrtsp-1.0-0", new String[] {}, false },
    { "gstrtspserver-1.0-0", new String[] {}, false },
    { "gstsctp-1.0-0", new String[] {}, false },
    { "gstsdp-1.0-0", new String[] {}, false },
    { "gsttag-1.0-0", new String[] {}, false },
    { "gsttranscoder-1.0-0", new String[] {}, false },
    { "gsturidownloader-1.0-0", new String[] {}, false },
    { "gstvalidate-1.0-0", new String[] {}, false },
    { "gstvideo-1.0-0", new String[] {}, false },
    { "gstwebrtc-1.0-0", new String[] {}, false },
    { "gstwinrt-1.0-0", new String[] {}, false },
    { "gthread-2.0-0", new String[] {}, false },
    { "harfbuzz", new String[] {}, false },
    { "intl-8", new String[] {}, false },
    { "json-glib-1.0-0", new String[] {}, false },
    { "libass-9", new String[] {}, false },
    { "libcharset-1", new String[] {}, false },
    { "libcroco-0.6-3", new String[] {}, false },
    { "libcrypto-1_1-x64", new String[] {}, false },
    { "libdca-0", new String[] {}, false },
    { "libexpat-1", new String[] {}, false },
    { "libFLAC-8", new String[] {}, false },
    { "libfreetype-6", new String[] {}, false },
    { "libgcc_s_seh-1", new String[] {}, false },
    { "libiconv-2", new String[] {}, false },
    { "libjpeg-8", new String[] {}, false },
    { "libkate-1", new String[] {}, false },
    { "libmp3lame-0", new String[] {}, false },
    { "libmpg123-0", new String[] {}, false },
    { "libogg-0", new String[] {}, false },
    { "liboggkate-1", new String[] {}, false },
    { "libopencore-amrnb-0", new String[] {}, false },
    { "libopencore-amrwb-0", new String[] {}, false },
    { "libpng16-16", new String[] {}, false },
    { "librsvg-2-2", new String[] {}, false },
    { "librtmp-1", new String[] {}, false },
    { "libsbc-1", new String[] {}, false },
    { "libspandsp-2", new String[] {}, false },
    { "libspeex-1", new String[] {}, false },
    { "libsrt", new String[] {}, false },
    { "libssl-1_1-x64", new String[] {}, false },
    { "libstdc++-6", new String[] {}, false },
    { "libtheora-0", new String[] {}, false },
    { "libtheoradec-1", new String[] {}, false },
    { "libtheoraenc-1", new String[] {}, false },
    { "libtiff-5", new String[] {}, false },
    { "libturbojpeg-0", new String[] {}, false },
    { "libvo-aacenc-0", new String[] {}, false },
    { "libvorbis-0", new String[] {}, false },
    { "libvorbisenc-2", new String[] {}, false },
    { "libvorbisfile-3", new String[] {}, false },
    { "libwinpthread-1", new String[] {}, false },
    { "libx264-157", new String[] {}, false },
    { "libxml2-2", new String[] {}, false },
    { "libzbar-0", new String[] {}, false },
    { "nice-10", new String[] {}, false },
    { "openh264-6", new String[] {}, false },
    { "openjp2", new String[] {}, false },
    { "opus-0", new String[] {}, false },
    { "orc-0.4-0", new String[] {}, false },
    { "orc-test-0.4-0", new String[] {}, false },
    { "pango-1.0-0", new String[] {}, false },
    { "pangocairo-1.0-0", new String[] {}, false },
    { "pangoft2-1.0-0", new String[] {}, false },
    { "pangowin32-1.0-0", new String[] {}, false },
    { "pixman-1-0", new String[] {}, false },
    { "psl-5", new String[] {}, false },
    { "soup-2.4-1", new String[] {}, false },
    { "sqlite3-0", new String[] {}, false },
    { "srtp2-1", new String[] {}, false },
    { "swresample-3", new String[] {}, false },
    { "wavpack", new String[] {}, false },
    { "z-1", new String[] {}, false }
  };
290 |
  // Native libraries to pre-load on Linux, in load order, before GStreamer is
  // initialized. Entry layout: { library name, dependency names, required flag }
  // (see preLoadLibs()/findDeps(), which read entries [0], [1] and [2]).
  static final Object[][] LINUX_DEPENDENCIES = {
    // Base GStreamer native libraries from a meson build

    // GLib libraries
    { "glib-2.0", new String[] {}, false },
    { "gobject-2.0", new String[] {}, false },
    { "gio-2.0", new String[] {}, false },
    { "gmodule-2.0", new String[] {}, false },
    { "gthread-2.0", new String[] {}, false },

    // Core GStreamer libraries... the order of these libraries is important (while it does
    // not seem to matter for Windows. For example, if gstbase comes before gstreamer, then
    // plugin scanning crashes with "cannot register existing type 'GstObject'" error
    { "gstreamer-1.0", new String[] {}, false },
    { "gstbase-1.0", new String[] {}, false },
    { "gsturidownloader-1.0", new String[] {}, false },
    { "gstadaptivedemux-1.0", new String[] {}, false },
    { "gstapp-1.0", new String[] {}, false },
    { "gsttag-1.0", new String[] {}, false },
    { "gstvideo-1.0", new String[] {}, false },
    { "gstaudio-1.0", new String[] {}, false },
    { "gstpbutils-1.0", new String[] {}, false },
    { "gstplay-1.0", new String[] {}, false },
    { "gstplayer-1.0", new String[] {}, false },
    { "gstbadaudio-1.0", new String[] {}, false },
    { "gstbasecamerabinsrc-1.0", new String[] {}, false },
    { "gstcheck-1.0", new String[] {}, false },
    { "gstcodecparsers-1.0", new String[] {}, false },
    { "gstcontroller-1.0", new String[] {}, false },
    { "gstfft-1.0", new String[] {}, false },
    { "gstinsertbin-1.0", new String[] {}, false },
    { "gstisoff-1.0", new String[] {}, false },
    { "gstmpegts-1.0", new String[] {}, false },
    { "gstnet-1.0", new String[] {}, false },
    { "gstphotography-1.0", new String[] {}, false },
    { "gstallocators-1.0", new String[] {}, false },
    // NOTE(review): unlike every other entry this one keeps the "lib" prefix,
    // so loadLibrary() will probe for "liblibgstcodecs-1.0" — confirm whether
    // "gstcodecs-1.0" was intended.
    { "libgstcodecs-1.0", new String[] {}, false },
    { "gstriff-1.0", new String[] {}, false },
    { "gstrtp-1.0", new String[] {}, false },
    { "gstrtsp-1.0", new String[] {}, false },
    { "gstsdp-1.0", new String[] {}, false },
    { "gstsctp-1.0", new String[] {}, false },
    { "gstrtspserver-1.0", new String[] {}, false },
    { "gstvalidate-1.0", new String[] {}, false },
    { "gstvalidate-default-overrides-1.0", new String[] {}, false },
    { "gstwebrtc-1.0", new String[] {}, false },
    { "gsttranscoder-1.0", new String[] {}, false },

    // External libraries
    { "xml2", new String[] {}, false },
    { "avutil", new String[] {}, false },
    { "swresample", new String[] {}, false },
    { "swscale", new String[] {}, false },
    { "avcodec", new String[] {}, false },
    { "avformat", new String[] {}, false },
    { "avresample", new String[] {}, false },
    { "avfilter", new String[] {}, false },
    { "avdevice", new String[] {}, false },
    { "avtp", new String[] {}, false },
    { "cairo-gobject", new String[] {}, false },
    { "cairo-script-interpreter", new String[] {}, false },
    { "cairo", new String[] {}, false },
    { "dv", new String[] {}, false },
    { "fdk_aac", new String[] {}, false },
    { "fontconfig", new String[] {}, false },
    { "freetype", new String[] {}, false },
    { "fribidi", new String[] {}, false },
    { "ges-1.0", new String[] {}, false },
    { "harfbuzz-gobject", new String[] {}, false },
    { "harfbuzz", new String[] {}, false },
    { "harfbuzz-subset", new String[] {}, false },
    { "jpeg", new String[] {}, false },
    { "json-glib-1.0", new String[] {}, false },
    { "microdns", new String[] {}, false },
    { "mp3lame", new String[] {}, false },
    { "nice", new String[] {}, false },
    { "ogg", new String[] {}, false },
    { "openh264", new String[] {}, false },
    { "openjp2", new String[] {}, false },
    { "opus", new String[] {}, false },
    { "orc-0.4", new String[] {}, false },
    { "orc-test-0.4", new String[] {}, false },
    { "pango-1.0", new String[] {}, false },
    // { "pangocairo-1.0", new String[] {}, false }, // Seems broken in 1.20.3
    { "pangoft2-1.0", new String[] {}, false },
    { "pixman-1", new String[] {}, false },
    { "png16", new String[] {}, false },
    { "postproc", new String[] {}, false },
    { "psl", new String[] {}, false },
    { "soup-2.4", new String[] {}, false },
    { "soup-gnome-2.4", new String[] {}, false },
    { "sqlite3", new String[] {}, false },
    { "vorbisenc", new String[] {}, false },
    { "vorbisfile", new String[] {}, false },
    { "vorbis", new String[] {}, false }
  };
387 |
388 | static Object[][] dependencies;
389 |
390 |
391 | private static final Map loadedMap =
392 | new HashMap<>();
393 |
394 |
395 | private static final int RECURSIVE_LOAD_MAX_DEPTH = 5;
396 |
397 |
  // Private constructor: instances are only created through getInstance()
  // (singleton pattern).
  private LibraryLoader() {
  }
400 |
401 |
402 | private void preLoadLibs(int winBuildType) {
403 | if (Platform.isWindows()) {
404 | if (winBuildType == 0) {
405 | System.err.println("Seems like you are trying to use GStreamer native libraries older than 1.20, which are not supported.");
406 | return;
407 | } else if (winBuildType == 1) {
408 | dependencies = WINDOWS_MINGW_DEPENDENCIES;
409 | } else if (winBuildType == 2) {
410 | dependencies = WINDOWS_MSVC_DEPENDENCIES;
411 | }
412 |
413 | } else if (Platform.isLinux()) {
414 | dependencies = LINUX_DEPENDENCIES;
415 | } else {
416 | // No need for dependencies pre-loading on MacOS
417 | return;
418 | }
419 |
420 | for (Object[] a : dependencies) {
421 | load(a[0].toString(), DummyLibrary.class, true, 0, (Boolean) a[2]);
422 | }
423 | }
424 |
425 |
426 | static private String[] findDeps(String name) {
427 | for (Object[] a : dependencies) {
428 | if (name.equals(a[0])) {
429 | return (String[]) a[1];
430 | }
431 | }
432 |
433 | // library dependency load chain unspecified - probably client call
434 | return new String[] { };
435 | }
436 |
437 |
438 | public Object load(String name, Class> clazz, boolean reqLib) {
439 | return load(name, clazz, true, 0, reqLib);
440 | }
441 |
442 |
443 | private Object load(String name, Class> clazz, boolean forceReload,
444 | int depth, boolean reqLib) {
445 |
446 | assert depth < RECURSIVE_LOAD_MAX_DEPTH : String.format(
447 | "recursive max load depth %s has been exceeded", depth);
448 |
449 | Object library = loadedMap.get(name);
450 |
451 | if (null == library || forceReload) {
452 |
453 | // Logger.getAnonymousLogger().info(String.format("%" + ((depth + 1) * 2)
454 | // + "sloading %s", "->", name));
455 |
456 | try {
457 | String[] deps = findDeps(name);
458 |
459 | for (String lib : deps) {
460 | load(lib, DummyLibrary.class, false, depth + 1, reqLib);
461 | }
462 |
463 | library = loadLibrary(name, clazz, reqLib);
464 |
465 | if (library != null) {
466 | loadedMap.put(name, library);
467 | }
468 | } catch (Exception e) {
469 | if (reqLib)
470 | throw new RuntimeException(String.format(
471 | "can not load required library %s", name, e));
472 | else
473 | System.out.println(String.format("can not load library %s", name, e));
474 | }
475 | }
476 |
477 | return library;
478 | }
479 |
480 |
481 | private static Object loadLibrary(String name, Class> clazz,
482 | boolean reqLib) {
483 |
484 | // Logger.getAnonymousLogger().info(String.format("loading %s", name));
485 |
486 | String[] nameFormats;
487 | nameFormats = Platform.isWindows() ? new String[] { "lib%s", "lib%s-0",
488 | "%s" } : new String[] { "%s-0", "%s" };
489 |
490 | UnsatisfiedLinkError linkError = null;
491 |
492 | for (String fmt : nameFormats) {
493 | try {
494 | String s = String.format(fmt, name);
495 | //System.out.println("Trying to load library file " + s);
496 | Object obj = Native.loadLibrary(s, clazz);
497 | //System.out.println("Loaded library " + s + " successfully!");
498 | return obj;
499 | } catch (UnsatisfiedLinkError ex) {
500 | linkError = ex;
501 | }
502 | }
503 |
504 | if (reqLib)
505 | throw new UnsatisfiedLinkError(
506 | String.format(
507 | "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
508 | "-Djna.library.path=%s. Last error:%s",
509 | name, System.getProperty("jna.library.path"), linkError));
510 | else {
511 | System.out.println(String.format(
512 | "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
513 | "-Djna.library.path=%s. Last error:%s",
514 | name, System.getProperty("jna.library.path"), linkError));
515 | return null;
516 | }
517 | }
518 |
519 |
520 | public static synchronized LibraryLoader getInstance(int winBuildType) {
521 | if (null == instance) {
522 | instance = new LibraryLoader();
523 | instance.preLoadLibs(winBuildType);
524 | }
525 | return instance;
526 | }
527 | }
528 |
--------------------------------------------------------------------------------
/src/processing/video/Movie.java:
--------------------------------------------------------------------------------
1 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
2 |
3 | /*
4 | Part of the Processing project - http://processing.org
5 |
6 | Copyright (c) 2012-22 The Processing Foundation
7 | Copyright (c) 2004-12 Ben Fry and Casey Reas
8 | GStreamer implementation ported from GSVideo library by Andres Colubri
9 | The previous version of this code was developed by Hernando Barragan
10 |
11 | This library is free software; you can redistribute it and/or
12 | modify it under the terms of the GNU Lesser General Public
13 | License as published by the Free Software Foundation; either
14 | version 2.1 of the License, or (at your option) any later version.
15 |
16 | This library is distributed in the hope that it will be useful,
17 | but WITHOUT ANY WARRANTY; without even the implied warranty of
18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 | Lesser General Public License for more details.
20 |
21 | You should have received a copy of the GNU Lesser General
22 | Public License along with this library; if not, write to the
23 | Free Software Foundation, Inc., 59 Temple Place, Suite 330,
24 | Boston, MA 02111-1307 USA
25 | */
26 |
27 | package processing.video;
28 |
29 | import processing.core.*;
30 |
31 | import java.io.*;
32 | import java.net.URI;
33 | import java.nio.*;
34 | import java.util.EnumSet;
35 | import java.util.concurrent.TimeUnit;
36 | import java.util.concurrent.locks.Lock;
37 | import java.util.concurrent.locks.ReentrantLock;
38 | import java.lang.reflect.*;
39 |
40 | import org.freedesktop.gstreamer.*;
41 | import org.freedesktop.gstreamer.Buffer;
42 | import org.freedesktop.gstreamer.elements.*;
43 | import org.freedesktop.gstreamer.event.SeekFlags;
44 | import org.freedesktop.gstreamer.event.SeekType;
45 |
46 |
47 | /**
48 | * Datatype for storing and playing movies. Movies must be located in the sketch's data folder
49 | * or an accessible place on the network to load without an error.
50 | *
51 | * @webref movie
52 | * @webBrief Datatype for storing and playing movies.
53 | * @usage application
54 | */
55 | public class Movie extends PImage implements PConstants {
  // Network protocols accepted by initGStreamer() for streaming playback.
  public static String[] supportedProtocols = { "http", "https" };

  // Path or URI the movie was opened from (kept for error messages).
  public String filename;
  // GStreamer playback pipeline.
  public PlayBin playbin;

  // The source resolution and framerate of the file
  public int sourceWidth;
  public int sourceHeight;
  public float sourceFrameRate;

  public float frameRate; // the current playback fps
  protected float rate; // speed multiplier (1.0: frameRate = nativeFrameRate)

  protected float volume;

  // Playback state flags, updated by play()/pause()/stop() and the EOS handler.
  protected boolean playing = false;
  protected boolean paused = false;
  protected boolean repeat = false;

  // Reflective hook into the sketch's movieEvent() method, if one exists.
  protected Method movieEventMethod;
  protected Object eventHandler;

  protected boolean available;  // set by the streaming thread when a frame is ready
  protected boolean ready;      // true once the pipeline has been moved out of NULL
  protected boolean newFrame;

  protected AppSink rgbSink = null;   // sink receiving decoded frames
  protected int[] copyPixels = null;  // staging array for the non-GL path

  // True until read() initializes the PImage with the source dimensions.
  protected boolean firstFrame = true;

  // OpenGL buffer-sink plumbing; methods are resolved reflectively in
  // getSinkMethods() so the library does not link against the GL renderer.
  protected boolean useBufferSink = false;
  protected boolean outdatedPixels = true;
  protected Object bufferSink;
  protected Method sinkCopyMethod;
  protected Method sinkSetMethod;
  protected Method sinkDisposeMethod;
  protected Method sinkGetMethod;

  private NewSampleListener newSampleListener;
  private NewPrerollListener newPrerollListener;
  // Guards the frame handoff between the GStreamer streaming thread and the EDT.
  private final Lock bufferLock = new ReentrantLock();
98 |
99 |
100 | /**
101 | * Creates an instance of Movie loading the movie from filename.
102 | *
103 | * @param parent PApplet
104 | * @param filename String
105 | */
  public Movie(PApplet parent, String filename) {
    // Start as a 0x0 image; the real dimensions are only known once the
    // first frame arrives and read() calls init() with the source size.
    super(0, 0, ARGB);
    initGStreamer(parent, filename);
  }
110 |
111 |
112 | /**
113 | * Disposes all the native resources associated to this movie.
114 | *
115 | * NOTE: This is not official API and may/will be removed at any time.
116 | */
117 | public void dispose() {
118 | if (playbin != null) {
119 | try {
120 | if (playbin.isPlaying()) {
121 | playbin.stop();
122 | playbin.getState();
123 | }
124 | } catch (Exception e) {
125 | }
126 |
127 | pixels = null;
128 |
129 | rgbSink.disconnect(newSampleListener);
130 | rgbSink.disconnect(newPrerollListener);
131 | rgbSink.dispose();
132 | playbin.setState(org.freedesktop.gstreamer.State.NULL);
133 | playbin.getState();
134 | playbin.getBus().dispose();
135 | playbin.dispose();
136 |
137 | parent.g.removeCache(this);
138 | parent.unregisterMethod("dispose", this);
139 | parent.unregisterMethod("post", this);
140 | }
141 | }
142 |
143 |
144 | /**
145 | * Finalizer of the class.
146 | */
  // Best-effort release of native resources if the sketch never called
  // dispose(). NOTE(review): finalize() is deprecated since Java 9 and not
  // guaranteed to run — confirm native cleanup does not rely on it.
  protected void finalize() throws Throwable {
    try {
      dispose();
    } finally {
      // super.finalize();
    }
  }
154 |
155 |
156 | /**
157 | * Sets how often frames are read from the movie. Setting the fps
158 | * parameter to 4, for example, will cause 4 frames to be read per second.
159 | *
160 | * @webref movie
161 | * @webBrief Sets how often frames are read from the movie.
162 | * @usage web_application
163 | * @param ifps speed of the movie in frames per second
164 | * @brief Sets the target frame rate
165 | */
  public void frameRate(float ifps) {
    // We calculate the target ratio in the case both the
    // current and target framerates are valid (greater than
    // zero), otherwise we leave it as 1.
    float f = (0 < ifps && 0 < frameRate) ? ifps / frameRate : 1;

    // Anchor the seek at the current position so the speed change takes
    // effect from here instead of restarting the stream.
    long t = playbin.queryPosition(TimeUnit.NANOSECONDS);
    long start, stop;
    if (rate > 0) {
      // Forward playback: from the current position to the end (-1).
      start = t;
      stop = -1;
    } else {
      // Reverse playback: from the beginning up to the current position.
      start = 0;
      stop = t;
    }

    // Scaling the current rate by f adjusts the effective fps while keeping
    // the playback direction.
    seek(rate * f, start, stop);

    frameRate = ifps;
  }
186 |
187 |
188 | /**
189 | * Sets the relative playback speed of the movie. The rate
190 | * parameters sets the speed where 2.0 will play the movie twice as fast,
191 | * 0.5 will play at half the speed, and -1 will play the movie in normal
192 | * speed in reverse.
193 | *
194 | * @webref movie
195 | * @webBrief Sets the relative playback speed of the movie.
196 | * @usage web_application
197 | * @param irate speed multiplier for movie playback
198 | * @brief Sets the relative playback speed
199 | */
  public void speed(float irate) {
    // If the frameRate() method is called continuously with very similar
    // rate values, playback might become sluggish. This condition attempts
    // to take care of that.
    // NOTE(review): as a consequence, rate changes smaller than 0.1 are
    // silently ignored — confirm this threshold suits fine-grained control.
    if (PApplet.abs(rate - irate) > 0.1) {
      rate = irate;
      frameRate(frameRate); // The framerate is the same, but the rate (speed) could be different.
    }
  }
209 |
210 |
211 | /**
212 | * Returns the length of the movie in seconds. If the movie is 1 minute and
213 | * 20 seconds long the value returned will be 80.0.
214 | *
215 | * @webref movie
216 | * @webBrief Returns the length of the movie in seconds.
217 | * @usage web_application
218 | * @brief Returns length of movie in seconds
219 | */
220 | public float duration() {
221 | long nanosec = playbin.queryDuration(TimeUnit.NANOSECONDS);
222 | return Video.nanoSecToSecFrac(nanosec);
223 | }
224 |
225 |
226 | /**
227 | * Returns the location of the playback head in seconds. For example, if
228 | * the movie has been playing for 4 seconds, the number 4.0 will be returned.
229 | *
230 | * @webref movie
231 | * @webBrief Returns the location of the playback head in seconds.
232 | * @usage web_application
233 | * @brief Returns location of playback head in units of seconds
234 | */
235 | public float time() {
236 | long nanosec = playbin.queryPosition(TimeUnit.NANOSECONDS);
237 | return Video.nanoSecToSecFrac(nanosec);
238 | }
239 |
240 |
241 | /**
242 | * Jumps to a specific location within a movie. The parameter where
243 | * is in terms of seconds. For example, if the movie is 12.2 seconds long,
244 | * calling jump(6.1) would go to the middle of the movie.
245 | *
246 | * @webref movie
247 | * @webBrief Jumps to a specific location within a movie.
248 | * @usage web_application
249 | * @param where position to jump to specified in seconds
250 | * @brief Jumps to a specific location
251 | */
  public void jump(float where) {
    setReady();

    // Round the time to a multiple of the source framerate, in
    // order to eliminate stutter. Suggested by Daniel Shiffman
    if (sourceFrameRate != -1) {
      int frame = (int)(where * sourceFrameRate);
      where = frame / sourceFrameRate;
    }

    // Seek to the new position while preserving the current playback rate.
    long pos = Video.secToNanoLong(where);
    seek(rate, pos, -1);
  }
265 |
266 |
267 | /**
268 | * Returns "true" when a new movie frame is available to read.
269 | *
270 | * @webref movie
271 | * @webBrief Returns "true" when a new movie frame is available to read.
272 | * @usage web_application
273 | * @brief Returns "true" when a new movie frame is available to read.
274 | */
  public boolean available() {
    // Set by the streaming thread when a frame arrives; cleared by read().
    return available;
  }
278 |
279 |
280 | /**
281 | * Plays a movie one time and stops at the last frame.
282 | *
283 | * @webref movie
284 | * @webBrief Plays a movie one time and stops at the last frame.
285 | * @usage web_application
286 | * @brief Plays movie one time and stops at the last frame
287 | */
  public void play() {
    setReady();

    playbin.play();
    playbin.getState();  // block until the state change takes effect

    playing = true;
    paused = false;
  }
297 |
298 |
299 | /**
300 | * Plays a movie continuously, restarting it when it's over.
301 | *
302 | * @webref movie
303 | * @webBrief Plays a movie continuously, restarting it when it's over.
304 | * @usage web_application
305 | * @brief Plays a movie continuously, restarting it when it's over.
306 | */
  public void loop() {
    // The repeat flag is honored by the EOS handler in makeBusConnections().
    repeat = true;
    play();
  }
311 |
312 |
313 | /**
314 | * If a movie is looping, calling noLoop() will cause it to play until the
315 | * end and then stop on the last frame.
316 | *
317 | * @webref movie
318 | * @webBrief If a movie is looping, this will cause it to play until the
319 | * end and then stop on the last frame.
320 | * @usage web_application
321 | * @brief Stops the movie from looping
322 | */
  public void noLoop() {
    setReady();

    // Playback continues; only the restart-at-end behavior is disabled.
    repeat = false;
  }
328 |
329 |
330 | /**
331 | * Pauses a movie during playback. If a movie is started again with play(),
332 | * it will continue from where it was paused.
333 | *
334 | * @webref movie
335 | * @webBrief Pauses a movie during playback.
336 | * @usage web_application
337 | * @brief Pauses the movie
338 | */
  public void pause() {
    setReady();

    playbin.pause();
    playbin.getState();  // block until the state change takes effect

    playing = false;
    paused = true;
  }
348 |
349 |
350 | /**
351 | * Stops a movie from continuing. The playback returns to the beginning so
352 | * when a movie is played, it will begin from the beginning.
353 | *
354 | * @webref movie
355 | * @webBrief Stops a movie from continuing.
356 | * @usage web_application
357 | * @brief Stops the movie
358 | */
  public void stop() {
    setReady();

    playbin.stop();
    playbin.getState();  // block until the state change takes effect

    playing = false;
    paused = false;
  }
368 |
369 |
370 | /**
371 | * Reads the current frame of the movie.
372 | *
373 | * @webref movie
374 | * @webBrief Reads the current frame of the movie.
375 | * @usage web_application
376 | * @brief Reads the current frame
377 | */
  public synchronized void read() {
    if (firstFrame) {
      // Allocate the PImage backing storage now that the source dimensions
      // are known (they only become available with the first frame).
      super.init(sourceWidth, sourceHeight, ARGB, 1);
      firstFrame = false;
    }

    if (useBufferSink) {

      if (bufferSink == null) {
        // The GL renderer caches its buffer sink on this image; pick it up
        // lazily the first time a frame is read.
        Object cache = parent.g.getCache(Movie.this);
        if (cache != null) {
          setBufferSink(cache);
          getSinkMethods();
        }
      }

    } else {
      // Swap the pixel arrays: the frame most recently decoded into
      // copyPixels becomes the visible pixels array, and the previous
      // pixels array becomes the next decode target.
      int[] temp = pixels;
      pixels = copyPixels;
      updatePixels();
      copyPixels = temp;
    }

    available = false;
    newFrame = true;
  }
404 |
405 |
406 | /**
407 | * Change the volume. Values are from 0 to 1.
408 | *
409 | * @param float v
410 | */
411 | public void volume(float v) {
412 | if (playing && PApplet.abs(volume - v) > 0.001f) {
413 |
414 | playbin.setVolume(v);
415 | playbin.getState();
416 |
417 | volume = v;
418 | }
419 | }
420 |
421 |
422 | /**
423 | * Loads the pixel data for the image into its pixels[] array.
424 | */
  @Override
  public synchronized void loadPixels() {
    super.loadPixels();
    if (useBufferSink && bufferSink != null) {
      try {
        // sinkGetMethod will copy the latest buffer to the pixels array,
        // and the pixels will be copied to the texture when the OpenGL
        // renderer needs to draw it.
        sinkGetMethod.invoke(bufferSink, new Object[] { pixels });
      } catch (Exception e) {
        e.printStackTrace();
      }
      // NOTE(review): outdatedPixels is only cleared on the buffer-sink path,
      // so in the non-GL path get() re-runs loadPixels() on every call —
      // confirm whether that is intended.
      outdatedPixels = false;
    }
  }
440 |
441 |
442 | /**
443 | * Reads the color of any pixel or grabs a section of an image.
444 | */
  @Override
  public int get(int x, int y) {
    // Refresh the pixels array from the sink before sampling a pixel.
    if (outdatedPixels) loadPixels();
    return super.get(x, y);
  }
450 |
451 |
452 | /**
453 | * @param w width of pixel rectangle to get
454 | * @param h height of pixel rectangle to get
455 | */
456 | public PImage get(int x, int y, int w, int h) {
457 | if (outdatedPixels) loadPixels();
458 | return super.get(x, y, w, h);
459 | }
460 |
461 |
  @Override
  public PImage copy() {
    // Refresh the pixels array from the sink before duplicating the image.
    if (outdatedPixels) loadPixels();
    return super.copy();
  }
467 |
468 |
  @Override
  protected void getImpl(int sourceX, int sourceY,
                         int sourceWidth, int sourceHeight,
                         PImage target, int targetX, int targetY) {
    // Refresh the pixels array from the sink before extracting the region.
    if (outdatedPixels) loadPixels();
    super.getImpl(sourceX, sourceY, sourceWidth, sourceHeight,
                  target, targetX, targetY);
  }
477 |
478 |
479 | /**
480 | * Check if this movie object is currently playing.
481 | */
482 | public boolean isPlaying() {
483 | return playing;
484 | }
485 |
486 |
487 | /**
488 | * Check if this movie object is currently paused.
489 | */
490 | public boolean isPaused() {
491 | return paused;
492 | }
493 |
494 |
495 | /**
496 | * Check if this movie object is currently looping.
497 | */
498 | public boolean isLooping() {
499 | return repeat;
500 | }
501 |
502 |
503 | ////////////////////////////////////////////////////////////
504 |
505 | // Initialization methods.
506 |
507 |
  /**
   * Builds the GStreamer pipeline for the given movie source. The source is
   * resolved in order: sketch data folder, plain/absolute file path, then a
   * network URI using one of the supported protocols. Dies via parent.die()
   * if none of them works.
   */
  protected void initGStreamer(PApplet parent, String filename) {
    this.parent = parent;

    Video.init();
    playbin = null;

    File file;

    // First check to see if this can be read locally from a file.
    try {
      try {
        // Try a local file using the dataPath. usually this will
        // work ok, but sometimes the dataPath is inside a jar file,
        // which is less fun, so this will crap out.
        file = new File(parent.dataPath(filename));
        if (file.exists()) {
          playbin = new PlayBin("Movie Player");
          playbin.setInputFile(file);
        }
      } catch (Exception e) {
        e.printStackTrace();
      }

      // Read from a file just hanging out in the local folder.
      // this might happen when the video library is used with some
      // other application, or the person enters a full path name
      if (playbin == null) {
        try {
          file = new File(filename);
          if (file.exists()) {
            playbin = new PlayBin("Movie Player");
            playbin.setInputFile(file);
          }
        } catch (Exception e) {
          e.printStackTrace();
        }
      }

      if (playbin == null) {
        // Try network read...
        for (int i = 0; i < supportedProtocols.length; i++) {
          if (filename.startsWith(supportedProtocols[i] + "://")) {
            try {
              playbin = new PlayBin("Movie Player");
              playbin.setURI(URI.create(filename));
              break;
            } catch (Exception e) {
              e.printStackTrace();
            }
          }
        }
      }
    } catch (SecurityException se) {
      // online, whups. catch the security exception out here rather than
      // doing it three times (or whatever) for each of the cases above.
    }

    if (playbin == null) {
      parent.die("Could not load movie file " + filename, null);
    }

    // Attach the app sink that receives decoded frames and the bus
    // listeners for error/end-of-stream handling.
    initSink();

    playbin.setVideoSink(rgbSink);
    makeBusConnections(playbin.getBus());

    // We've got a valid movie! let's rock.
    try {
      this.filename = filename; // for error messages

      // register methods
      parent.registerMethod("dispose", this);
      parent.registerMethod("post", this);

      setEventHandlerObject(parent);

      // Sentinel values: real dimensions/framerate are filled in by the
      // sample/preroll listeners once the stream is negotiated.
      sourceWidth = sourceHeight = 0;
      sourceFrameRate = -1;
      frameRate = -1;
      rate = 1.0f;
      volume = -1;
      ready = false;
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
594 |
595 |
596 | /**
597 | * Uses a generic object as handler of the movie. This object should have a
598 | * movieEvent method that receives a Movie argument. This method will
599 | * be called upon a new frame read event.
600 | *
601 | */
602 | protected void setEventHandlerObject(Object obj) {
603 | eventHandler = obj;
604 |
605 | try {
606 | movieEventMethod = eventHandler.getClass().getMethod("movieEvent", Movie.class);
607 | return;
608 | } catch (Exception e) {
609 | // no such method, or an error... which is fine, just ignore
610 | }
611 |
612 | // movieEvent can alternatively be defined as receiving an Object, to allow
613 | // Processing mode implementors to support the video library without linking
614 | // to it at build-time.
615 | try {
616 | movieEventMethod = eventHandler.getClass().getMethod("movieEvent", Object.class);
617 | } catch (Exception e) {
618 | // no such method, or an error... which is fine, just ignore
619 | }
620 | }
621 |
622 |
  protected void initSink() {
    rgbSink = new AppSink("movie sink");
    // Make the sink emit new-sample/new-preroll signals so the listeners
    // below are invoked from the streaming thread.
    rgbSink.set("emit-signals", true);
    newSampleListener = new NewSampleListener();
    newPrerollListener = new NewPrerollListener();
    rgbSink.connect(newSampleListener);
    rgbSink.connect(newPrerollListener);

    useBufferSink = Video.useGLBufferSink && parent.g.isGL();
    // Request the byte layout matching the host endianness (and the GL
    // renderer, when the buffer sink is used).
    if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
      if (useBufferSink) {
        rgbSink.setCaps(Caps.fromString("video/x-raw, format=RGBA"));
      } else {
        rgbSink.setCaps(Caps.fromString("video/x-raw, format=BGRA"));
      }
    } else {
      rgbSink.setCaps(Caps.fromString("video/x-raw, format=ARGB"));
    }
  }
642 |
643 |
  // Moves the pipeline out of the NULL state once, so position queries and
  // seeks issued by the playback methods can work.
  protected void setReady() {
    if (!ready) {
      playbin.setState(org.freedesktop.gstreamer.State.READY);
      newFrame = false;
      ready = true;
    }
  }
651 |
652 |
  private void makeBusConnections(Bus bus) {
    // Report pipeline errors on the console.
    bus.connect(new Bus.ERROR() {
      public void errorMessage(GstObject arg0, int arg1, String arg2) {
        System.err.println(arg0 + " : " + arg2);
      }
    });
    // On end-of-stream: restart when looping, otherwise stop playing.
    bus.connect(new Bus.EOS() {
      public void endOfStream(GstObject arg0) {
        if (repeat) {
          if (0 < rate) {
            // Playing forward, so we return to the beginning
            jump(0);
          } else {
            // Playing backwards, so we go to the end.
            jump(duration());
          }

          // The rate is set automatically to 1 when restarting the
          // stream, so we need to call frameRate in order to reset
          // to the latest fps rate.
          frameRate(frameRate);
        } else {
          playing = false;
        }
      }
    });
  }
680 |
681 |
682 |
683 | ////////////////////////////////////////////////////////////
684 |
685 | // Stream event handling.
686 |
687 |
  // Issues a flushing, accurate seek on the GStreamer executor thread.
  // A stop of -1 means "play to the end of the stream" (SeekType.NONE);
  // an explicit stop bounds the segment (used for reverse playback).
  private void seek(double rate, long start, long stop) {
    Gst.invokeLater(new Runnable() {
      public void run() {
        boolean res;
        if (stop == -1) {
          res = playbin.seek(rate, Format.TIME, EnumSet.of(SeekFlags.FLUSH, SeekFlags.ACCURATE), SeekType.SET, start, SeekType.NONE, stop);
        } else {
          res = playbin.seek(rate, Format.TIME, EnumSet.of(SeekFlags.FLUSH, SeekFlags.ACCURATE), SeekType.SET, start, SeekType.SET, stop);
        }
        if (!res) {
          PGraphics.showWarning("Seek operation failed.");
        }
      }
    });
  }
703 |
704 |
705 | private void fireMovieEvent() {
706 | if (movieEventMethod != null) {
707 | try {
708 | movieEventMethod.invoke(eventHandler, this);
709 | } catch (Exception e) {
710 | System.err.println("error, disabling movieEvent() for " + filename);
711 | e.printStackTrace();
712 | movieEventMethod = null;
713 | }
714 | }
715 | }
716 |
717 |
718 | ////////////////////////////////////////////////////////////
719 |
720 | // Buffer source interface.
721 |
722 |
723 | /**
724 | * Sets the object to use as destination for the frames read from the stream.
725 | * The color conversion mask is automatically set to the one required to
726 | * copy the frames to OpenGL.
727 | *
728 | * NOTE: This is not official API and may/will be removed at any time.
729 | *
730 | * @param Object dest
731 | */
732 | public void setBufferSink(Object sink) {
733 | bufferSink = sink;
734 | }
735 |
736 |
737 | /**
738 | * NOTE: This is not official API and may/will be removed at any time.
739 | */
740 | public boolean hasBufferSink() {
741 | return bufferSink != null;
742 | }
743 |
744 |
745 | /**
746 | * NOTE: This is not official API and may/will be removed at any time.
747 | */
748 | public synchronized void disposeBuffer(Object buf) {
749 | ((Buffer)buf).dispose();
750 | }
751 |
752 |
753 | protected void getSinkMethods() {
754 | try {
755 | sinkCopyMethod = bufferSink.getClass().getMethod("copyBufferFromSource",
756 | new Class[] { Object.class, ByteBuffer.class, int.class, int.class });
757 | } catch (Exception e) {
758 | throw new RuntimeException("Movie: provided sink object doesn't have a " +
759 | "copyBufferFromSource method.");
760 | }
761 |
762 | try {
763 | sinkSetMethod = bufferSink.getClass().getMethod("setBufferSource",
764 | new Class[] { Object.class });
765 | sinkSetMethod.invoke(bufferSink, new Object[] { this });
766 | } catch (Exception e) {
767 | throw new RuntimeException("Movie: provided sink object doesn't have a " +
768 | "setBufferSource method.");
769 | }
770 |
771 | try {
772 | sinkDisposeMethod = bufferSink.getClass().getMethod("disposeSourceBuffer",
773 | new Class[] { });
774 | } catch (Exception e) {
775 | throw new RuntimeException("Movie: provided sink object doesn't have " +
776 | "a disposeSourceBuffer method.");
777 | }
778 |
779 | try {
780 | sinkGetMethod = bufferSink.getClass().getMethod("getBufferPixels",
781 | new Class[] { int[].class });
782 | } catch (Exception e) {
783 | throw new RuntimeException("Movie: provided sink object doesn't have " +
784 | "a getBufferPixels method.");
785 | }
786 | }
787 |
788 |
  // Registered with the sketch via registerMethod("post", this) in
  // initGStreamer(): after each draw, lets the buffer sink dispose the
  // native buffer it copied during the frame.
  public synchronized void post() {
    if (useBufferSink && sinkDisposeMethod != null) {
      try {
        sinkDisposeMethod.invoke(bufferSink, new Object[] {});
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
798 |
799 |
800 | ////////////////////////////////////////////////////////////
801 |
802 | // Listener of GStreamer events.
803 |
804 |
805 | private class NewSampleListener implements AppSink.NEW_SAMPLE {
806 |
807 | @Override
808 | public FlowReturn newSample(AppSink sink) {
809 | Sample sample = sink.pullSample();
810 |
811 | // Pull out metadata from caps
812 | Structure capsStruct = sample.getCaps().getStructure(0);
813 | sourceWidth = capsStruct.getInteger("width");
814 | sourceHeight = capsStruct.getInteger("height");
815 | Fraction fps = capsStruct.getFraction("framerate");
816 | sourceFrameRate = (float)fps.numerator / fps.denominator;
817 |
818 | // Set the playback rate to the file's native framerate
819 | // unless the user has already set a custom one
820 | if (frameRate == -1.0) {
821 | frameRate = sourceFrameRate;
822 | }
823 |
824 | Buffer buffer = sample.getBuffer();
825 | ByteBuffer bb = buffer.map(false);
826 | if (bb != null) {
827 |
828 | // If the EDT is still copying data from the buffer, just drop this frame
829 | if (!bufferLock.tryLock()) {
830 | return FlowReturn.OK;
831 | }
832 |
833 | available = true;
834 | if (useBufferSink && bufferSink != null) { // The native buffer from GStreamer is copied to the buffer sink.
835 |
836 | try {
837 | sinkCopyMethod.invoke(bufferSink, new Object[] { buffer, bb, sourceWidth, sourceHeight });
838 | if (playing) {
839 | fireMovieEvent();
840 | }
841 | } catch (Exception e) {
842 | e.printStackTrace();
843 | } finally {
844 | bufferLock.unlock();
845 | }
846 |
847 | } else {
848 | IntBuffer rgb = bb.asIntBuffer();
849 |
850 | if (copyPixels == null) {
851 | copyPixels = new int[sourceWidth * sourceHeight];
852 | }
853 |
854 | try {
855 | rgb.get(copyPixels, 0, width * height);
856 | if (playing) {
857 | fireMovieEvent();
858 | }
859 | } finally {
860 | bufferLock.unlock();
861 | }
862 |
863 | }
864 |
865 | buffer.unmap();
866 | }
867 | sample.dispose();
868 | return FlowReturn.OK;
869 | }
870 | }
871 |
872 |
873 | private class NewPrerollListener implements AppSink.NEW_PREROLL {
874 | @Override
875 | public FlowReturn newPreroll(AppSink sink) {
876 | Sample sample = sink.pullPreroll();
877 |
878 | // Pull out metadata from caps
879 | Structure capsStruct = sample.getCaps().getStructure(0);
880 | sourceWidth = capsStruct.getInteger("width");
881 | sourceHeight = capsStruct.getInteger("height");
882 | Fraction fps = capsStruct.getFraction("framerate");
883 | sourceFrameRate = (float)fps.numerator / fps.denominator;
884 |
885 | // Set the playback rate to the file's native framerate
886 | // unless the user has already set a custom one
887 | if (frameRate == -1.0) {
888 | frameRate = sourceFrameRate;
889 | }
890 |
891 | sample.dispose();
892 | return FlowReturn.OK;
893 | }
894 | }
895 | }
896 |
--------------------------------------------------------------------------------
/src/processing/video/Capture.java:
--------------------------------------------------------------------------------
1 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
2 |
3 | /*
4 | Part of the Processing project - http://processing.org
5 |
6 | Copyright (c) 2012-22 The Processing Foundation
7 | Copyright (c) 2004-12 Ben Fry and Casey Reas
8 | GStreamer implementation ported from GSVideo library by Andres Colubri
9 | The previous version of this code was developed by Hernando Barragan
10 |
11 |
12 | This library is free software; you can redistribute it and/or
13 | modify it under the terms of the GNU Lesser General Public
14 | License as published by the Free Software Foundation; either
15 | version 2.1 of the License, or (at your option) any later version.
16 |
17 | This library is distributed in the hope that it will be useful,
18 | but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | Lesser General Public License for more details.
21 |
22 | You should have received a copy of the GNU Lesser General
23 | Public License along with this library; if not, write to the
24 | Free Software Foundation, Inc., 59 Temple Place, Suite 330,
25 | Boston, MA 02111-1307 USA
26 | */
27 |
28 | package processing.video;
29 |
30 | import processing.core.*;
31 |
32 | import java.nio.*;
33 | import java.util.ArrayList;
34 | import java.util.concurrent.TimeUnit;
35 | import java.util.concurrent.locks.Lock;
36 | import java.util.concurrent.locks.ReentrantLock;
37 | import java.util.EnumSet;
38 | import java.util.List;
39 | import java.lang.reflect.*;
40 |
41 | import org.freedesktop.gstreamer.*;
42 | import org.freedesktop.gstreamer.Buffer;
43 | import org.freedesktop.gstreamer.device.*;
44 | import org.freedesktop.gstreamer.elements.*;
45 | import org.freedesktop.gstreamer.event.SeekFlags;
46 | import org.freedesktop.gstreamer.event.SeekType;
47 |
48 |
49 | /**
50 | * Datatype for storing and manipulating video frames from an attached
51 | * capture device such as a camera. Use Capture.list() to show
52 | * the names of any attached devices. Using the version of the constructor
53 | * without name will attempt to use the last device used by a
54 | * QuickTime program.
55 | *
56 | * @webref capture
57 | * @webBrief Datatype for storing and manipulating video frames from an
58 | * attached capture device such as a camera.
59 | * @usage application
60 | */
61 | public class Capture extends PImage implements PConstants {
62 | public Pipeline pipeline;
63 |
64 | // The source resolution and framerate of the device
65 | public int sourceWidth;
66 | public int sourceHeight;
67 | public float sourceFrameRate;
68 |
69 | public float frameRate;
70 | protected float rate;
71 |
72 | protected boolean capturing = false;
73 |
74 | protected Method captureEventMethod;
75 | protected Object eventHandler;
76 |
77 | protected boolean available;
78 | protected boolean ready;
79 | protected boolean newFrame;
80 |
81 | protected AppSink rgbSink = null;
82 | protected int[] copyPixels = null;
83 |
84 | protected boolean firstFrame = true;
85 |
86 | protected boolean useBufferSink = false;
87 | protected boolean outdatedPixels = true;
88 | protected Object bufferSink;
89 | protected Method sinkCopyMethod;
90 | protected Method sinkSetMethod;
91 | protected Method sinkDisposeMethod;
92 | protected Method sinkGetMethod;
93 |
94 | protected String device;
95 | protected static List devices; // we're caching this list for speed reasons
96 |
97 | NewSampleListener newSampleListener;
98 | NewPrerollListener newPrerollListener;
99 | private final Lock bufferLock = new ReentrantLock();
100 |
101 |
102 | /**
103 | * Open the default capture device
104 | * @param parent PApplet, typically "this"
105 | */
106 | public Capture(PApplet parent) {
107 | // Attempt to use a default resolution
108 | this(parent, 640, 480, null, 30);
109 | }
110 |
111 |
112 | /**
113 | * Open a specific capture device
114 | * @param device device name
115 | * @see Capture#list()
116 | * @see Capture#listRawNames()
117 | */
118 | public Capture(PApplet parent, String device) {
119 | // Attempt to use a default resolution
120 | this(parent, 640, 480, device, 30);
121 | }
122 |
123 |
124 | /**
125 | * Open the default capture device with a given resolution
126 | * @param width width in pixels
127 | * @param height height in pixels
128 | */
129 | public Capture(PApplet parent, int width, int height) {
130 | this(parent, width, height, null, 30);
131 | }
132 |
133 |
134 | /**
135 | * Open the default capture device with a given resolution and framerate
136 | * @param fps frames per second
137 | */
138 | public Capture(PApplet parent, int width, int height, float fps) {
139 | this(parent, width, height, null, fps);
140 | }
141 |
142 |
143 | /**
144 | * Open a specific capture device with a given resolution
145 | * @see Capture#list()
146 | */
147 | public Capture(PApplet parent, int width, int height, String device) {
148 | this(parent, width, height, device, 30);
149 | }
150 |
151 |
152 | /**
153 | * Open a specific capture device with a given framerate
154 | * @see Capture#list()
155 | */
156 | public Capture(PApplet parent, String device, float fps) {
157 | this(parent, 640, 480, device, fps);
158 | }
159 |
160 |
161 | /**
162 | * Open a specific capture device with a given resolution and framerate
163 | * @see Capture#list()
164 | */
165 | public Capture(PApplet parent, int width, int height, String device, float fps) {
166 | super(width, height, RGB);
167 | this.device = device;
168 | this.frameRate = fps;
169 | initGStreamer(parent);
170 | }
171 |
172 |
173 | /**
174 | * Disposes all the native resources associated to this capture device.
175 | *
176 | * NOTE: This is not official API and may/will be removed at any time.
177 | */
178 | public void dispose() {
179 | if (pipeline != null) {
180 | try {
181 | if (pipeline.isPlaying()) {
182 | pipeline.stop();
183 | pipeline.getState();
184 | }
185 | } catch (Exception e) {
186 | }
187 |
188 | pixels = null;
189 |
190 | if (rgbSink != null) {
191 | rgbSink.disconnect(newSampleListener);
192 | rgbSink.disconnect(newPrerollListener);
193 | rgbSink.dispose();
194 | }
195 | pipeline.setState(org.freedesktop.gstreamer.State.NULL);
196 | pipeline.getState();
197 | pipeline.getBus().dispose();
198 | pipeline.dispose();
199 |
200 | parent.g.removeCache(this);
201 | parent.unregisterMethod("dispose", this);
202 | parent.unregisterMethod("post", this);
203 | }
204 | }
205 |
206 |
207 | /**
208 | * Finalizer of the class.
209 | */
210 | protected void finalize() throws Throwable {
211 | try {
212 | dispose();
213 | } finally {
214 | // super.finalize();
215 | }
216 | }
217 |
218 |
219 | /**
220 | * Sets how often frames are read from the capture device. Setting the fps
221 | * parameter to 4, for example, will cause 4 frames to be read per second.
222 | *
223 | * @webref capture
224 | * @webBrief Sets how often frames are read from the capture device.
225 | * @usage web_application
226 | * @param ifps speed of the capture device in frames per second
227 | * @brief Sets the target frame rate
228 | */
229 | public void frameRate(float ifps) {
230 | float f = (0 < ifps && 0 < frameRate) ? ifps / frameRate : 1;
231 |
232 | long t = pipeline.queryPosition(TimeUnit.NANOSECONDS);
233 | long start, stop;
234 | if (rate > 0) {
235 | start = t;
236 | stop = -1;
237 | } else {
238 | start = 0;
239 | stop = t;
240 | }
241 |
242 | seek(rate * f, start, stop);
243 |
244 | frameRate = ifps;
245 | }
246 |
247 |
248 | /**
249 | * Returns "true" when a new frame from the device is available to read.
250 | *
251 | * @webref capture
252 | * @webBrief Returns "true" when a new frame from the device is available to read.
253 | * @usage web_application
254 | * @brief Returns "true" when a new frame is available to read.
255 | */
256 | public boolean available() {
257 | return available;
258 | }
259 |
260 |
261 | /**
262 | * Starts capturing frames from the selected device.
263 | *
264 | * @webref capture
265 | * @webBrief Starts capturing frames from an attached device.
266 | * @usage web_application
267 | * @brief Starts video capture
268 | */
269 | public void start() {
270 | setReady();
271 |
272 | pipeline.play();
273 | pipeline.getState();
274 |
275 | capturing = true;
276 | }
277 |
278 |
279 | /**
280 | * Stops capturing frames from an attached device.
281 | *
282 | * @webref capture
283 | * @webBrief Stops capturing frames from an attached device.
284 | * @usage web_application
285 | * @brief Stops video capture
286 | */
287 | public void stop() {
288 | setReady();
289 |
290 | pipeline.stop();
291 | pipeline.getState();
292 |
293 | capturing = false;
294 | }
295 |
296 |
297 | /**
298 | * Reads the current frame of the device.
299 | *
300 | * @webref capture
301 | * @webBrief Reads the current frame of the device.
302 | * @usage web_application
303 | * @brief Reads the current frame
304 | */
305 | public synchronized void read() {
306 | if (firstFrame) {
307 | super.init(sourceWidth, sourceHeight, RGB, 1);
308 | firstFrame = false;
309 | }
310 |
311 | if (useBufferSink) {
312 |
313 | if (bufferSink == null) {
314 | Object cache = parent.g.getCache(Capture.this);
315 | if (cache != null) {
316 | setBufferSink(cache);
317 | getSinkMethods();
318 | }
319 | }
320 |
321 | } else {
322 | int[] temp = pixels;
323 | pixels = copyPixels;
324 | updatePixels();
325 | copyPixels = temp;
326 | }
327 |
328 | available = false;
329 | newFrame = true;
330 | }
331 |
332 |
333 | /**
334 | * Loads the pixel data for the image into its pixels[] array.
335 | */
336 | @Override
337 | public synchronized void loadPixels() {
338 | super.loadPixels();
339 |
340 | if (useBufferSink && bufferSink != null) {
341 | try {
342 | // sinkGetMethod will copy the latest buffer to the pixels array,
343 | // and the pixels will be copied to the texture when the OpenGL
344 | // renderer needs to draw it.
345 | sinkGetMethod.invoke(bufferSink, new Object[] { pixels });
346 | } catch (Exception e) {
347 | e.printStackTrace();
348 | }
349 |
350 | outdatedPixels = false;
351 | }
352 | }
353 |
354 |
355 | /**
356 | * Reads the color of any pixel or grabs a section of an image.
357 | */
358 | @Override
359 | public int get(int x, int y) {
360 | if (outdatedPixels) loadPixels();
361 | return super.get(x, y);
362 | }
363 |
364 |
365 | /**
366 | * @param w width of pixel rectangle to get
367 | * @param h height of pixel rectangle to get
368 | */
369 | public PImage get(int x, int y, int w, int h) {
370 | if (outdatedPixels) loadPixels();
371 | return super.get(x, y, w, h);
372 | }
373 |
374 |
375 | @Override
376 | public PImage copy() {
377 | if (outdatedPixels) loadPixels();
378 | return super.copy();
379 | }
380 |
381 |
382 | protected void getImpl(int sourceX, int sourceY,
383 | int sourceWidth, int sourceHeight,
384 | PImage target, int targetX, int targetY) {
385 | if (outdatedPixels) loadPixels();
386 | super.getImpl(sourceX, sourceY, sourceWidth, sourceHeight,
387 | target, targetX, targetY);
388 | }
389 |
390 |
391 | /**
392 | * Check if this device object is currently capturing.
393 | */
394 | public boolean isCapturing() {
395 | return capturing;
396 | }
397 |
398 |
399 | ////////////////////////////////////////////////////////////
400 |
401 | // Initialization methods.
402 |
403 |
404 | protected void initGStreamer(PApplet parent) {
405 | this.parent = parent;
406 | pipeline = null;
407 |
408 | Video.init();
409 |
410 | if(device == null) {
411 | String[] devices = list();
412 | if(devices != null && devices.length > 0) {
413 | device = devices[0];
414 | } else {
415 | throw new IllegalStateException("Could not find any devices");
416 | }
417 | }
418 |
419 | device = device.trim();
420 |
421 | int p = device.indexOf("pipeline:");
422 | if (p == 0) {
423 | initCustomPipeline(device.substring(9));
424 | } else {
425 | initDevicePipeline();
426 | }
427 |
428 | try {
429 | // Register methods
430 | parent.registerMethod("dispose", this);
431 | parent.registerMethod("post", this);
432 |
433 | setEventHandlerObject(parent);
434 |
435 | sourceWidth = sourceHeight = 0;
436 | sourceFrameRate = -1;
437 | frameRate = -1;
438 | rate = 1.0f;
439 | ready = false;
440 | } catch (Exception e) {
441 | e.printStackTrace();
442 | }
443 | }
444 |
445 |
446 | public static String fpsToFramerate(float fps) {
447 | String formatted = Float.toString(fps);
448 | // This presumes the delimitter is always a dot
449 | int i = formatted.indexOf('.');
450 | if (Math.floor(fps) != fps) {
451 | int denom = (int)Math.pow(10, formatted.length()-i-1);
452 | int num = (int)(fps * denom);
453 | return num + "/" + denom;
454 | } else {
455 | return (int)fps + "/1";
456 | }
457 | }
458 |
459 |
460 | protected void initCustomPipeline(String pstr) {
461 | String PIPELINE_END = " ! videorate ! videoscale ! videoconvert ! appsink name=sink";
462 |
463 | pipeline = (Pipeline) Gst.parseLaunch(pstr + PIPELINE_END);
464 |
465 | String caps = ", width=" + width + ", height=" + height;
466 | if (frameRate != 0.0) {
467 | caps += ", framerate=" + fpsToFramerate(frameRate);
468 | }
469 |
470 | rgbSink = (AppSink) pipeline.getElementByName("sink");
471 | rgbSink.set("emit-signals", true);
472 | newSampleListener = new NewSampleListener();
473 | newPrerollListener = new NewPrerollListener();
474 | rgbSink.connect(newSampleListener);
475 | rgbSink.connect(newPrerollListener);
476 |
477 | useBufferSink = Video.useGLBufferSink && parent.g.isGL();
478 | if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
479 | if (useBufferSink) {
480 | rgbSink.setCaps(Caps.fromString("video/x-raw, format=RGBx" + caps));
481 | } else {
482 | rgbSink.setCaps(Caps.fromString("video/x-raw, format=BGRx" + caps));
483 | }
484 | } else {
485 | rgbSink.setCaps(Caps.fromString("video/x-raw, format=xRGB" + caps));
486 | }
487 |
488 | makeBusConnections(pipeline.getBus());
489 | }
490 |
491 |
492 | protected void initDevicePipeline() {
493 | Element srcElement = null;
494 | if (device == null) {
495 | // Use the default device from GStreamer
496 | srcElement = ElementFactory.make("autovideosrc", null);
497 | } else {
498 | // Look for device
499 | if (devices == null) {
500 | DeviceMonitor monitor = new DeviceMonitor();
501 | monitor.addFilter("Video/Source", null);
502 | devices = monitor.getDevices();
503 | monitor.close();
504 | }
505 |
506 | for (int i=0; i < devices.size(); i++) {
507 | String deviceName = assignDisplayName(devices.get(i), i);
508 | if (devices.get(i).getDisplayName().equals(device) || devices.get(i).getName().equals(device) || deviceName.equals(device)) {
509 | srcElement = devices.get(i).createElement(null);
510 | break;
511 | }
512 | }
513 |
514 | // Error out if we got passed an invalid device name
515 | if (srcElement == null) {
516 | throw new RuntimeException("Could not find device " + device);
517 | }
518 | }
519 |
520 | pipeline = new Pipeline();
521 |
522 | Element videoscale = ElementFactory.make("videoscale", null);
523 | Element videoconvert = ElementFactory.make("videoconvert", null);
524 | Element capsfilter = ElementFactory.make("capsfilter", null);
525 |
526 | String frameRateString;
527 | if (frameRate != 0.0) {
528 | frameRateString = ", framerate=" + fpsToFramerate(frameRate);
529 | } else {
530 | System.err.println("The capture framerate cannot be zero!");
531 | return;
532 | }
533 |
534 | capsfilter.set("caps", Caps.fromString("video/x-raw, width=" + width + ", height=" + height + frameRateString));
535 |
536 | initSink();
537 |
538 | pipeline.add(srcElement);
539 | pipeline.add(videoscale);
540 | pipeline.add(videoconvert);
541 | pipeline.add(capsfilter);
542 | pipeline.add(rgbSink);
543 |
544 | srcElement.link(videoscale);
545 | videoscale.link(videoconvert);
546 | videoconvert.link(capsfilter);
547 | capsfilter.link(rgbSink);
548 |
549 | makeBusConnections(pipeline.getBus());
550 | }
551 |
552 |
553 | /**
554 | * Uses a generic object as handler of the capture. This object should have a
555 | * captureEvent method that receives a Capture argument. This method will
556 | * be called upon a new frame read event.
557 | *
558 | */
559 | protected void setEventHandlerObject(Object obj) {
560 | eventHandler = obj;
561 |
562 | try {
563 | captureEventMethod = eventHandler.getClass().getMethod("captureEvent", Capture.class);
564 | return;
565 | } catch (Exception e) {
566 | // no such method, or an error... which is fine, just ignore
567 | }
568 |
569 | // captureEvent can alternatively be defined as receiving an Object, to allow
570 | // Processing mode implementors to support the video library without linking
571 | // to it at build-time.
572 | try {
573 | captureEventMethod = eventHandler.getClass().getMethod("captureEvent", Object.class);
574 | } catch (Exception e) {
575 | // no such method, or an error... which is fine, just ignore
576 | }
577 | }
578 |
579 |
580 | protected void initSink() {
581 | rgbSink = new AppSink("capture sink");
582 | rgbSink.set("emit-signals", true);
583 | newSampleListener = new NewSampleListener();
584 | newPrerollListener = new NewPrerollListener();
585 | rgbSink.connect(newSampleListener);
586 | rgbSink.connect(newPrerollListener);
587 |
588 | useBufferSink = Video.useGLBufferSink && parent.g.isGL();
589 | if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
590 | if (useBufferSink) rgbSink.setCaps(Caps.fromString("video/x-raw, format=RGBx"));
591 | else rgbSink.setCaps(Caps.fromString("video/x-raw, format=BGRx"));
592 | } else {
593 | rgbSink.setCaps(Caps.fromString("video/x-raw, format=xRGB"));
594 | }
595 | }
596 |
597 |
598 | protected void setReady() {
599 | if (!ready) {
600 | pipeline.setState(org.freedesktop.gstreamer.State.READY);
601 | newFrame = false;
602 | ready = true;
603 | }
604 | }
605 |
606 |
607 | private void makeBusConnections(Bus bus) {
608 | bus.connect(new Bus.ERROR() {
609 | public void errorMessage(GstObject arg0, int arg1, String arg2) {
610 | System.err.println(arg0 + " : " + arg2);
611 | }
612 | });
613 | bus.connect(new Bus.EOS() {
614 | public void endOfStream(GstObject arg0) {
615 | try {
616 | stop();
617 | } catch (Exception ex) {
618 | ex.printStackTrace();
619 | }
620 | }
621 | });
622 | }
623 |
624 |
625 | ////////////////////////////////////////////////////////////
626 |
627 | // Stream event handling.
628 |
629 |
630 | private void seek(double rate, long start, long stop) {
631 | Gst.invokeLater(new Runnable() {
632 | public void run() {
633 | boolean res = pipeline.seek(rate, Format.TIME, EnumSet.of(SeekFlags.FLUSH, SeekFlags.ACCURATE), SeekType.SET, start, SeekType.SET, stop);
634 | if (!res) {
635 | PGraphics.showWarning("Seek operation failed.");
636 | }
637 | }
638 | });
639 | }
640 |
641 |
642 | private void fireCaptureEvent() {
643 | if (captureEventMethod != null) {
644 | try {
645 | captureEventMethod.invoke(eventHandler, this);
646 | } catch (Exception e) {
647 | System.err.println("error, disabling captureEvent()");
648 | e.printStackTrace();
649 | captureEventMethod = null;
650 | }
651 | }
652 | }
653 |
654 |
655 | ////////////////////////////////////////////////////////////
656 |
657 | // Buffer source interface.
658 |
659 |
660 | /**
661 | * Sets the object to use as destination for the frames read from the stream.
662 | * The color conversion mask is automatically set to the one required to
663 | * copy the frames to OpenGL.
664 | *
665 | * NOTE: This is not official API and may/will be removed at any time.
666 | *
667 | * @param Object dest
668 | */
669 | public void setBufferSink(Object sink) {
670 | bufferSink = sink;
671 | }
672 |
673 |
674 | /**
675 | * NOTE: This is not official API and may/will be removed at any time.
676 | */
677 | public boolean hasBufferSink() {
678 | return bufferSink != null;
679 | }
680 |
681 |
682 | /**
683 | * NOTE: This is not official API and may/will be removed at any time.
684 | */
685 | public synchronized void disposeBuffer(Object buf) {
686 | ((Buffer)buf).dispose();
687 | }
688 |
689 |
690 | protected void getSinkMethods() {
691 | try {
692 | sinkCopyMethod = bufferSink.getClass().getMethod("copyBufferFromSource",
693 | new Class[] { Object.class, ByteBuffer.class, int.class, int.class });
694 | } catch (Exception e) {
695 | throw new RuntimeException("Capture: provided sink object doesn't have a " +
696 | "copyBufferFromSource method.");
697 | }
698 |
699 | try {
700 | sinkSetMethod = bufferSink.getClass().getMethod("setBufferSource",
701 | new Class[] { Object.class });
702 | sinkSetMethod.invoke(bufferSink, new Object[] { this });
703 | } catch (Exception e) {
704 | throw new RuntimeException("Capture: provided sink object doesn't have a " +
705 | "setBufferSource method.");
706 | }
707 |
708 | try {
709 | sinkDisposeMethod = bufferSink.getClass().getMethod("disposeSourceBuffer",
710 | new Class[] { });
711 | } catch (Exception e) {
712 | throw new RuntimeException("Capture: provided sink object doesn't have " +
713 | "a disposeSourceBuffer method.");
714 | }
715 |
716 | try {
717 | sinkGetMethod = bufferSink.getClass().getMethod("getBufferPixels",
718 | new Class[] { int[].class });
719 | } catch (Exception e) {
720 | throw new RuntimeException("Capture: provided sink object doesn't have " +
721 | "a getBufferPixels method.");
722 | }
723 | }
724 |
725 |
726 | public synchronized void post() {
727 | if (useBufferSink && sinkDisposeMethod != null) {
728 | try {
729 | sinkDisposeMethod.invoke(bufferSink, new Object[] {});
730 | } catch (Exception e) {
731 | e.printStackTrace();
732 | }
733 | }
734 | }
735 |
736 | /**
737 | * Returns a list of all capture devices, using the device's pretty display name.
738 | * Multiple devices can have identical display names, appending ' #n' to devices
739 | * with duplicate display names.
740 | * @return array of device names
741 | * @webref capture
742 | * @webBrief Get a list of all capture device names
743 | */
744 | static public String[] list() {
745 | Video.init();
746 |
747 | String[] out;
748 |
749 | DeviceMonitor monitor = new DeviceMonitor();
750 | monitor.addFilter("Video/Source", null);
751 | devices = monitor.getDevices();
752 | monitor.close();
753 |
754 | out = new String[devices.size()];
755 | for (int i = 0; i < devices.size(); i++) {
756 | Device dev = devices.get(i);
757 | out[i] = checkCameraDuplicates(dev) > 1 ? assignDisplayName(dev, i) : dev.getDisplayName();
758 | }
759 |
760 | return out;
761 | }
762 |
763 | // This is a temporary addition until it's decided how to bring back resolution/framerate caps to the official API.
764 | // The old way of doing things is still listed in the video tutorial:
765 | // https://processing.org/tutorials/video
766 | static public String[] getCapabilities(String device) {
767 | for (int i=0; i < devices.size(); i++) {
768 | String deviceName = assignDisplayName(devices.get(i), i);
769 | if (devices.get(i).getDisplayName().equals(device) || devices.get(i).getName().equals(device) || deviceName.equals(device)) {
770 | return parseCaps(devices.get(i));
771 | }
772 | }
773 | return new String[]{};
774 | }
775 |
776 | static private String[] parseCaps(Device dev) {
777 | String[] caps = dev.getCaps().toString().split(";");
778 | ArrayList devCaps = new ArrayList();
779 |
780 | for (String cap: caps) {
781 | if (cap.indexOf("video/x-raw,") == -1) continue; // Looking for raw caps (excluding GLMemory stuff)
782 |
783 | int indexWidth = cap.indexOf("width");
784 | int indexHeight = cap.indexOf("height");
785 | int indexFramerate = cap.indexOf("framerate");
786 |
787 | String stringWidth = "";
788 | String stringHeight = "";
789 | String stringFramerate = "";
790 |
791 | if (0 < indexWidth && 0 < indexHeight && 0 < indexFramerate) {
792 | stringWidth = cap.substring(indexWidth, cap.indexOf(',', indexWidth));
793 | stringHeight = cap.substring(indexHeight, cap.indexOf(", format", indexHeight));
794 | stringFramerate = cap.substring(indexFramerate, cap.indexOf(']', indexFramerate));
795 | }
796 | // PApplet.println("=======>", cap);
797 | if (0 < stringHeight.indexOf("{")) {
798 | // A list of heights... something like "height=(int){ 448, 600 }
799 | stringHeight = stringHeight.substring(13, stringHeight.length() - 1);
800 | String[] values = stringHeight.split(",");
801 | for (String value: values) {
802 | stringHeight = "height=(int)" + value.trim();
803 | addCapStringsToList(stringWidth, stringHeight, stringFramerate, devCaps);
804 | }
805 | } else {
806 | addCapStringsToList(stringWidth, stringHeight, stringFramerate, devCaps);
807 | }
808 | }
809 |
810 | String[] out = new String[0];
811 | return devCaps.toArray(out);
812 | }
813 |
814 | static private void addCapStringsToList(String stringWidth, String stringHeight, String stringFramerate, ArrayList devCaps) {
815 | if (0 < stringWidth.split("=").length) { // Expecting a string of the form "width=(int)1600"
816 | stringWidth = stringWidth.substring(11);
817 | try {
818 | Integer.parseInt(stringWidth);
819 | } catch (NumberFormatException ex) {
820 | stringHeight = "";
821 | }
822 | }
823 | if (0 < stringHeight.split("=").length) { // Expecting a string of the form "height=(int)896"
824 | stringHeight = stringHeight.substring(12);
825 | try {
826 | Integer.parseInt(stringHeight);
827 | } catch (NumberFormatException ex) {
828 | stringHeight = "";
829 | }
830 | }
831 | if (0 < stringFramerate.split("=,").length) { // Expecting a string of the form "framerate=(fraction)[ 5/1, 10000000/333333"
832 | stringFramerate = stringFramerate.substring(stringFramerate.indexOf("="));
833 | String[] fpsParts = stringFramerate.split(",");
834 | if (1 < fpsParts.length) {
835 | stringFramerate = fpsParts[1].trim();
836 | fpsParts = stringFramerate.split("/");
837 | if (fpsParts.length == 2) {
838 | try {
839 | int fpsNumerator = Integer.parseInt(fpsParts[0]);
840 | int fpsDenominator = Integer.parseInt(fpsParts[1]);
841 | int fps = fpsNumerator / fpsDenominator;
842 | stringFramerate = String.valueOf(fps);
843 | } catch (NumberFormatException ex) {
844 | stringFramerate = "";
845 | }
846 | }
847 | }
848 | }
849 | if (!stringWidth.equals("") && !stringHeight.equals("") && !stringFramerate.equals("")) {
850 | devCaps.add("size=" + stringWidth + "x" + stringHeight + ",fps=" + stringFramerate);
851 | }
852 | }
853 |
854 | static private String assignDisplayName(Device d, int pos) {
855 | String s = "";
856 | int count = 1;
857 |
858 | for(int i = 0; i < devices.size(); i++) {
859 | if (devices.get(i).getDisplayName().equals(d.getDisplayName())){
860 | if (i == pos) {
861 | s = d.getDisplayName() + " #" + Integer.toString(count);
862 | }
863 | count++;
864 | }
865 | }
866 |
867 | return s;
868 | }
869 |
870 | static private int checkCameraDuplicates(Device d) {
871 | int count = 0;
872 | for (int i = 0; i < devices.size(); i++) {
873 | if (devices.get(i).getDisplayName().equals(d.getDisplayName())) {
874 | count++;
875 | }
876 | }
877 | return count;
878 | }
879 |
880 |
881 | private class NewSampleListener implements AppSink.NEW_SAMPLE {
882 |
883 | @Override
884 | public FlowReturn newSample(AppSink sink) {
885 | Sample sample = sink.pullSample();
886 |
887 | // Pull out metadata from caps
888 | Structure capsStruct = sample.getCaps().getStructure(0);
889 | sourceWidth = capsStruct.getInteger("width");
890 | sourceHeight = capsStruct.getInteger("height");
891 | Fraction fps = capsStruct.getFraction("framerate");
892 | sourceFrameRate = (float)fps.numerator / fps.denominator;
893 |
894 | // Set the playback rate to the file's native framerate
895 | // unless the user has already set a custom one
896 | if (frameRate == -1.0) {
897 | frameRate = sourceFrameRate;
898 | }
899 |
900 | Buffer buffer = sample.getBuffer();
901 | ByteBuffer bb = buffer.map(false);
902 | if (bb != null) {
903 |
904 | // If the EDT is still copying data from the buffer, just drop this frame
905 | if (!bufferLock.tryLock()) {
906 | return FlowReturn.OK;
907 | }
908 |
909 | available = true;
910 | if (useBufferSink && bufferSink != null) { // The native buffer from GStreamer is copied to the buffer sink.
911 |
912 | try {
913 | sinkCopyMethod.invoke(bufferSink, new Object[] { buffer, bb, sourceWidth, sourceHeight });
914 | if (capturing) {
915 | fireCaptureEvent();
916 | }
917 | } catch (Exception e) {
918 | e.printStackTrace();
919 | } finally {
920 | bufferLock.unlock();
921 | }
922 |
923 | } else {
924 | IntBuffer rgb = bb.asIntBuffer();
925 |
926 | if (copyPixels == null) {
927 | copyPixels = new int[sourceWidth * sourceHeight];
928 | }
929 |
930 | try {
931 | rgb.get(copyPixels, 0, width * height);
932 | if (capturing) {
933 | fireCaptureEvent();
934 | }
935 | } finally {
936 | bufferLock.unlock();
937 | }
938 |
939 | }
940 |
941 | buffer.unmap();
942 | }
943 | sample.dispose();
944 | return FlowReturn.OK;
945 | }
946 | }
947 |
948 |
949 | private class NewPrerollListener implements AppSink.NEW_PREROLL {
950 | @Override
951 | public FlowReturn newPreroll(AppSink sink) {
952 | Sample sample = sink.pullPreroll();
953 |
954 | // Pull out metadata from caps
955 | Structure capsStruct = sample.getCaps().getStructure(0);
956 | sourceWidth = capsStruct.getInteger("width");
957 | sourceHeight = capsStruct.getInteger("height");
958 | Fraction fps = capsStruct.getFraction("framerate");
959 | sourceFrameRate = (float)fps.numerator / fps.denominator;
960 |
961 | // Set the playback rate to the file's native framerate
962 | // unless the user has already set a custom one
963 | if (frameRate == -1.0) {
964 | frameRate = sourceFrameRate;
965 | }
966 |
967 | sample.dispose();
968 | return FlowReturn.OK;
969 | }
970 | }
971 | }
972 |
--------------------------------------------------------------------------------