├── .idea
├── .name
├── encodings.xml
├── ivport-v2.iml
├── misc.xml
├── modules.xml
└── vcs.xml
├── IIC.py
├── README.md
├── init_ivport.py
├── ivport.py
├── picamera
├── IIC.py
├── VERSION
├── __init__.py
├── array.py
├── bcm_host.py
├── camera.py
├── camera_1.10_ivport.py
├── camera_1.12_backup.py
├── camera_1.12_ivport.py
├── color.py
├── encoders.py
├── exc.py
├── mmal.py
├── renderers.py
└── streams.py
├── test_ivport.py
└── test_ivport_quad.py
/.idea/.name:
--------------------------------------------------------------------------------
1 | ivport-v2
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/ivport-v2.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/IIC.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # This file is part of Ivport.
4 | # Copyright (C) 2015 Ivmech Mechatronics Ltd.
5 | #
6 | # Ivport is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # Ivport is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see .
18 |
19 | #title :IIC.py
20 | #description :IIC module for ivport v2 camera multiplexer
21 | #author :Caner Durmusoglu
22 | #date :20160514
23 | #version :0.1
24 | #usage :
25 | #notes :
26 | #python_version :2.7
27 | #==============================================================================
28 |
29 | from datetime import datetime
30 |
31 | import smbus
32 |
# 7-bit I2C address of the Ivport v2 multiplexer and its control register.
iic_address = (0x70)
iic_register = (0x00)

# Bus-enable bit masks: writing one of these to the control register
# routes the corresponding downstream camera bus (0-3) to the Pi.
iic_bus0 = (0x01)
iic_bus1 = (0x02)
iic_bus2 = (0x04)
iic_bus3 = (0x08)
40 |
class IIC():
    """Thin I2C wrapper for the Ivport v2 camera-bus multiplexer.

    The device exposes a single control register; writing a bus mask to
    it selects which downstream camera bus is routed to the Pi.
    """

    def __init__(self, twi=1, addr=iic_address, bus_enable=iic_bus0):
        # Open /dev/i2c-<twi> and immediately select the initial camera bus.
        self._bus = smbus.SMBus(twi)
        self._addr = addr
        self._write(iic_register, bus_enable)

    def _write(self, register, data):
        # One-byte register write to the multiplexer.
        self._bus.write_byte_data(self._addr, register, data)

    def _read(self):
        # Read the current control-register byte back from the device.
        return self._bus.read_byte(self._addr)

    def read_control_register(self):
        """Return the bus mask currently held in the control register."""
        return self._read()

    def write_control_register(self, config):
        """Select camera bus(es) by writing *config* to the control register."""
        self._write(iic_register, config)
60 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | IVPORT V2
2 | ---
3 |
4 | IVPORT V2 is compatible with Raspberry Pi Camera Module V2 with 8MP SONY IMX219 Sensor
5 |
6 | IVPORT and IVPORT V2 are the first Raspberry Pi camera module multiplexers (fully compatible with Raspberry Pi A, A+, B+ and Raspberry Pi 2/3). They are designed to make it possible to connect more than one camera module to the single CSI camera port on a Raspberry Pi. Multiplexing can be controlled by 3 pins for 4 camera modules, 5 pins for 8 camera modules and 9 pins for **a maximum of up to 16 camera modules** using GPIO.
7 |
8 | IVPort has already been preferred by ESA, MIT Lab, Spacetrex Lab, well known company research centers and numerous different universities.
9 |
10 | Getting Started
11 | ---
12 |
13 | ### Order
14 |
15 | IVPORT V2 is available at [HERE](http://www.ivmech.com/magaza/ivport-v2-p-107).
16 |
17 | ### Installation
18 |
19 | First of all please enable I2C from raspi-config, [guide this link](http://www.raspberrypi-spy.co.uk/2014/11/enabling-the-i2c-interface-on-the-raspberry-pi)
20 |
21 | And enable Camera Module from raspi-config on older versions of Raspberry OS.
22 |
23 | Also Enable Legacy Camera from raspi-config on Raspberry OS Debian Version 11 (bullseye) [guide this link](https://www.youtube.com/watch?v=E7KPSc_Xr24)
24 |
25 | ### Cloning a Repository
26 |
27 | ```shell
28 | git clone https://github.com/ivmech/ivport-v2.git
29 | ```
30 |
31 | ### Dependency Installation
32 |
33 | ```shell
34 | sudo apt-get install python3-smbus
35 | ```
36 | picamera module was forked from https://github.com/waveform80/picamera and small edits for camera v2 and ivport support. It may be needed to uninstall preinstalled picamera module on device.
37 |
38 | ```shell
39 | sudo apt-get remove python-picamera
40 | sudo pip uninstall picamera
41 | ```
42 |
43 | ### Usage
44 |
45 | First of all it is important that **init_ivport.py** should be run at every boot before starting to access camera.
46 |
47 | ```shell
48 | cd ivport-v2
49 | python init_ivport.py
50 | ```
51 |
52 | It is needed to reboot raspberry pi at initial run of **init_ivport.py**
53 |
54 | ```shell
55 | sudo reboot
56 | ```
57 |
58 | Then check whether the ivport and camera are detected by the Raspberry Pi or not with **i2cdetect** and **vcgencmd get_camera**.
59 |
60 | ```shell
61 | root@ivport:~/ivport-v2 $ i2cdetect -y 1
62 | 0 1 2 3 4 5 6 7 8 9 a b c d e f
63 | 00: -- -- -- -- -- -- -- -- -- -- -- -- --
64 | 10: 10 -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
65 | 20: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
66 | 30: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
67 | 40: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
68 | 50: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
69 | 60: -- -- -- -- 64 -- -- -- -- -- -- -- -- -- -- --
70 | 70: 70 -- -- -- -- -- -- --
71 | ```
72 | You should get both **0x70** and **0x64** as addresses of respectively **ivport v2** and **camera module v2**.
73 |
74 | If you don't get any addresses, please check [IMPORTANT FIX](https://github.com/ivmech/ivport-v2/wiki/Important-Fix).
75 |
76 | ```shell
77 | root@ivport:~/ivport-v2 $ vcgencmd get_camera
78 | supported=1 detected=1
79 | ```
80 | **supported** and **detected** should be **1** before **test_ivport.py** script.
81 |
82 | There is **test_ivport.py** script for **IVPORT DUAL V2**.
83 |
84 | ```python
85 | import os
86 | import ivport
87 | # raspistill capture
87 | def capture(camera):
88 | "This system command for raspistill capture"
89 | cmd = "raspistill -t 10 -o still_CAM%d.jpg" % camera
90 | os.system(cmd)
91 |
92 | iv = ivport.IVPort(ivport.TYPE_DUAL2)
93 | iv.camera_change(1)
94 | capture(1)
95 | iv.camera_change(2)
96 | capture(2)
97 | iv.close()
98 | ```
99 | **TYPE** and **JUMPER** settings are configured while initialize ivport.
100 | ```python
101 | ivport.IVPort(IVPORT_TYPE, IVPORT_JUMPER)
102 | ```
103 | **RESOLUTION**, **FRAMERATE** and other settings can be configured.
104 | ```python
105 | iv = ivport.IVPort(ivport.TYPE_DUAL2)
106 | iv.camera_open(camera_v2=True, resolution=(640, 480), framerate=60)
107 | ```
108 | Also **init_ivport.py** should be run at every boot before starting to access camera.
109 |
110 | ```shell
111 | cd ivport-v2
112 | python init_ivport.py
113 | ```
114 |
115 | Tests
116 | ------
117 |
118 | There is **test_ivport.py** script which is for testing.
119 | ```shell
120 | cd ivport-v2
121 | python test_ivport.py
122 | ```
123 |
124 | Wiki
125 | ------
126 |
127 | #### See wiki pages from [here](https://github.com/ivmech/ivport/wiki).
128 |
129 | Video
130 | -------
131 |
132 | IVPort can be used for stereo vision with stereo camera.
133 |
134 | ### [Youtube video](https://www.youtube.com/watch?v=w4JZN7Y0d2o) of stereo recording with 2 camera modules
135 | [](https://www.youtube.com/watch?v=w4JZN7Y0d2o)
136 |
137 | ### IVPort was [@hackaday](http://hackaday.com/2014/12/19/multiplexing-pi-cameras/).
138 |
--------------------------------------------------------------------------------
/init_ivport.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import IIC
4 |
if __name__ == "__main__":
    # One-shot boot initialization: select camera bus 0 on the Ivport v2
    # I2C multiplexer (address 0x70) so the Pi can detect a camera.
    # Per the README this must run at every boot before camera access.
    iviic = IIC.IIC(addr=(0x70), bus_enable =(0x01))
7 |
--------------------------------------------------------------------------------
/ivport.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # This file is part of Ivport.
4 | # Copyright (C) 2016 Ivmech Mechatronics Ltd.
5 | #
6 | # Ivport is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # Ivport is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see .
18 |
19 | #title :ivport.py
20 | #description :ivport.py is module for capturing ivport camera multiplexer
21 | #author :Caner Durmusoglu
22 | #date :20160514
23 | #version :0.1
24 | #usage :import ivport
25 | #notes :
26 | #python_version :2.7
27 | #==============================================================================
28 |
29 | import sys
30 | import functools
31 |
try:
    import IIC
    import RPi.GPIO as gp
    gp.setwarnings(False)
    gp.setmode(gp.BOARD)
except ImportError:
    # Only catch missing modules; the previous bare `except:` also
    # swallowed unrelated failures (KeyboardInterrupt, GPIO runtime
    # errors) and misreported them as missing packages.
    print("There are no IIC.py and RPi.GPIO module.")
    print("install RPi.GPIO: sudo apt-get install python-rpi.gpio")
    sys.exit(0)
41 |
42 | #try:
43 | # import picamera
44 | #except:
45 | # print("There are no picamera module or directory.")
46 | # sys.exit(0)
47 |
# Board types: QUAD = 4-camera board, DUAL = 2-camera board. The "2"
# variants target Camera Module V2 (IMX219) boards, which additionally
# require I2C control-register writes when switching cameras.
TYPE_QUAD = 0
TYPE_QUAD2 = 1
TYPE_DUAL = 2
TYPE_DUAL2 = 3
52 |
class IVPort():
    """Controller for Ivport camera multiplexer boards.

    Switches the active camera by driving GPIO select pins (BOARD
    numbering) and, on the "2" board types built for Camera Module V2
    (IMX219), by also writing the matching bus mask to the board's I2C
    multiplexer at address 0x70.
    """

    # Jumper letter -> (f1, f2) select-pin pair for QUAD boards.
    IVJP = {'A': (11, 12), 'C': (21, 22), 'B': (15, 16), 'D': (23, 24)}
    pins = list(functools.reduce(lambda x,y: x+y, IVJP.values()))
    pins.sort()
    # Jumper number (1-8) -> single select pin for DUAL boards, built by
    # sorting all QUAD pin pairs and numbering them in ascending order.
    DIVJP = {i+1 : x for i,x in enumerate(pins)}
    del(pins)

    def __init__(self, iv_type=TYPE_DUAL2, iv_jumper=1):
        """Configure board type/jumper and set up GPIO (and I2C on V2 boards).

        :param iv_type: TYPE_QUAD, TYPE_QUAD2, TYPE_DUAL or TYPE_DUAL2.
        :param iv_jumper: jumper setting - a number (1-8) for DUAL boards.
            QUAD boards currently always use jumper 'A' (cluster support
            is not implemented), so the argument is overridden for them.
        """
        self.fPin = self.f1Pin = self.f2Pin = self.ePin = 0
        self.ivport_type = iv_type
        self.is_camera_v2 = self.ivport_type in (TYPE_DUAL2, TYPE_QUAD2)
        self.is_dual = self.ivport_type in (TYPE_DUAL2, TYPE_DUAL)
        self.ivport_jumper = iv_jumper
        if not self.is_dual: self.ivport_jumper = 'A'
        self.camera = 1
        self.is_opened = False

        if self.is_camera_v2:
            # V2 boards carry an I2C bus multiplexer; start on camera bus 0.
            self.iviic = IIC.IIC(addr=(0x70), bus_enable =(0x01))

        self.link_gpio()

    def link_gpio(self):
        """Resolve the jumper setting to physical pins and set them as outputs."""
        if self.is_dual:
            # DUAL boards need a single select pin.
            self.fPin = self.DIVJP[self.ivport_jumper]
            gp.setup(self.fPin, gp.OUT)
        else:
            # QUAD boards need two select pins plus a shared enable pin (7).
            self.f1Pin, self.f2Pin = self.IVJP[self.ivport_jumper]
            self.ePin = 7
            gp.setup(self.f1Pin, gp.OUT)
            gp.setup(self.f2Pin, gp.OUT)
            gp.setup(self.ePin, gp.OUT)

    # ivport camera change
    def camera_change(self, camera=1):
        """Route the CSI port to *camera* (1-based index).

        On V2 boards the I2C control register is updated before the GPIO
        select pins. An out-of-range index re-selects camera 1 (via
        close()) and exits the process.
        """
        if self.is_dual:
            if camera == 1:
                if self.is_camera_v2: self.iviic.write_control_register((0x01))
                gp.output(self.fPin, False)
            elif camera == 2:
                if self.is_camera_v2: self.iviic.write_control_register((0x02))
                gp.output(self.fPin, True)
            else:
                print("Ivport type is DUAL.")
                print("There isnt camera: %d" % camera)
                self.close()
                sys.exit(0)
        else:
            if camera == 1:
                if self.is_camera_v2: self.iviic.write_control_register((0x01))
                gp.output(self.ePin, False)
                gp.output(self.f1Pin, False)
                gp.output(self.f2Pin, True)
            elif camera == 2:
                if self.is_camera_v2: self.iviic.write_control_register((0x02))
                gp.output(self.ePin, True)
                gp.output(self.f1Pin, False)
                gp.output(self.f2Pin, True)
            elif camera == 3:
                if self.is_camera_v2: self.iviic.write_control_register((0x04))
                gp.output(self.ePin, False)
                gp.output(self.f1Pin, True)
                gp.output(self.f2Pin, False)
            elif camera == 4:
                if self.is_camera_v2: self.iviic.write_control_register((0x08))
                gp.output(self.ePin, True)
                gp.output(self.f1Pin, True)
                gp.output(self.f2Pin, False)
            else:
                print("Ivport type is QUAD.")
                print("Cluster feature hasnt been implemented yet.")
                print("There isnt camera: %d" % camera)
                self.close()
                sys.exit(0)
        self.camera = camera

    # picamera initialize
    # Camera V2:
    #   capture_sequence and start_recording require "camera_v2=True";
    #   the standard capture function does not.
    def camera_open(self, camera_v2=False, resolution=None, framerate=None, grayscale=False):
        """Open the bundled (forked) picamera interface; no-op if already open."""
        if self.is_opened: return
        # Imported lazily: the top-level picamera import is commented out
        # so the module still loads for raspistill-only use, but without
        # this local import the method raised NameError on "picamera".
        import picamera
        self.picam = picamera.PiCamera(camera_v2=camera_v2, resolution=resolution, framerate=framerate)
        if grayscale: self.picam.color_effects = (128, 128)
        self.is_opened = True

    # picamera capture
    def camera_capture(self, filename, **options):
        """Capture a still to "<filename>_CAM<n>.jpg" for the active camera."""
        if self.is_opened:
            self.picam.capture(filename + "_CAM" + str(self.camera) + '.jpg', **options)
        else:
            print("Camera is not opened.")

    def camera_sequence(self, **options):
        """Forward *options* to picamera's capture_sequence."""
        if self.is_opened:
            self.picam.capture_sequence(**options)
        else:
            print("Camera is not opened.")

    def close(self):
        """Re-select camera 1 and release the picamera handle if open."""
        self.camera_change(1)
        if self.is_opened: self.picam.close()
156 |
--------------------------------------------------------------------------------
/picamera/IIC.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # This file is part of Ivport.
4 | # Copyright (C) 2015 Ivmech Mechatronics Ltd.
5 | #
6 | # Ivport is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # Ivport is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see .
18 |
19 | #title :IIC.py
20 | #description :IIC module for ivport v2 camera multiplexer
21 | #author :Caner Durmusoglu
22 | #date :20160514
23 | #version :0.1
24 | #usage :
25 | #notes :
26 | #python_version :2.7
27 | #==============================================================================
28 |
29 | from datetime import datetime
30 |
31 | import smbus
32 |
# 7-bit I2C address of the Ivport v2 multiplexer and its control register.
iic_address = (0x70)
iic_register = (0x00)

# Bus-enable bit masks: writing one of these to the control register
# routes the corresponding downstream camera bus (0-3) to the Pi.
iic_bus0 = (0x01)
iic_bus1 = (0x02)
iic_bus2 = (0x04)
iic_bus3 = (0x08)
40 |
class IIC():
    """I2C driver for the Ivport v2 camera multiplexer control register."""

    def __init__(self, twi=1, addr=iic_address, bus_enable=iic_bus0):
        # twi selects the /dev/i2c-<twi> bus; bus_enable is the initial
        # camera-bus mask written to the control register.
        self._bus = smbus.SMBus(twi)
        self._addr = addr
        config = bus_enable
        self._write(iic_register, config)

    def _write(self, register, data):
        # One-byte register write to the multiplexer.
        self._bus.write_byte_data(self._addr, register, data)

    def _read(self):
        # Read the current control-register byte back from the device.
        return self._bus.read_byte(self._addr)

    def read_control_register(self):
        """Return the bus mask currently held in the control register."""
        value = self._read()
        return value

    def write_control_register(self, config):
        """Select camera bus(es) by writing *config* to the control register."""
        self._write(iic_register, config)
60 |
--------------------------------------------------------------------------------
/picamera/VERSION:
--------------------------------------------------------------------------------
1 | 1.10
2 |
--------------------------------------------------------------------------------
/picamera/__init__.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python camera library for the Rasperry-Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The picamera package consists of several modules which provide a pure Python
32 | interface to the Raspberry Pi's camera module. The package is only intended to
33 | run on a Raspberry Pi, and expects to be able to load the MMAL library
34 | (libmmal.so) upon import.
35 |
36 | The classes defined by most modules in this package are available directly from
37 | the :mod:`picamera` namespace. In other words, the following code is typically
38 | all that is required to access classes in the package::
39 |
40 | import picamera
41 |
42 | The :mod:`picamera.array` module is an exception to this as it depends on the
43 | third-party `numpy`_ package (this avoids making numpy a mandatory dependency
44 | for picamera).
45 |
46 | .. _numpy: http://www.numpy.org/
47 |
48 |
49 | The following sections document the various modules available within the
50 | package:
51 |
52 | * :mod:`picamera.camera`
53 | * :mod:`picamera.encoders`
54 | * :mod:`picamera.streams`
55 | * :mod:`picamera.renderers`
56 | * :mod:`picamera.color`
57 | * :mod:`picamera.exc`
58 | * :mod:`picamera.array`
59 | """
60 |
61 | from __future__ import (
62 | unicode_literals,
63 | print_function,
64 | division,
65 | absolute_import,
66 | )
67 |
68 | # Make Py2's str equivalent to Py3's
69 | str = type('')
70 |
71 | from picamera.exc import (
72 | PiCameraWarning,
73 | PiCameraError,
74 | PiCameraRuntimeError,
75 | PiCameraClosed,
76 | PiCameraNotRecording,
77 | PiCameraAlreadyRecording,
78 | PiCameraValueError,
79 | PiCameraMMALError,
80 | mmal_check,
81 | )
82 | from picamera.camera import PiCamera
83 | from picamera.encoders import (
84 | PiVideoFrame,
85 | PiVideoFrameType,
86 | PiEncoder,
87 | PiVideoEncoder,
88 | PiImageEncoder,
89 | PiRawMixin,
90 | PiCookedVideoEncoder,
91 | PiRawVideoEncoder,
92 | PiOneImageEncoder,
93 | PiMultiImageEncoder,
94 | PiRawImageMixin,
95 | PiCookedOneImageEncoder,
96 | PiRawOneImageEncoder,
97 | PiCookedMultiImageEncoder,
98 | PiRawMultiImageEncoder,
99 | )
100 | from picamera.renderers import (
101 | PiRenderer,
102 | PiOverlayRenderer,
103 | PiPreviewRenderer,
104 | PiNullSink,
105 | )
106 | from picamera.streams import PiCameraCircularIO, CircularIO
107 | from picamera.color import Color, Red, Green, Blue, Hue, Lightness, Saturation
108 |
109 |
--------------------------------------------------------------------------------
/picamera/array.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python camera library for the Rasperry-Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The :mod:`picamera.array` module provides a set of classes which aid in
32 | constructing n-dimensional `numpy`_ arrays from the camera output. In order to
33 | avoid adding a hard dependency on numpy to picamera, the module is not
34 | automatically imported by the main picamera package and must be explicitly
35 | imported.
36 |
37 | .. _numpy: http://www.numpy.org/
38 |
39 | The following classes are defined in the module:
40 |
41 |
42 | PiArrayOutput
43 | =============
44 |
45 | .. autoclass:: PiArrayOutput
46 | :members:
47 |
48 |
49 | PiRGBArray
50 | ==========
51 |
52 | .. autoclass:: PiRGBArray
53 |
54 |
55 | PiYUVArray
56 | ==========
57 |
58 | .. autoclass:: PiYUVArray
59 |
60 |
61 | PiBayerArray
62 | ============
63 |
64 | .. autoclass:: PiBayerArray
65 |
66 |
67 | PiMotionArray
68 | =============
69 |
70 | .. autoclass:: PiMotionArray
71 |
72 |
73 | PiAnalysisOutput
74 | ================
75 |
76 | .. autoclass:: PiAnalysisOutput
77 | :members:
78 |
79 |
80 | PiRGBAnalysis
81 | =============
82 |
83 | .. autoclass:: PiRGBAnalysis
84 |
85 |
86 | PiYUVAnalysis
87 | =============
88 |
89 | .. autoclass:: PiYUVAnalysis
90 |
91 |
92 | PiMotionAnalysis
93 | ================
94 |
95 | .. autoclass:: PiMotionAnalysis
96 | """
97 |
98 | from __future__ import (
99 | unicode_literals,
100 | print_function,
101 | division,
102 | absolute_import,
103 | )
104 |
# Make Py2's str and range equivalent to Py3's
native_str = str  # keep the platform's original str (bytes on Py2) for numpy field names
str = type('')    # rebind str to the unicode text type on both Python versions
try:
    range = xrange  # Py2: substitute the lazy xrange for range
except NameError:
    pass            # Py3: range is already lazy
112 |
113 | import io
114 | import warnings
115 |
116 | import numpy as np
117 | from numpy.lib.stride_tricks import as_strided
118 |
119 | from .exc import PiCameraValueError, PiCameraDeprecated
120 |
121 |
# Record layout of one motion-vector entry as produced by the camera's
# H.264 encoder: signed x/y vector components plus the macro-block's
# sum-of-absolute-differences. native_str keeps field names as byte
# strings on Py2 (numpy requires that).
motion_dtype = np.dtype([
    (native_str('x'), np.int8),
    (native_str('y'), np.int8),
    (native_str('sad'), np.uint16),
    ])
127 |
128 |
def raw_resolution(resolution):
    """
    Pad *resolution* (a ``(width, height)`` tuple) out to the camera's
    native block size: widths become multiples of 32 and heights
    multiples of 16, matching what the Pi's camera module emits for
    unencoded output.
    """
    width, height = resolution
    padded_width = width + (-width % 32)
    padded_height = height + (-height % 16)
    return padded_width, padded_height
139 |
140 |
def bytes_to_yuv(data, resolution):
    """
    Convert a planar YUV4:2:0 byte buffer into a ``(rows, cols, 3)``
    `numpy`_ array cropped to *resolution*.
    """
    width, height = resolution
    fwidth, fheight = raw_resolution(resolution)
    y_len = fwidth * fheight
    uv_len = (fwidth // 2) * (fheight // 2)
    if len(data) != y_len + 2 * uv_len:
        raise PiCameraValueError(
            'Incorrect buffer length for resolution %dx%d' % (width, height))
    flat = np.frombuffer(data, dtype=np.uint8)
    # Slice the buffer into its three planes.
    y_plane = flat[:y_len].reshape((fheight, fwidth))
    u_plane = flat[y_len:y_len + uv_len].reshape((fheight // 2, fwidth // 2))
    v_plane = flat[y_len + uv_len:].reshape((fheight // 2, fwidth // 2))
    # U and V are stored at quarter resolution in YUV4:2:0; double each
    # along both axes to match the Y plane.
    u_plane = u_plane.repeat(2, axis=0).repeat(2, axis=1)
    v_plane = v_plane.repeat(2, axis=0).repeat(2, axis=1)
    # Interleave the channels and trim the padding back off.
    return np.dstack((y_plane, u_plane, v_plane))[:height, :width]
164 |
165 |
def bytes_to_rgb(data, resolution):
    """
    Convert an unencoded RGB/BGR byte buffer into a ``(rows, cols, 3)``
    `numpy`_ array cropped to *resolution*.
    """
    width, height = resolution
    fwidth, fheight = raw_resolution(resolution)
    if len(data) != fwidth * fheight * 3:
        raise PiCameraValueError(
            'Incorrect buffer length for resolution %dx%d' % (width, height))
    full = np.frombuffer(data, dtype=np.uint8).reshape((fheight, fwidth, 3))
    # Trim the firmware's padding rows/columns off the result.
    return full[:height, :width, :]
178 |
179 |
class PiArrayOutput(io.BytesIO):
    """
    Base class for capture arrays.

    Extends :class:`io.BytesIO` with an :attr:`array` attribute which
    subclasses populate from the captured bytes when
    :meth:`~io.IOBase.flush` fires at the end of a capture.

    .. attribute:: array

        After :meth:`~io.IOBase.flush` is called, holds the frame as a
        multi-dimensional `numpy`_ array, typically organized as
        ``(rows, columns, plane)`` — e.g. an RGB image of size *x* by
        *y* yields shape ``(y, x, 3)``.
    """

    def __init__(self, camera, size=None):
        super(PiArrayOutput, self).__init__()
        self.camera = camera
        self.size = size
        self.array = None

    def close(self):
        super(PiArrayOutput, self).close()
        # Release the (potentially large) frame alongside the buffer.
        self.array = None

    def truncate(self, size=None):
        """
        Resize the stream to *size* bytes (or the current position when
        *size* is omitted).

        Passing an explicit *size* also moves the stream position to it;
        this mirrors the non-seekable streams of earlier picamera
        releases and is deprecated — use :meth:`~io.IOBase.seek` and
        :meth:`truncate` as with regular :class:`~io.BytesIO` objects.
        """
        if size is None:
            super(PiArrayOutput, self).truncate(None)
            return
        warnings.warn(
            PiCameraDeprecated(
                'This method changes the position of the stream to the '
                'truncated length; this is deprecated functionality and '
                'you should not rely on it (seek before or after truncate '
                'to ensure position is consistent)'))
        super(PiArrayOutput, self).truncate(size)
        self.seek(size)
229 |
230 |
class PiRGBArray(PiArrayOutput):
    """
    Produces a 3-dimensional RGB array from an RGB capture.

    Capture with ``camera.capture(output, 'rgb')`` and read the frame
    from the :attr:`~PiArrayOutput.array` attribute, organized as
    (rows, columns, colors)::

        import picamera
        import picamera.array

        with picamera.PiCamera() as camera:
            with picamera.array.PiRGBArray(camera) as output:
                camera.capture(output, 'rgb')
                print('Captured %dx%d image' % (
                        output.array.shape[1], output.array.shape[0]))

    The output can be re-used for multiple captures by emptying it with
    ``truncate(0)`` between them.

    When capturing through the GPU resizer (the *resize* parameter of
    the various :meth:`~picamera.camera.PiCamera.capture` methods), pass
    the resized resolution as the optional *size* parameter when
    constructing the output, e.g. ``PiRGBArray(camera, size=(640, 360))``
    with ``camera.capture(output, 'rgb', resize=(640, 360))``.
    """

    def flush(self):
        """Decode the buffered RGB bytes into :attr:`array` at end of capture."""
        super(PiRGBArray, self).flush()
        resolution = self.size or self.camera.resolution
        self.array = bytes_to_rgb(self.getvalue(), resolution)
286 |
287 |
class PiYUVArray(PiArrayOutput):
    """
    Produces 3-dimensional YUV & RGB arrays from a YUV capture.

    Capture with ``camera.capture(output, 'yuv')`` and read the frame
    from the :attr:`~PiArrayOutput.array` attribute, organized as
    (rows, columns, channel)::

        import picamera
        import picamera.array

        with picamera.PiCamera() as camera:
            with picamera.array.PiYUVArray(camera) as output:
                camera.capture(output, 'yuv')
                print('Captured %dx%d image' % (
                        output.array.shape[1], output.array.shape[0]))

    The :attr:`rgb_array` attribute can be queried for the equivalent
    RGB array (conversion is performed using the `ITU-R BT.601`_
    matrix).

    When capturing through the GPU resizer (the *resize* parameter of
    the various :meth:`~picamera.camera.PiCamera.capture` methods), pass
    the resized resolution as the optional *size* parameter when
    constructing the output, e.g. ``PiYUVArray(camera, size=(640, 360))``.

    .. _ITU-R BT.601: http://en.wikipedia.org/wiki/YCbCr#ITU-R_BT.601_conversion
    """

    def __init__(self, camera, size=None):
        super(PiYUVArray, self).__init__(camera, size)
        self._rgb = None  # lazily-computed RGB conversion cache

    def flush(self):
        """Decode the buffered YUV bytes into :attr:`array` at end of capture."""
        super(PiYUVArray, self).flush()
        self.array = bytes_to_yuv(self.getvalue(), self.size or self.camera.resolution)

    @property
    def rgb_array(self):
        """RGB equivalent of :attr:`array` (computed once, then cached)."""
        if self._rgb is None:
            # Work in float before applying the standard biases: the
            # capture array is uint8, and subtracting the offsets in
            # uint8 arithmetic wraps around instead of going negative
            # (e.g. Y=0 became 240), corrupting the conversion.
            YUV = self.array.astype(float)
            YUV[:, :, 0] = YUV[:, :, 0] - 16 # Offset Y by 16
            YUV[:, :, 1:] = YUV[:, :, 1:] - 128 # Offset UV by 128
            # YUV conversion matrix from ITU-R BT.601 version (SDTV)
            #               Y       U       V
            M = np.array([[1.164,  0.000,  1.596],    # R
                          [1.164, -0.392, -0.813],    # G
                          [1.164,  2.017,  0.000]])   # B
            # Dot with the matrix, clamp to byte range and convert back
            self._rgb = YUV.dot(M.T).clip(0, 255).astype(np.uint8)
        return self._rgb
361 |
362 |
class PiBayerArray(PiArrayOutput):
    """
    Produces a 3-dimensional RGB array from raw Bayer data.

    This custom output class is intended to be used with the
    :meth:`~picamera.camera.PiCamera.capture` method, with the *bayer*
    parameter set to ``True``, to include raw Bayer data in the JPEG output.
    The class strips out the raw data, constructing a 3-dimensional numpy array
    organized as (rows, columns, colors). The resulting data is accessed via
    the :attr:`~PiArrayOutput.array` attribute::

        import picamera
        import picamera.array

        with picamera.PiCamera() as camera:
            with picamera.array.PiBayerArray(camera) as output:
                camera.capture(output, 'jpeg', bayer=True)
                print(output.array.shape)

    Note that Bayer data is *always* full resolution, so the resulting array
    always has the shape (1944, 2592, 3); this also implies that the optional
    *size* parameter (for specifying a resizer resolution) is not available
    with this array class. As the sensor records 10-bit values, the array uses
    the unsigned 16-bit integer data type.

    By default, `de-mosaicing`_ is **not** performed; if the resulting array is
    viewed it will therefore appear dark and too green (due to the green bias
    in the `Bayer pattern`_). A trivial weighted-average demosaicing algorithm
    is provided in the :meth:`demosaic` method::

        import picamera
        import picamera.array

        with picamera.PiCamera() as camera:
            with picamera.array.PiBayerArray(camera) as output:
                camera.capture(output, 'jpeg', bayer=True)
                print(output.demosaic().shape)

    Viewing the result of the de-mosaiced data will look more normal but still
    considerably worse quality than the regular camera output (as none of the
    other usual post-processing steps like auto-exposure, white-balance,
    vignette compensation, and smoothing have been performed).

    .. _de-mosaicing: http://en.wikipedia.org/wiki/Demosaicing
    .. _Bayer pattern: http://en.wikipedia.org/wiki/Bayer_filter
    """

    def __init__(self, camera):
        # Bayer data is always full sensor resolution, so no size parameter.
        super(PiBayerArray, self).__init__(camera, size=None)
        # Cache for the demosaic() result; invalidated on each flush().
        self._demo = None

    def flush(self):
        """
        Extract the raw Bayer block from the end of the JPEG buffer and
        decode it into :attr:`~PiArrayOutput.array` as a sparse
        (1944, 2592, 3) uint16 RGB array (only one channel is populated
        per pixel, following the Bayer pattern).
        """
        super(PiBayerArray, self).flush()
        self._demo = None
        # The raw Bayer block occupies the final 6404096 bytes of the JPEG
        # and starts with a 'BRCM' magic marker.
        data = self.getvalue()[-6404096:]
        if data[:4] != b'BRCM':
            raise PiCameraValueError('Unable to locate Bayer data at end of buffer')
        # Strip header
        data = data[32768:]
        # Reshape into 2D pixel values; crop the padding rows/columns to the
        # active sensor area
        data = np.frombuffer(data, dtype=np.uint8).\
                reshape((1952, 3264))[:1944, :3240]
        # Unpack 10-bit values; every 5 bytes contains the high 8-bits of 4
        # values followed by the low 2-bits of 4 values packed into the fifth
        # byte
        data = data.astype(np.uint16) << 2
        for byte in range(4):
            data[:, byte::5] |= ((data[:, 4::5] >> ((4 - byte) * 2)) & 3)
        data = np.delete(data, np.s_[4::5], 1)
        # XXX Should test camera's vflip and hflip settings here and adjust
        # Scatter the 2D mosaic into a 3D RGB array according to the
        # (assumed) BGGR-style Bayer layout below.
        self.array = np.zeros(data.shape + (3,), dtype=data.dtype)
        self.array[1::2, 0::2, 0] = data[1::2, 0::2] # Red
        self.array[0::2, 0::2, 1] = data[0::2, 0::2] # Green
        self.array[1::2, 1::2, 1] = data[1::2, 1::2] # Green
        self.array[0::2, 1::2, 2] = data[0::2, 1::2] # Blue

    def demosaic(self):
        """
        Return a de-mosaiced copy of :attr:`~PiArrayOutput.array`, computed
        with a 3x3 weighted-average window per channel. The result is cached
        until the next :meth:`flush`.
        """
        if self._demo is None:
            # XXX Again, should take into account camera's vflip and hflip here
            # Construct representation of the bayer pattern: 1 where a channel
            # has a real sample, 0 elsewhere (used as averaging weights)
            bayer = np.zeros(self.array.shape, dtype=np.uint8)
            bayer[1::2, 0::2, 0] = 1 # Red
            bayer[0::2, 0::2, 1] = 1 # Green
            bayer[1::2, 1::2, 1] = 1 # Green
            bayer[0::2, 1::2, 2] = 1 # Blue
            # Allocate output array with same shape as data and set up some
            # constants to represent the weighted average window
            window = (3, 3)
            borders = (window[0] - 1, window[1] - 1)
            border = (borders[0] // 2, borders[1] // 2)
            # Pad out the data and the bayer pattern (np.pad is faster but
            # unavailable on the version of numpy shipped with Raspbian at the
            # time of writing)
            rgb = np.zeros((
                self.array.shape[0] + borders[0],
                self.array.shape[1] + borders[1],
                self.array.shape[2]), dtype=self.array.dtype)
            rgb[
                    border[0]:rgb.shape[0] - border[0],
                    border[1]:rgb.shape[1] - border[1],
                    :] = self.array
            bayer_pad = np.zeros((
                self.array.shape[0] + borders[0],
                self.array.shape[1] + borders[1],
                self.array.shape[2]), dtype=bayer.dtype)
            bayer_pad[
                    border[0]:bayer_pad.shape[0] - border[0],
                    border[1]:bayer_pad.shape[1] - border[1],
                    :] = bayer
            bayer = bayer_pad
            # For each plane in the RGB data, construct a view over the plane
            # of 3x3 matrices. Then do the same for the bayer array and use
            # Einstein summation to get the weighted average (sum of samples
            # in the window divided by the count of real samples)
            self._demo = np.empty(self.array.shape, dtype=self.array.dtype)
            for plane in range(3):
                p = rgb[..., plane]
                b = bayer[..., plane]
                pview = as_strided(p, shape=(
                    p.shape[0] - borders[0],
                    p.shape[1] - borders[1]) + window, strides=p.strides * 2)
                bview = as_strided(b, shape=(
                    b.shape[0] - borders[0],
                    b.shape[1] - borders[1]) + window, strides=b.strides * 2)
                psum = np.einsum('ijkl->ij', pview)
                bsum = np.einsum('ijkl->ij', bview)
                self._demo[..., plane] = psum // bsum
        return self._demo
490 |
491 |
class PiMotionArray(PiArrayOutput):
    """
    Produces a 3-dimensional array of motion vectors from the H.264 encoder.

    Use this output class with the *motion_output* parameter of
    :meth:`~picamera.camera.PiCamera.start_recording`. After recording
    stops, :attr:`~PiArrayOutput.array` holds a numpy array organized as
    (frames, rows, columns), where the rows and columns count the
    `macro-blocks`_ (16x16 pixel blocks) of the recorded frames; motion
    vector data always carries one extra column of macro-blocks.

    Each element is an (x, y, sad) record: ``x`` and ``y`` are signed
    1-byte values and ``sad`` is an unsigned 2-byte
    `sum of absolute differences`_ for the block. For example::

        import picamera
        import picamera.array

        with picamera.PiCamera() as camera:
            with picamera.array.PiMotionArray(camera) as output:
                camera.resolution = (640, 480)
                camera.start_recording(
                      '/dev/null', format='h264', motion_output=output)
                camera.wait_recording(30)
                camera.stop_recording()
                print('Captured %d frames' % output.array.shape[0])

    If the recording passes through the GPU resizer (the *resize*
    parameter of ``start_recording``), give the resizer's output
    resolution as the optional *size* argument of this class.

    .. note::

        This class is not suitable for real-time analysis of motion vector
        data. See the :class:`PiMotionAnalysis` class instead.

    .. _macro-blocks: http://en.wikipedia.org/wiki/Macroblock
    .. _sum of absolute differences: http://en.wikipedia.org/wiki/Sum_of_absolute_differences
    """

    def flush(self):
        """Decode the buffered vector stream into :attr:`array`."""
        super(PiMotionArray, self).flush()
        width, height = self.size if self.size else self.camera.resolution
        # One macro-block per 16 pixels (rounded up), plus the extra
        # column the encoder always emits.
        block_cols = (width + 15) // 16 + 1
        block_rows = (height + 15) // 16
        raw = self.getvalue()
        frame_bytes = block_cols * block_rows * motion_dtype.itemsize
        frame_count = len(raw) // frame_bytes
        self.array = np.frombuffer(raw, dtype=motion_dtype).reshape(
            (frame_count, block_rows, block_cols))
559 |
560 |
class PiAnalysisOutput(io.IOBase):
    """
    Base class for analysis outputs.

    This class extends :class:`io.IOBase` with a stub :meth:`analyse` method
    which will be called for each frame output. In this base implementation the
    method simply raises :exc:`NotImplementedError`.

    :param camera: the camera instance the output is attached to.
    :param size: optional (width, height) of the resized frames; when
        ``None``, subclasses fall back to the camera's resolution.
    """

    def __init__(self, camera, size=None):
        super(PiAnalysisOutput, self).__init__()
        self.camera = camera
        self.size = size

    def writable(self):
        # Correct spelling of the io.IOBase capability query; without this
        # override IOBase reports the stream as non-writable.
        return True

    # Backward-compatible alias: earlier revisions misspelled the io.IOBase
    # method name as "writeable", so keep that attribute working for any
    # callers that used it.
    writeable = writable

    def write(self, b):
        """Accept a frame buffer *b*; returns the number of bytes consumed."""
        return len(b)

    def analyse(self, array):
        """
        Stub method for users to override.
        """
        raise NotImplementedError
586 |
587 |
class PiRGBAnalysis(PiAnalysisOutput):
    """
    Provides a basis for per-frame RGB analysis classes.

    Use this output class with
    :meth:`~picamera.camera.PiCamera.start_recording` and *format* set to
    ``'rgb'`` or ``'bgr'``. While recording runs, each frame written to the
    output is converted to a numpy array and handed to the stub
    :meth:`~PiAnalysisOutput.analyse` method, which deliberately raises
    :exc:`NotImplementedError` here; override it in your subclass.

    The array given to :meth:`~PiAnalysisOutput.analyse` is organized as
    (rows, columns, channel) with channels 0, 1, 2 being R, G, B (or B, G,
    R when *format* is ``'bgr'``).

    .. warning::

        :meth:`~PiAnalysisOutput.analyse` runs inside the encoder's
        callback, so it must be **fast** — it has to return before the next
        frame arrives (at 30fps that is under 33ms, less in practice given
        encoding overhead). Reduce the camera framerate if necessary.
    """

    def write(self, b):
        consumed = super(PiRGBAnalysis, self).write(b)
        # Decode the raw frame at the resizer size (when set) or the
        # camera's resolution, then hand it to the user's analyse().
        target = self.size if self.size else self.camera.resolution
        self.analyse(bytes_to_rgb(b, target))
        return consumed
620 |
621 |
class PiYUVAnalysis(PiAnalysisOutput):
    """
    Provides a basis for per-frame YUV analysis classes.

    Use this output class with
    :meth:`~picamera.camera.PiCamera.start_recording` and *format* set to
    ``'yuv'``. While recording runs, each frame written to the output is
    converted to a numpy array and handed to the stub
    :meth:`~PiAnalysisOutput.analyse` method, which deliberately raises
    :exc:`NotImplementedError` here; override it in your subclass.

    The array given to :meth:`~PiAnalysisOutput.analyse` is organized as
    (rows, columns, channel) with channel 0 being Y (luminance) and
    channels 1 and 2 being U and V (chrominance). Although chrominance is
    normally stored at quarter resolution, all three channels are expanded
    to equal resolution for ease of use.

    .. warning::

        :meth:`~PiAnalysisOutput.analyse` runs inside the encoder's
        callback, so it must be **fast** — it has to return before the next
        frame arrives (at 30fps that is under 33ms, less in practice given
        encoding overhead). Reduce the camera framerate if necessary.
    """

    def write(self, b):
        consumed = super(PiYUVAnalysis, self).write(b)
        # Decode the raw frame at the resizer size (when set) or the
        # camera's resolution, then hand it to the user's analyse().
        target = self.size if self.size else self.camera.resolution
        self.analyse(bytes_to_yuv(b, target))
        return consumed
656 |
657 |
class PiMotionAnalysis(PiAnalysisOutput):
    """
    Provides a basis for real-time motion analysis classes.

    Use this output class with the *motion_output* parameter of
    :meth:`~picamera.camera.PiCamera.start_recording`. While recording
    runs, each chunk of motion data written to the output is converted to
    a numpy array and handed to the stub
    :meth:`~PiAnalysisOutput.analyse` method, which deliberately raises
    :exc:`NotImplementedError` here; override it in your subclass.

    The array given to :meth:`~PiAnalysisOutput.analyse` is organized as
    (rows, columns), counting the `macro-blocks`_ (16x16 pixel blocks) of
    the recorded frames; motion vector data always carries one extra
    column of macro-blocks. Each element is an (x, y, sad) record: ``x``
    and ``y`` are signed 1-byte values and ``sad`` is an unsigned 2-byte
    `sum of absolute differences`_ for the block.

    .. warning::

        :meth:`~PiAnalysisOutput.analyse` runs inside the encoder's
        callback, so it must be **fast** — it has to return before the next
        frame arrives (at 30fps that is under 33ms, less in practice given
        encoding overhead). Reduce the camera framerate if necessary.

    An example of a crude motion detector is given below::

        import numpy as np
        import picamera
        import picamera.array

        class DetectMotion(picamera.array.PiMotionAnalysis):
            def analyse(self, a):
                a = np.sqrt(
                    np.square(a['x'].astype(np.float)) +
                    np.square(a['y'].astype(np.float))
                    ).clip(0, 255).astype(np.uint8)
                # If there're more than 10 vectors with a magnitude greater
                # than 60, then say we've detected motion
                if (a > 60).sum() > 10:
                    print('Motion detected!')

        with picamera.PiCamera() as camera:
            with DetectMotion(camera) as output:
                camera.resolution = (640, 480)
                camera.start_recording(
                      '/dev/null', format='h264', motion_output=output)
                camera.wait_recording(30)
                camera.stop_recording()

    If the recording passes through the GPU resizer (the *resize*
    parameter of ``start_recording``), give the resizer's output
    resolution as the optional *size* argument of this class.

    .. _macro-blocks: http://en.wikipedia.org/wiki/Macroblock
    .. _sum of absolute differences: http://en.wikipedia.org/wiki/Sum_of_absolute_differences
    """

    def __init__(self, camera, size=None):
        super(PiMotionAnalysis, self).__init__(camera, size)
        # Macro-block grid dimensions; computed lazily on the first write.
        self.cols = None
        self.rows = None

    def write(self, b):
        consumed = super(PiMotionAnalysis, self).write(b)
        if self.cols is None:
            width, height = self.size if self.size else self.camera.resolution
            # One macro-block per 16 pixels (rounded up), plus the extra
            # column the encoder always emits.
            self.cols = (width + 15) // 16 + 1
            self.rows = (height + 15) // 16
        vectors = np.frombuffer(b, dtype=motion_dtype)
        self.analyse(vectors.reshape((self.rows, self.cols)))
        return consumed
734 |
735 |
--------------------------------------------------------------------------------
/picamera/bcm_host.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python header conversion
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Original headers
7 | # Copyright (c) 2012, Broadcom Europe Ltd
8 | # All rights reserved.
9 | #
10 | # Redistribution and use in source and binary forms, with or without
11 | # modification, are permitted provided that the following conditions are met:
12 | #
13 | # * Redistributions of source code must retain the above copyright
14 | # notice, this list of conditions and the following disclaimer.
15 | # * Redistributions in binary form must reproduce the above copyright
16 | # notice, this list of conditions and the following disclaimer in the
17 | # documentation and/or other materials provided with the distribution.
18 | # * Neither the name of the copyright holder nor the
19 | # names of its contributors may be used to endorse or promote products
20 | # derived from this software without specific prior written permission.
21 | #
22 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
26 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
27 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
29 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
30 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
31 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 | # POSSIBILITY OF SUCH DAMAGE.
33 |
34 | from __future__ import (
35 | unicode_literals,
36 | print_function,
37 | division,
38 | absolute_import,
39 | )
40 |
41 | # Make Py2's str equivalent to Py3's
42 | str = type('')
43 |
44 | import ctypes as ct
45 | import warnings
46 |
# Load the Broadcom VideoCore host library; raises OSError when the shared
# library is not present (i.e. on non-Raspberry-Pi systems).
_lib = ct.CDLL('libbcm_host.so')

# bcm_host.h #################################################################

# void bcm_host_init(void)
bcm_host_init = _lib.bcm_host_init
bcm_host_init.argtypes = []
bcm_host_init.restype = None

# void bcm_host_deinit(void)
bcm_host_deinit = _lib.bcm_host_deinit
bcm_host_deinit.argtypes = []
bcm_host_deinit.restype = None

# int32_t graphics_get_display_size(
#     uint16_t display_number, uint32_t *width, uint32_t *height)
# NOTE(review): presumably returns >= 0 on success per the Broadcom headers —
# confirm against bcm_host.h before relying on the sign of the result.
graphics_get_display_size = _lib.graphics_get_display_size
graphics_get_display_size.argtypes = [ct.c_uint16, ct.POINTER(ct.c_uint32), ct.POINTER(ct.c_uint32)]
graphics_get_display_size.restype = ct.c_int32
62 |
63 |
--------------------------------------------------------------------------------
/picamera/color.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
# Python camera library for the Raspberry Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The color module defines a class for representing a color, along with various
32 | ancillary classes which can be used to manipulate aspects of a color.
33 |
34 | .. note::
35 |
36 | All classes in this module are available from the :mod:`picamera` namespace
37 | without having to import :mod:`picamera.color` directly.
38 |
39 | The following classes are defined in the module:
40 |
41 |
42 | Color
43 | =====
44 |
45 | .. autoclass:: Color
46 | :members:
47 |
48 |
49 | Red
50 | ===
51 |
52 | .. autoclass:: Red
53 | :members:
54 |
55 |
56 | Green
57 | =====
58 |
59 | .. autoclass:: Green
60 | :members:
61 |
62 |
63 | Blue
64 | ====
65 |
66 | .. autoclass:: Blue
67 | :members:
68 |
69 |
70 | Hue
71 | ===
72 |
73 | .. autoclass:: Hue
74 | :members:
75 |
76 |
77 | Saturation
78 | ==========
79 |
80 | .. autoclass:: Saturation
81 | :members:
82 |
83 |
84 | Lightness
85 | =========
86 |
87 | .. autoclass:: Lightness
88 | :members:
89 |
90 | """
91 |
92 | from __future__ import (
93 | unicode_literals,
94 | print_function,
95 | division,
96 | absolute_import,
97 | )
98 |
# Make Py2's str equivalent to Py3's
100 | str = type('')
101 |
102 |
103 | import colorsys
104 | from math import pi
105 | from collections import namedtuple
106 |
107 |
# Mapping of CSS color names to '#rrggbb' hex strings, taken from the CSS
# Color Module Level 3 specification, section 4.3
#
NAMED_COLORS = {
    'aliceblue': '#f0f8ff',
    'antiquewhite': '#faebd7',
    'aqua': '#00ffff',
    'aquamarine': '#7fffd4',
    'azure': '#f0ffff',
    'beige': '#f5f5dc',
    'bisque': '#ffe4c4',
    'black': '#000000',
    'blanchedalmond': '#ffebcd',
    'blue': '#0000ff',
    'blueviolet': '#8a2be2',
    'brown': '#a52a2a',
    'burlywood': '#deb887',
    'cadetblue': '#5f9ea0',
    'chartreuse': '#7fff00',
    'chocolate': '#d2691e',
    'coral': '#ff7f50',
    'cornflowerblue': '#6495ed',
    'cornsilk': '#fff8dc',
    'crimson': '#dc143c',
    'cyan': '#00ffff',
    'darkblue': '#00008b',
    'darkcyan': '#008b8b',
    'darkgoldenrod': '#b8860b',
    'darkgray': '#a9a9a9',
    'darkgreen': '#006400',
    'darkgrey': '#a9a9a9',
    'darkkhaki': '#bdb76b',
    'darkmagenta': '#8b008b',
    'darkolivegreen': '#556b2f',
    'darkorange': '#ff8c00',
    'darkorchid': '#9932cc',
    'darkred': '#8b0000',
    'darksalmon': '#e9967a',
    'darkseagreen': '#8fbc8f',
    'darkslateblue': '#483d8b',
    'darkslategray': '#2f4f4f',
    'darkslategrey': '#2f4f4f',
    'darkturquoise': '#00ced1',
    'darkviolet': '#9400d3',
    'deeppink': '#ff1493',
    'deepskyblue': '#00bfff',
    'dimgray': '#696969',
    'dimgrey': '#696969',
    'dodgerblue': '#1e90ff',
    'firebrick': '#b22222',
    'floralwhite': '#fffaf0',
    'forestgreen': '#228b22',
    'fuchsia': '#ff00ff',
    'gainsboro': '#dcdcdc',
    'ghostwhite': '#f8f8ff',
    'gold': '#ffd700',
    'goldenrod': '#daa520',
    'gray': '#808080',
    'green': '#008000',
    'greenyellow': '#adff2f',
    'grey': '#808080',
    'honeydew': '#f0fff0',
    'hotpink': '#ff69b4',
    'indianred': '#cd5c5c',
    'indigo': '#4b0082',
    'ivory': '#fffff0',
    'khaki': '#f0e68c',
    'lavender': '#e6e6fa',
    'lavenderblush': '#fff0f5',
    'lawngreen': '#7cfc00',
    'lemonchiffon': '#fffacd',
    'lightblue': '#add8e6',
    'lightcoral': '#f08080',
    'lightcyan': '#e0ffff',
    'lightgoldenrodyellow': '#fafad2',
    'lightgray': '#d3d3d3',
    'lightgreen': '#90ee90',
    'lightgrey': '#d3d3d3',
    'lightpink': '#ffb6c1',
    'lightsalmon': '#ffa07a',
    'lightseagreen': '#20b2aa',
    'lightskyblue': '#87cefa',
    'lightslategray': '#778899',
    'lightslategrey': '#778899',
    'lightsteelblue': '#b0c4de',
    'lightyellow': '#ffffe0',
    'lime': '#00ff00',
    'limegreen': '#32cd32',
    'linen': '#faf0e6',
    'magenta': '#ff00ff',
    'maroon': '#800000',
    'mediumaquamarine': '#66cdaa',
    'mediumblue': '#0000cd',
    'mediumorchid': '#ba55d3',
    'mediumpurple': '#9370db',
    'mediumseagreen': '#3cb371',
    'mediumslateblue': '#7b68ee',
    'mediumspringgreen': '#00fa9a',
    'mediumturquoise': '#48d1cc',
    'mediumvioletred': '#c71585',
    'midnightblue': '#191970',
    'mintcream': '#f5fffa',
    'mistyrose': '#ffe4e1',
    'moccasin': '#ffe4b5',
    'navajowhite': '#ffdead',
    'navy': '#000080',
    'oldlace': '#fdf5e6',
    'olive': '#808000',
    'olivedrab': '#6b8e23',
    'orange': '#ffa500',
    'orangered': '#ff4500',
    'orchid': '#da70d6',
    'palegoldenrod': '#eee8aa',
    'palegreen': '#98fb98',
    'paleturquoise': '#afeeee',
    'palevioletred': '#db7093',
    'papayawhip': '#ffefd5',
    'peachpuff': '#ffdab9',
    'peru': '#cd853f',
    'pink': '#ffc0cb',
    'plum': '#dda0dd',
    'powderblue': '#b0e0e6',
    'purple': '#800080',
    'red': '#ff0000',
    'rosybrown': '#bc8f8f',
    'royalblue': '#4169e1',
    'saddlebrown': '#8b4513',
    'salmon': '#fa8072',
    'sandybrown': '#f4a460',
    'seagreen': '#2e8b57',
    'seashell': '#fff5ee',
    'sienna': '#a0522d',
    'silver': '#c0c0c0',
    'skyblue': '#87ceeb',
    'slateblue': '#6a5acd',
    'slategray': '#708090',
    'slategrey': '#708090',
    'snow': '#fffafa',
    'springgreen': '#00ff7f',
    'steelblue': '#4682b4',
    'tan': '#d2b48c',
    'teal': '#008080',
    'thistle': '#d8bfd8',
    'tomato': '#ff6347',
    'turquoise': '#40e0d0',
    'violet': '#ee82ee',
    'wheat': '#f5deb3',
    'white': '#ffffff',
    'whitesmoke': '#f5f5f5',
    'yellow': '#ffff00',
    'yellowgreen': '#9acd32',
    }
259 |
260 |
261 | class Red(float):
262 | """
263 | Represents the red component of a :class:`Color` for use in
264 | transformations. Instances of this class can be constructed directly with a
265 | float value, or by querying the :attr:`Color.red` attribute. Addition,
266 | subtraction, and multiplication are supported with :class:`Color`
267 | instances. For example::
268 |
269 | >>> Color.from_rgb(0, 0, 0) + Red(0.5)
270 |
271 | >>> Color('#f00') - Color('#900').red
272 |
273 | >>> (Red(0.1) * Color('red')).red
274 | Red(0.1)
275 | """
276 |
277 | def __repr__(self):
278 | return "Red(%s)" % self
279 |
280 |
281 | class Green(float):
282 | """
283 | Represents the green component of a :class:`Color` for use in
284 | transformations. Instances of this class can be constructed directly with
285 | a float value, or by querying the :attr:`Color.green` attribute. Addition,
286 | subtraction, and multiplication are supported with :class:`Color`
287 | instances. For example::
288 |
289 | >>> Color(0, 0, 0) + Green(0.1)
290 |
291 | >>> Color.from_yuv(1, -0.4, -0.6) - Green(1)
292 |
293 | >>> (Green(0.5) * Color('white')).rgb
294 | (Red(1.0), Green(0.5), Blue(1.0))
295 | """
296 |
297 | def __repr__(self):
298 | return "Green(%s)" % self
299 |
300 |
301 | class Blue(float):
302 | """
303 | Represents the blue component of a :class:`Color` for use in
304 | transformations. Instances of this class can be constructed directly with
305 | a float value, or by querying the :attr:`Color.blue` attribute. Addition,
306 | subtraction, and multiplication are supported with :class:`Color`
307 | instances. For example::
308 |
309 | >>> Color(0, 0, 0) + Blue(0.2)
310 |
311 | >>> Color.from_hls(0.5, 0.5, 1.0) - Blue(1)
312 |
313 | >>> Blue(0.9) * Color('white')
314 |
315 | """
316 |
317 | def __repr__(self):
318 | return "Blue(%s)" % self
319 |
320 |
class Hue(float):
    """
    A float subclass representing the hue of a :class:`Color` for use in
    transformations. Instances can be built directly from a float in the
    range [0.0, 1.0) describing an angle around the `HSL hue wheel`_. The
    mapping is circular, so 0.0 and 1.0 mean the same thing and
    out-of-range values are normalized into [0.0, 1.0).

    Alternatively, construct with the keyword argument ``deg`` or ``rad``
    to give the hue in degrees or radians, or query the :attr:`Color.hue`
    attribute.

    Addition, subtraction, and multiplication are supported with
    :class:`Color` instances; e.g. ``Color(1, 0, 0) + Hue(deg=180)`` has
    HLS ``(0.5, 0.5, 1.0)``. Multiplying by a :class:`Hue` is allowed but
    rarely meaningful, and the circular mapping can lead to surprising
    results: since 1.0 is equivalent to 0.0,
    ``Hue(1.0) * Color.from_hls(0.5, 0.5, 1.0)`` has hue 0.0.

    .. _HSL hue wheel: https://en.wikipedia.org/wiki/Hue
    """

    def __new__(cls, n=None, deg=None, rad=None):
        # Normalize whichever representation was supplied into [0.0, 1.0).
        if n is not None:
            value = n % 1.0
        elif deg is not None:
            value = (deg / 360.0) % 1.0
        elif rad is not None:
            value = (rad / (2 * pi)) % 1.0
        else:
            raise ValueError('You must specify a value, or deg or rad')
        return super(Hue, cls).__new__(cls, value)

    def __repr__(self):
        return "Hue(deg=%s)" % self.deg

    @property
    def deg(self):
        # Hue expressed in degrees [0.0, 360.0).
        return self * 360.0

    @property
    def rad(self):
        # Hue expressed in radians [0.0, 2*pi).
        return self * 2 * pi
373 |
374 |
class Lightness(float):
    """
    Represents the lightness of a :class:`Color` for use in transformations.

    Instances can be constructed directly from a float value, or obtained by
    querying the :attr:`Color.lightness` attribute. Addition, subtraction,
    and multiplication with :class:`Color` instances are supported::

        >>> Color(0, 0, 0) + Lightness(0.1)

        >>> Color.from_rgb_bytes(0x80, 0x80, 0) - Lightness(0.2)

        >>> Lightness(0.9) * Color('wheat')

    """

    def __repr__(self):
        return "Lightness(%s)" % float(self)
393 |
394 |
395 | class Saturation(float):
396 | """
397 | Represents the saturation of a :class:`Color` for use in transformations.
398 | Instances of this class can be constructed directly with a float value, or
399 | by querying the :attr:`Color.saturation` attribute. Addition, subtraction,
400 | and multiplication are supported with :class:`Color` instances. For
401 | example::
402 |
403 | >>> Color(0.9, 0.9, 0.6) + Saturation(0.1)
404 |
405 | >>> Color('red') - Saturation(1)
406 |
407 | >>> Saturation(0.5) * Color('wheat')
408 |
409 | """
410 |
411 | def __repr__(self):
412 | return "Lightness(%s)" % self
413 |
414 |
415 |
def clamp_float(v):
    """Clamp *v* to the closed floating-point range [0.0, 1.0]."""
    # PEP 8 (E731): use a def rather than assigning a lambda to a name;
    # this also gives the helpers useful names in tracebacks.
    return max(0.0, min(1.0, v))

def clamp_bytes(v):
    """Clamp *v* to the closed byte range [0, 255]."""
    return max(0, min(255, v))
418 |
class Color(namedtuple('Color', ('red', 'green', 'blue'))):
    """
    The Color class is a tuple which represents a color as red, green, and
    blue components.

    The class has a flexible constructor which allows you to create an instance
    from a variety of color systems including `RGB`_, `Y'UV`_, `Y'IQ`_, `HLS`_,
    and `HSV`_. There are also explicit constructors for each of these systems
    to allow you to force the use of a system in your code. For example, an
    instance of :class:`Color` can be constructed in any of the following
    ways::

        >>> Color('#f00')

        >>> Color('green')

        >>> Color(0, 0, 1)

        >>> Color(hue=0, saturation=1, value=0.5)

        >>> Color(y=0.4, u=-0.05, v=0.615)


    The specific forms that the default constructor will accept are enumerated
    below:

    +------------------------------+------------------------------------------+
    | Style                        | Description                              |
    +==============================+==========================================+
    | Single positional parameter  | Equivalent to calling                    |
    |                              | :meth:`Color.from_string`.               |
    +------------------------------+------------------------------------------+
    | Three positional parameters  | Equivalent to calling                    |
    |                              | :meth:`Color.from_rgb` if all three      |
    |                              | parameters are between 0.0 and 1.0, or   |
    |                              | :meth:`Color.from_rgb_bytes` otherwise.  |
    +------------------------------+                                          |
    | Three named parameters,      |                                          |
    | "r", "g", "b"                |                                          |
    +------------------------------+                                          |
    | Three named parameters,      |                                          |
    | "red", "green", "blue"       |                                          |
    +------------------------------+------------------------------------------+
    | Three named parameters,      | Equivalent to calling                    |
    | "y", "u", "v"                | :meth:`Color.from_yuv` if "y" is between |
    |                              | 0.0 and 1.0, "u" is between -0.436 and   |
    |                              | 0.436, and "v" is between -0.615 and     |
    |                              | 0.615, or :meth:`Color.from_yuv_bytes`   |
    |                              | otherwise.                               |
    +------------------------------+------------------------------------------+
    | Three named parameters,      | Equivalent to calling                    |
    | "y", "i", "q"                | :meth:`Color.from_yiq`.                  |
    +------------------------------+------------------------------------------+
    | Three named parameters,      | Equivalent to calling                    |
    | "h", "l", "s"                | :meth:`Color.from_hls`.                  |
    +------------------------------+                                          |
    | Three named parameters,      |                                          |
    | "hue", "lightness",          |                                          |
    | "saturation"                 |                                          |
    +------------------------------+------------------------------------------+
    | Three named parameters       | Equivalent to calling                    |
    | "h", "s", "v"                | :meth:`Color.from_hsv`                   |
    +------------------------------+                                          |
    | Three named parameters       |                                          |
    | "hue", "saturation", "value" |                                          |
    +------------------------------+------------------------------------------+

    If the constructor parameters do not conform to any of the variants in the
    table above, a :exc:`ValueError` will be thrown.

    Internally, the color is *always* represented as 3 float values
    corresponding to the red, green, and blue components of the color. These
    values take a value from 0.0 to 1.0 (least to full intensity). The class
    provides several attributes which can be used to convert one color system
    into another::

        >>> Color('#f00').hls
        (0.0, 0.5, 1.0)
        >>> Color.from_string('green').hue
        Hue(deg=120.0)
        >>> Color.from_rgb_bytes(0, 0, 255).yuv
        (0.114, 0.435912, -0.099978)

    As :class:`Color` derives from tuple, instances are immutable. While this
    provides the advantage that they can be used as keys in a dict, it does
    mean that colors themselves cannot be directly manipulated (e.g. by
    reducing the red component).

    However, several auxiliary classes in the module provide the ability to
    perform simple transformations of colors via operators which produce a new
    :class:`Color` instance. For example::

        >>> Color('red') - Red(0.5)

        >>> Color('green') + Red(0.5)

        >>> Color.from_hls(0.5, 0.5, 1.0)

        >>> Color.from_hls(0.5, 0.5, 1.0) * Lightness(0.8)

        >>> (Color.from_hls(0.5, 0.5, 1.0) * Lightness(0.8)).hls
        (0.5, 0.4, 1.0)

    From the last example above one can see that even attributes not directly
    stored by the color (such as lightness) can be manipulated in this fashion.
    In this case a :class:`Color` instance is constructed from HLS (hue,
    lightness, saturation) values with a lightness of 0.5. This is multiplied
    by a :class:`Lightness` instance with a value of 0.8 which constructs a new
    :class:`Color` with the same hue and saturation, but a lightness of 0.5 *
    0.8 = 0.4.

    If an instance is converted to a string (with :func:`str`) it will return a
    string containing the 7-character HTML code for the color (e.g. "#ff0000"
    for red). As can be seen in the examples above, a similar representation is
    returned for :func:`repr`.

    .. _RGB: https://en.wikipedia.org/wiki/RGB_color_space
    .. _Y'UV: https://en.wikipedia.org/wiki/YUV
    .. _Y'IQ: https://en.wikipedia.org/wiki/YIQ
    .. _HLS: https://en.wikipedia.org/wiki/HSL_and_HSV
    .. _HSV: https://en.wikipedia.org/wiki/HSL_and_HSV
    """

    def __new__(cls, *args, **kwargs):
        def from_rgb(r, g, b):
            # Dispatch to the float constructor when every component fits the
            # [0.0, 1.0] range; otherwise assume byte values
            if 0.0 <= r <= 1.0 and 0.0 <= g <= 1.0 and 0.0 <= b <= 1.0:
                return cls.from_rgb(r, g, b)
            else:
                return cls.from_rgb_bytes(r, g, b)

        def from_yuv(y, u, v):
            # Likewise for Y'UV: floats within the standard analog ranges,
            # otherwise byte values
            if 0.0 <= y <= 1.0 and -0.436 <= u <= 0.436 and -0.615 <= v <= 0.615:
                return cls.from_yuv(y, u, v)
            else:
                return cls.from_yuv_bytes(y, u, v)

        if kwargs:
            try:
                # Dispatch on the exact set of keyword names provided; any
                # unrecognized combination raises KeyError which falls through
                # to the ValueError below
                return {
                    frozenset('rgb'): from_rgb,
                    frozenset('yuv'): from_yuv,
                    frozenset('yiq'): cls.from_yiq,
                    frozenset('hls'): cls.from_hls,
                    frozenset('hsv'): cls.from_hsv,
                    frozenset(('red', 'green', 'blue')):
                        lambda red, green, blue: from_rgb(red, green, blue),
                    frozenset(('hue', 'lightness', 'saturation')):
                        lambda hue, lightness, saturation: cls.from_hls(hue, lightness, saturation),
                    frozenset(('hue', 'saturation', 'value')):
                        lambda hue, saturation, value: cls.from_hsv(hue, saturation, value),
                }[frozenset(kwargs.keys())](**kwargs)
            except KeyError:
                pass
        else:
            if len(args) == 1:
                return cls.from_string(args[0])
            elif len(args) == 3:
                return from_rgb(*args)
        raise ValueError('Unable to construct Color from provided arguments')

    @classmethod
    def from_string(cls, s):
        """
        Construct a :class:`Color` from a 4 or 7 character CSS-like
        representation (e.g. "#f00" or "#ff0000" for red), or from one of the
        named colors (e.g. "green" or "wheat") from the `CSS standard`_. Any
        other string format will result in a :exc:`ValueError`.

        .. _CSS standard: http://www.w3.org/TR/css3-color/#svg-color
        """
        if isinstance(s, bytes):
            s = s.decode('ascii')
        if s.startswith('#'):
            if len(s) == 7:
                return cls.from_rgb_bytes(
                    int(s[1:3], base=16),
                    int(s[3:5], base=16),
                    int(s[5:7], base=16)
                    )
            elif len(s) == 4:
                # Short form: each hex digit is doubled (e.g. "f" -> "ff")
                return cls.from_rgb_bytes(
                    int(s[1:2], base=16) * 0x11,
                    int(s[2:3], base=16) * 0x11,
                    int(s[3:4], base=16) * 0x11
                    )
            raise ValueError('Unrecognized color format "%s"' % s)
        try:
            return cls.from_string(NAMED_COLORS[s.lower()])
        except KeyError:
            raise ValueError('Unrecognized color name "%s"' % s)

    @classmethod
    def from_rgb(cls, r, g, b):
        """
        Construct a :class:`Color` from three `RGB`_ float values between 0.0
        and 1.0.
        """
        return super(Color, cls).__new__(cls, r, g, b)

    @classmethod
    def from_rgb_bytes(cls, r, g, b):
        """
        Construct a :class:`Color` from three `RGB`_ byte values between 0 and
        255.
        """
        return super(Color, cls).__new__(cls, r / 255.0, g / 255.0, b / 255.0)

    @classmethod
    def from_yuv(cls, y, u, v):
        """
        Construct a :class:`Color` from three `Y'UV`_ float values. The Y value
        may be between 0.0 and 1.0. U may be between -0.436 and 0.436, while
        V may be between -0.615 and 0.615.
        """
        return super(Color, cls).__new__(
            cls,
            clamp_float(y + 1.14 * v),
            clamp_float(y - 0.395 * u - 0.581 * v),
            clamp_float(y + 2.033 * u),
            )

    @classmethod
    def from_yuv_bytes(cls, y, u, v):
        """
        Construct a :class:`Color` from three `Y'UV`_ byte values between 0 and
        255. The U and V values are biased by 128 to prevent negative values as
        is typical in video applications. The Y value is biased by 16 for the
        same purpose.
        """
        c = y - 16
        d = u - 128
        e = v - 128
        # Fixed-point integer conversion (ITU-R BT.601 coefficients scaled by
        # 256, with +128 for rounding before the shift)
        return cls.from_rgb_bytes(
            clamp_bytes((298 * c + 409 * e + 128) >> 8),
            clamp_bytes((298 * c - 100 * d - 208 * e + 128) >> 8),
            clamp_bytes((298 * c + 516 * d + 128) >> 8),
            )

    @classmethod
    def from_yiq(cls, y, i, q):
        """
        Construct a :class:`Color` from three `Y'IQ`_ float values. Y' can be
        between 0.0 and 1.0, while I and Q can be between -1.0 and 1.0.
        """
        return super(Color, cls).__new__(cls, *colorsys.yiq_to_rgb(y, i, q))

    @classmethod
    def from_hls(cls, h, l, s):
        """
        Construct a :class:`Color` from `HLS`_ (hue, lightness, saturation)
        floats between 0.0 and 1.0.
        """
        return super(Color, cls).__new__(cls, *colorsys.hls_to_rgb(h, l, s))

    @classmethod
    def from_hsv(cls, h, s, v):
        """
        Construct a :class:`Color` from `HSV`_ (hue, saturation, value) floats
        between 0.0 and 1.0.
        """
        return super(Color, cls).__new__(cls, *colorsys.hsv_to_rgb(h, s, v))

    def __add__(self, other):
        # Adding a component/attribute class adjusts just that aspect of the
        # color, clamping (or wrapping, for hue) as appropriate
        if isinstance(other, Red):
            return Color(clamp_float(self.red + other), self.green, self.blue)
        elif isinstance(other, Green):
            return Color(self.red, clamp_float(self.green + other), self.blue)
        elif isinstance(other, Blue):
            return Color(self.red, self.green, clamp_float(self.blue + other))
        elif isinstance(other, Hue):
            h, l, s = self.hls
            return Color.from_hls((h + other) % 1.0, l, s)
        elif isinstance(other, Lightness):
            h, l, s = self.hls
            return Color.from_hls(h, clamp_float(l + other), s)
        elif isinstance(other, Saturation):
            h, l, s = self.hls
            return Color.from_hls(h, l, clamp_float(s + other))
        return NotImplemented

    def __radd__(self, other):
        # Addition is commutative
        if isinstance(other, (Red, Green, Blue, Hue, Lightness, Saturation)):
            return self.__add__(other)
        return NotImplemented

    def __sub__(self, other):
        # color - attribute: decrease that aspect of the color
        if isinstance(other, Red):
            return Color(clamp_float(self.red - other), self.green, self.blue)
        elif isinstance(other, Green):
            return Color(self.red, clamp_float(self.green - other), self.blue)
        elif isinstance(other, Blue):
            return Color(self.red, self.green, clamp_float(self.blue - other))
        elif isinstance(other, Hue):
            h, l, s = self.hls
            return Color.from_hls((h - other) % 1.0, l, s)
        elif isinstance(other, Lightness):
            h, l, s = self.hls
            return Color.from_hls(h, clamp_float(l - other), s)
        elif isinstance(other, Saturation):
            h, l, s = self.hls
            return Color.from_hls(h, l, clamp_float(s - other))
        return NotImplemented

    def __rsub__(self, other):
        # attribute - color: subtraction is not commutative, so the operand
        # order is reversed relative to __sub__
        if isinstance(other, Red):
            return Color(clamp_float(other - self.red), self.green, self.blue)
        elif isinstance(other, Green):
            return Color(self.red, clamp_float(other - self.green), self.blue)
        elif isinstance(other, Blue):
            return Color(self.red, self.green, clamp_float(other - self.blue))
        elif isinstance(other, Hue):
            h, l, s = self.hls
            return Color.from_hls((other - h) % 1.0, l, s)
        elif isinstance(other, Lightness):
            h, l, s = self.hls
            return Color.from_hls(h, clamp_float(other - l), s)
        elif isinstance(other, Saturation):
            h, l, s = self.hls
            return Color.from_hls(h, l, clamp_float(other - s))
        return NotImplemented

    def __mul__(self, other):
        # Multiplying by a component/attribute class scales that aspect
        if isinstance(other, Red):
            return Color(clamp_float(self.red * other), self.green, self.blue)
        elif isinstance(other, Green):
            return Color(self.red, clamp_float(self.green * other), self.blue)
        elif isinstance(other, Blue):
            return Color(self.red, self.green, clamp_float(self.blue * other))
        elif isinstance(other, Hue):
            h, l, s = self.hls
            return Color.from_hls((h * other) % 1.0, l, s)
        elif isinstance(other, Lightness):
            h, l, s = self.hls
            return Color.from_hls(h, clamp_float(l * other), s)
        elif isinstance(other, Saturation):
            h, l, s = self.hls
            return Color.from_hls(h, l, clamp_float(s * other))
        return NotImplemented

    def __rmul__(self, other):
        # Multiplication is commutative
        if isinstance(other, (Red, Green, Blue, Hue, Lightness, Saturation)):
            return self.__mul__(other)
        # Bug fix: previously fell off the end returning None; returning
        # NotImplemented lets Python raise a proper TypeError instead
        return NotImplemented

    def __str__(self):
        return '#%02x%02x%02x' % self.rgb_bytes

    def __repr__(self):
        # Bug fix: the format string had been stripped to '' (which raises
        # TypeError); restore the angle-bracketed representation
        return '<Color "%s">' % str(self)

    @property
    def rgb(self):
        """
        Returns a 3-tuple of (red, green, blue) float values (between 0.0 and
        1.0).
        """
        return (self.red, self.green, self.blue)

    @property
    def rgb_bytes(self):
        """
        Returns a 3-tuple of (red, green, blue) byte values.
        """
        return (
            int(self.red * 255),
            int(self.green * 255),
            int(self.blue * 255),
            )

    @property
    def yuv(self):
        """
        Returns a 3-tuple of (y, u, v) float values; y values can be between
        0.0 and 1.0, u values are between -0.436 and 0.436, and v values are
        between -0.615 and 0.615.
        """
        r, g, b = self.rgb
        y = 0.299 * r + 0.587 * g + 0.114 * b
        return (
            y,
            0.492 * (b - y),
            0.877 * (r - y),
            )

    @property
    def yuv_bytes(self):
        """
        Returns a 3-tuple of (y, u, v) byte values. Y values are biased by 16
        in the result to prevent negatives. U and V values are biased by 128
        for the same purpose.
        """
        r, g, b = self.rgb_bytes
        return (
            (( 66 * r + 129 * g +  25 * b + 128) >> 8) + 16,
            ((-38 * r -  73 * g + 112 * b + 128) >> 8) + 128,
            ((112 * r -  94 * g -  18 * b + 128) >> 8) + 128,
            )

    @property
    def yiq(self):
        """
        Returns a 3-tuple of (y, i, q) float values; y values can be between
        0.0 and 1.0, whilst i and q values can be between -1.0 and 1.0.
        """
        return colorsys.rgb_to_yiq(self.red, self.green, self.blue)

    @property
    def hls(self):
        """
        Returns a 3-tuple of (hue, lightness, saturation) float values (between
        0.0 and 1.0).
        """
        return colorsys.rgb_to_hls(self.red, self.green, self.blue)

    @property
    def hsv(self):
        """
        Returns a 3-tuple of (hue, saturation, value) float values (between 0.0
        and 1.0).
        """
        return colorsys.rgb_to_hsv(self.red, self.green, self.blue)

    @property
    def red(self):
        """
        Returns the red component of the color as a :class:`Red` instance which
        can be used in operations with other :class:`Color` instances.
        """
        # super() calls needed here to avoid recursion
        return Red(super(Color, self).red)

    @property
    def green(self):
        """
        Returns the green component of the color as a :class:`Green` instance
        which can be used in operations with other :class:`Color` instances.
        """
        return Green(super(Color, self).green)

    @property
    def blue(self):
        """
        Returns the blue component of the color as a :class:`Blue` instance
        which can be used in operations with other :class:`Color` instances.
        """
        return Blue(super(Color, self).blue)

    @property
    def hue(self):
        """
        Returns the hue of the color as a :class:`Hue` instance which can be
        used in operations with other :class:`Color` instances.
        """
        return Hue(self.hls[0])

    @property
    def lightness(self):
        """
        Returns the lightness of the color as a :class:`Lightness` instance
        which can be used in operations with other :class:`Color` instances.
        """
        return Lightness(self.hls[1])

    @property
    def saturation(self):
        """
        Returns the saturation of the color as a :class:`Saturation` instance
        which can be used in operations with other :class:`Color` instances.
        """
        return Saturation(self.hls[2])
890 |
891 |
--------------------------------------------------------------------------------
/picamera/encoders.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python camera library for the Raspberry Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The encoders module defines encoder classes for use by the camera. Most users
32 | will have no direct need to use these classes directly, but advanced users may
33 | find them useful as base classes for :ref:`custom_encoders`.
34 |
35 | .. note::
36 |
37 | All classes in this module are available from the :mod:`picamera` namespace
38 | without having to import :mod:`picamera.encoders` directly.
39 |
40 | The following classes are defined in the module:
41 |
42 |
43 | PiVideoFrameType
44 | ================
45 |
46 | .. autoclass:: PiVideoFrameType
47 | :members:
48 |
49 |
50 | PiVideoFrame
51 | ============
52 |
53 | .. autoclass:: PiVideoFrame(index, frame_type, frame_size, video_size, split_size, timestamp)
54 | :members:
55 |
56 |
57 | PiEncoder
58 | =========
59 |
60 | .. autoclass:: PiEncoder
61 | :members:
62 | :private-members:
63 |
64 |
65 | PiVideoEncoder
66 | ==============
67 |
68 | .. autoclass:: PiVideoEncoder
69 | :members:
70 | :private-members:
71 |
72 |
73 | PiImageEncoder
74 | ==============
75 |
76 | .. autoclass:: PiImageEncoder
77 | :members:
78 | :private-members:
79 |
80 |
81 | PiRawMixin
82 | ==========
83 |
84 | .. autoclass:: PiRawMixin
85 | :members:
86 | :private-members:
87 |
88 |
89 | PiCookedVideoEncoder
90 | ====================
91 |
92 | .. autoclass:: PiCookedVideoEncoder
93 | :members:
94 | :private-members:
95 |
96 |
97 | PiRawVideoEncoder
98 | =================
99 |
100 | .. autoclass:: PiRawVideoEncoder
101 | :members:
102 | :private-members:
103 |
104 |
105 | PiOneImageEncoder
106 | =================
107 |
108 | .. autoclass:: PiOneImageEncoder
109 | :members:
110 | :private-members:
111 |
112 |
113 | PiMultiImageEncoder
114 | ===================
115 |
116 | .. autoclass:: PiMultiImageEncoder
117 | :members:
118 | :private-members:
119 |
120 |
121 | PiRawImageMixin
122 | ===============
123 |
124 | .. autoclass:: PiRawImageMixin
125 | :members:
126 | :private-members:
127 |
128 |
129 | PiCookedOneImageEncoder
130 | =======================
131 |
132 | .. autoclass:: PiCookedOneImageEncoder
133 | :members:
134 | :private-members:
135 |
136 |
137 | PiRawOneImageEncoder
138 | ====================
139 |
140 | .. autoclass:: PiRawOneImageEncoder
141 | :members:
142 | :private-members:
143 |
144 |
145 | PiCookedMultiImageEncoder
146 | =========================
147 |
148 | .. autoclass:: PiCookedMultiImageEncoder
149 | :members:
150 | :private-members:
151 |
152 |
153 | PiRawMultiImageEncoder
154 | ======================
155 |
156 | .. autoclass:: PiRawMultiImageEncoder
157 | :members:
158 | :private-members:
159 |
160 | """
161 |
162 | from __future__ import (
163 | unicode_literals,
164 | print_function,
165 | division,
166 | absolute_import,
167 | )
168 |
# Py2/Py3 compatibility shims: rebind str to the unicode text type (type('')
# evaluates to unicode on Py2 and str on Py3), and alias range to the lazy
# xrange on Py2. On Py3 xrange does not exist, so the NameError is swallowed
# and the builtin range is left as-is.
str = type('')
try:
    range = xrange
except NameError:
    pass
175 |
176 | import io
177 | import datetime
178 | import threading
179 | import warnings
180 | import ctypes as ct
181 | from collections import namedtuple
182 |
183 | import picamera.mmal as mmal
184 | from picamera.exc import (
185 | mmal_check,
186 | PiCameraError,
187 | PiCameraMMALError,
188 | PiCameraValueError,
189 | PiCameraRuntimeError,
190 | PiCameraDeprecated,
191 | )
192 |
193 |
class PiVideoFrameType(object):
    """
    This class simply defines constants used to represent the type of a frame
    in :attr:`PiVideoFrame.frame_type`. Effectively it is a namespace for an
    enum.

    .. attribute:: frame

        Indicates a predicted frame (P-frame). This is the most common frame
        type.

    .. attribute:: key_frame

        Indicates an intra-frame (I-frame) also known as a key frame.

    .. attribute:: sps_header

        Indicates an inline SPS/PPS header (rather than picture data) which is
        typically used as a split point.

    .. attribute:: motion_data

        Indicates the frame is inline motion vector data, rather than picture
        data.

    .. versionadded:: 1.5
    """
    frame = 0        # predicted frame (P-frame)
    key_frame = 1    # intra-frame (I-frame)
    sps_header = 2   # inline SPS/PPS header
    motion_data = 3  # inline motion vector data
225 |
226 |
class PiVideoFrame(namedtuple('PiVideoFrame', (
        'index',       # zero-based frame counter
        'frame_type',  # one of the PiVideoFrameType constants
        'frame_size',  # bytes output for this frame so far
        'video_size',  # bytes output for the whole video so far
        'split_size',  # bytes output since the last split point
        'timestamp',   # presentation timestamp (PTS) in microseconds
        'complete',    # True once the frame has been fully output
        ))):
    """
    A namedtuple derivative describing a single video frame emitted by the
    encoder. Access fields by attribute name rather than position (e.g.
    ``frame.index`` rather than ``frame[0]``).

    .. attribute:: index

        The zero-based number of the frame: a monotonic counter incremented
        for every buffer the camera outputs. It cannot be used to detect
        dropped frames, and it is also incremented for SPS headers and motion
        data buffers.

    .. attribute:: frame_type

        A constant describing the kind of data the frame contains (see
        :class:`PiVideoFrameType`). Some frame types carry no image data.

    .. attribute:: frame_size

        The size in bytes of the current frame. For frames written in several
        chunks this grows while :attr:`index` stays the same; check
        :attr:`complete` to see whether the frame has finished.

    .. attribute:: video_size

        The size in bytes of the entire video up to this frame. Stream-side
        buffering means the bytes actually written (e.g. to disk) may lag
        behind this value.

    .. attribute:: split_size

        The size in bytes of the video recorded since the last call to either
        :meth:`~picamera.camera.PiCamera.start_recording` or
        :meth:`~picamera.camera.PiCamera.split_recording`. As with
        :attr:`video_size`, buffering may make this differ from what has
        actually been written.

    .. attribute:: timestamp

        The presentation timestamp (PTS) reported by the encoder, in
        microseconds since recording started. It is only updated when the
        encoder outputs the end of a frame, so it may lag real time.

        .. warning::

            The encoder occasionally reports "time unknown", which picamera
            represents as ``None``; check for ``None`` before using this
            value.

    .. attribute:: complete

        A bool indicating whether the current frame is complete. Once True,
        :attr:`frame_size` stops growing and resets for the next frame.

    .. versionchanged:: 1.5
        Deprecated :attr:`header` and :attr:`keyframe` attributes and added the
        new :attr:`frame_type` attribute instead.

    .. versionchanged:: 1.9
        Added the :attr:`complete` attribute.
    """

    @property
    def position(self):
        """
        The zero-based byte position of this frame in the stream that
        contains it.
        """
        return self.split_size - self.frame_size

    @property
    def keyframe(self):
        """
        A bool indicating whether this frame is a keyframe (an intra-frame,
        or I-frame in MPEG parlance).

        .. deprecated:: 1.5
            Please compare :attr:`frame_type` to
            :attr:`PiVideoFrameType.key_frame` instead.
        """
        message = (
            'PiVideoFrame.keyframe is deprecated; please check '
            'PiVideoFrame.frame_type for equality with '
            'PiVideoFrameType.key_frame instead')
        warnings.warn(PiCameraDeprecated(message))
        return self.frame_type == PiVideoFrameType.key_frame

    @property
    def header(self):
        """
        A bool indicating whether this frame is actually an SPS/PPS header.
        Typically it is best to split an H.264 stream so that it starts with
        an SPS/PPS header.

        .. deprecated:: 1.5
            Please compare :attr:`frame_type` to
            :attr:`PiVideoFrameType.sps_header` instead.
        """
        message = (
            'PiVideoFrame.header is deprecated; please check '
            'PiVideoFrame.frame_type for equality with '
            'PiVideoFrameType.sps_header instead')
        warnings.warn(PiCameraDeprecated(message))
        return self.frame_type == PiVideoFrameType.sps_header
351 |
352 |
def _debug_buffer(buf):
    # Print a one-line summary of an MMAL buffer header: one letter per set
    # flag (underscore when clear) followed by the buffer length.
    flags = buf[0].flags
    flag_bits = (
        ('E', mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END),
        ('K', mmal.MMAL_BUFFER_HEADER_FLAG_KEYFRAME),
        ('C', mmal.MMAL_BUFFER_HEADER_FLAG_CONFIG),
        ('M', mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO),
        ('X', mmal.MMAL_BUFFER_HEADER_FLAG_EOS),
        )
    summary = 'flags=' + ''.join(
        letter if flags & mask else '_'
        for letter, mask in flag_bits)
    print('%s len=%d' % (summary, buf[0].length))
365 |
366 |
def _encoder_callback(port, buf):
    # MMAL output-port callback: the encoder setup code stashed a pointer to
    # the owning PiEncoder instance (as a ctypes py_object) in the port's
    # userdata field; recover it and delegate buffer handling to _callback.
    #_debug_buffer(buf)
    encoder = ct.cast(port[0].userdata, ct.POINTER(ct.py_object))[0]
    encoder._callback(port, buf)
# Wrap the Python function in the MMAL C callback type so it can be handed to
# the C library; the module-level binding also keeps the ctypes trampoline
# alive for as long as the C side may invoke it.
_encoder_callback = mmal.MMAL_PORT_BH_CB_T(_encoder_callback)
372 |
373 |
class PiEncoder(object):
    """
    Base implementation of an MMAL encoder for use by PiCamera.

    The *parent* parameter specifies the :class:`~picamera.camera.PiCamera`
    instance that has constructed the encoder. The *camera_port* parameter
    provides the MMAL camera port that the encoder should enable for capture
    (this will be the still or video port of the camera component). The
    *input_port* parameter specifies the MMAL port that the encoder should
    connect to its input. Sometimes this will be the same as the camera port,
    but if other components are present in the pipeline (e.g. a splitter), it
    may be different.

    The *format* parameter specifies the format that the encoder should
    produce in its output. This is specified as a string and will be one of
    the following for image encoders:

    * ``'jpeg'``
    * ``'png'``
    * ``'gif'``
    * ``'bmp'``
    * ``'yuv'``
    * ``'rgb'``
    * ``'rgba'``
    * ``'bgr'``
    * ``'bgra'``

    And one of the following for video encoders:

    * ``'h264'``
    * ``'mjpeg'``

    The *resize* parameter is either ``None`` (indicating no resizing
    should take place), or a ``(width, height)`` tuple specifying the
    resolution that the output of the encoder should be resized to.

    Finally, the *options* parameter specifies additional keyword arguments
    that can be used to configure the encoder (e.g. bitrate for videos, or
    quality for images).

    The class has a number of attributes:

    .. attribute:: camera_port

        A pointer to the camera output port that needs to be activated and
        deactivated in order to start/stop capture. This is not necessarily the
        port that the encoder component's input port is connected to (for
        example, in the case of video-port based captures, this will be the
        camera video port behind the splitter).

    .. attribute:: encoder

        A pointer to the MMAL encoder component, or None if no encoder
        component has been created (some encoder classes don't use an actual
        encoder component, for example :class:`PiRawImageMixin`).

    .. attribute:: encoder_connection

        A pointer to the MMAL connection linking the encoder's input port to
        the camera, splitter, or resizer output port (depending on
        configuration), if any.

    .. attribute:: event

        A :class:`threading.Event` instance used to synchronize operations
        (like start, stop, and split) between the control thread and the
        callback thread.

    .. attribute:: exception

        If an exception occurs during the encoder callback, this attribute is
        used to store the exception until it can be re-raised in the control
        thread.

    .. attribute:: format

        The image or video format that the encoder is expected to produce. This
        is equal to the value of the *format* parameter.

    .. attribute:: input_port

        A pointer to the MMAL port that the encoder component's input port
        should be connected to.

    .. attribute:: output_port

        A pointer to the MMAL port of the encoder's output. In the case no
        encoder component is created, this should be the camera/component
        output port responsible for producing data. In other words, this
        attribute **must** be set on initialization.

    .. attribute:: outputs

        A mapping of ``key`` to ``(output, opened)`` tuples where ``output``
        is a file-like object, and ``opened`` is a bool indicating whether or
        not we opened the output object (and thus whether we are responsible
        for eventually closing it).

    .. attribute:: outputs_lock

        A :func:`threading.Lock` instance used to protect access to
        :attr:`outputs`.

    .. attribute:: parent

        The :class:`~picamera.camera.PiCamera` instance that created this
        PiEncoder instance.

    .. attribute:: pool

        A pointer to a pool of MMAL buffers.

    .. attribute:: resizer

        A pointer to the MMAL resizer component, or None if no resizer
        component has been created.

    .. attribute:: resizer_connection

        A pointer to the MMAL connection linking the resizer's input port to
        the camera or splitter's output port, if any.
    """

    # MMAL component name to instantiate; descendents override this
    encoder_type = None

    def __init__(
            self, parent, camera_port, input_port, format, resize, **options):
        self.parent = parent
        self.format = format
        self.encoder = None
        self.resizer = None
        self.encoder_connection = None
        self.resizer_connection = None
        self.camera_port = camera_port
        self.input_port = input_port
        self.output_port = None
        self.pool = None
        self.started_capture = False
        self.outputs_lock = threading.Lock() # protects access to self.outputs
        self.outputs = {}
        self.exception = None
        self.event = threading.Event()
        self.stopped = True
        try:
            if parent.closed:
                raise PiCameraRuntimeError("Camera is closed")
            if resize:
                self._create_resizer(*resize)
            self._create_encoder(**options)
            self._create_pool()
            self._create_connections()
        except:
            # Ensure partially-constructed MMAL components are torn down
            # before propagating the error
            self.close()
            raise

    def _create_resizer(self, width, height):
        """
        Creates and configures an MMAL resizer component.

        This is called when the initializer's *resize* parameter is something
        other than ``None``. The *width* and *height* parameters are passed to
        the constructed resizer. Note that this method only constructs the
        resizer - it does not connect it to the encoder. The method sets the
        :attr:`resizer` attribute to the constructed resizer component.
        """
        self.resizer = ct.POINTER(mmal.MMAL_COMPONENT_T)()
        mmal_check(
            mmal.mmal_component_create(
                mmal.MMAL_COMPONENT_DEFAULT_RESIZER, self.resizer),
            prefix="Failed to create resizer component")
        if not self.resizer[0].input_num:
            raise PiCameraError("No input ports on resizer component")
        if not self.resizer[0].output_num:
            raise PiCameraError("No output ports on resizer component")
        # Copy the original input port's format to the resizer's input,
        # then the resizer's input format to the output, and configure it
        mmal.mmal_format_copy(
            self.resizer[0].input[0][0].format, self.input_port[0].format)
        mmal_check(
            mmal.mmal_port_format_commit(self.resizer[0].input[0]),
            prefix="Failed to set resizer input port format")
        mmal.mmal_format_copy(
            self.resizer[0].output[0][0].format, self.resizer[0].input[0][0].format)
        fmt = self.resizer[0].output[0][0].format
        # Width/height must be aligned to 32/16 pixel boundaries; the crop
        # rectangle carries the true requested resolution
        fmt[0].es[0].video.width = mmal.VCOS_ALIGN_UP(width, 32)
        fmt[0].es[0].video.height = mmal.VCOS_ALIGN_UP(height, 16)
        fmt[0].es[0].video.crop.x = 0
        fmt[0].es[0].video.crop.y = 0
        fmt[0].es[0].video.crop.width = width
        fmt[0].es[0].video.crop.height = height
        mmal_check(
            mmal.mmal_port_format_commit(self.resizer[0].output[0]),
            prefix="Failed to set resizer output port format")

    def _create_encoder(self):
        """
        Creates and configures the MMAL encoder component.

        This method only constructs the encoder; it does not connect it to the
        input port. The method sets the :attr:`encoder` attribute to the
        constructed encoder component, and the :attr:`output_port` attribute to
        the encoder's output port (or the previously constructed resizer's
        output port if one has been requested). Descendent classes extend this
        method to finalize encoder configuration.

        .. note::

            It should be noted that this method is called with the
            initializer's ``option`` keyword arguments. This base
            implementation expects no additional arguments, but descendent
            classes extend the parameter list to include options relevant to
            them.
        """
        assert not self.encoder
        self.encoder = ct.POINTER(mmal.MMAL_COMPONENT_T)()
        mmal_check(
            mmal.mmal_component_create(self.encoder_type, self.encoder),
            prefix="Failed to create encoder component")
        if not self.encoder[0].input_num:
            raise PiCameraError("No input ports on encoder component")
        if not self.encoder[0].output_num:
            raise PiCameraError("No output ports on encoder component")
        # Ensure output format is the same as the input
        self.output_port = self.encoder[0].output[0]
        if self.resizer:
            mmal.mmal_format_copy(
                self.encoder[0].input[0][0].format, self.resizer[0].output[0][0].format)
        else:
            mmal.mmal_format_copy(
                self.encoder[0].input[0][0].format, self.input_port[0].format)
        mmal_check(
            mmal.mmal_port_format_commit(self.encoder[0].input[0]),
            prefix="Failed to set encoder input port format")
        mmal.mmal_format_copy(
            self.output_port[0].format, self.encoder[0].input[0][0].format)
        # Set buffer size and number to appropriate values
        if self.format == 'mjpeg':
            # There is a bug in the MJPEG encoder that causes a deadlock if the
            # FIFO is full on shutdown. Increasing the encoder buffer size
            # makes this less likely to happen. See
            # https://github.com/raspberrypi/userland/issues/208
            self.output_port[0].buffer_size = max(512 * 1024, self.output_port[0].buffer_size_recommended)
        else:
            self.output_port[0].buffer_size = self.output_port[0].buffer_size_recommended
        self.output_port[0].buffer_num = self.output_port[0].buffer_num_recommended
        # NOTE: We deliberately don't commit the output port format here as
        # this is a base class and the output configuration is incomplete at
        # this point. Descendents are expected to finish configuring the
        # encoder and then commit the port format themselves

    def _create_pool(self):
        """
        Allocates a pool of MMAL buffers for the encoder.

        This method is expected to construct an MMAL pool of buffers for the
        :attr:`output_port`, and store the result in the :attr:`pool`
        attribute.
        """
        assert not self.pool
        self.pool = mmal.mmal_port_pool_create(
            self.output_port,
            self.output_port[0].buffer_num,
            self.output_port[0].buffer_size)
        if not self.pool:
            raise PiCameraError(
                "Failed to create buffer header pool for encoder component")

    def _create_connections(self):
        """
        Creates all connections between MMAL components.

        This method is called to connect the encoder and the optional resizer
        to the input port provided by the camera. It sets the
        :attr:`encoder_connection` and :attr:`resizer_connection` attributes as
        required.
        """
        assert not self.encoder_connection
        if self.resizer:
            self.resizer_connection = self.parent._connect_ports(
                self.input_port, self.resizer[0].input[0])
            self.encoder_connection = self.parent._connect_ports(
                self.resizer[0].output[0], self.encoder[0].input[0])
        else:
            self.encoder_connection = self.parent._connect_ports(
                self.input_port, self.encoder[0].input[0])

    def _callback(self, port, buf):
        """
        The encoder's main callback function.

        When the encoder is active, this method is periodically called in a
        background thread. The *port* parameter specifies the MMAL port
        providing the output (typically this is the encoder's output port, but
        in the case of unencoded captures may simply be a camera port), while
        the *buf* parameter is an MMAL buffer header pointer which can be used
        to obtain the data to write, along with meta-data about the current
        frame.

        This method *must* release the MMAL buffer header before returning
        (failure to do so will cause a lockup), and should recycle buffers if
        expecting further data (the :meth:`_callback_recycle` method can be
        called to perform the latter duty). Finally, this method must set
        :attr:`event` when the encoder has finished (and should set
        :attr:`exception` if an exception occurred during encoding).

        Developers wishing to write a custom encoder class may find it simpler
        to override the :meth:`_callback_write` method, rather than deal with
        these complexities.
        """
        if self.stopped:
            mmal.mmal_buffer_header_release(buf)
        else:
            stop = False
            try:
                try:
                    mmal_check(
                        mmal.mmal_buffer_header_mem_lock(buf),
                        prefix="Unable to lock buffer header memory")
                    try:
                        stop = self._callback_write(buf)
                    finally:
                        mmal.mmal_buffer_header_mem_unlock(buf)
                finally:
                    mmal.mmal_buffer_header_release(buf)
                    self._callback_recycle(port, buf)
            except Exception as e:
                stop = True
                self.exception = e
            if stop:
                self.stopped = True
                self.event.set()

    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        """
        Writes output on behalf of the encoder callback function.

        This method is called by :meth:`_callback` to handle writing to an
        object in :attr:`outputs` identified by *key*. The *buf* parameter is
        an MMAL buffer header pointer which can be used to obtain the length of
        data available (``buf[0].length``), a pointer to the data
        (``buf[0].data``) which should typically be used with
        :func:`ctypes.string_at`, and meta-data about the contents of the
        buffer (``buf[0].flags``). The method is expected to return a boolean
        to indicate whether output is complete (``True``) or whether more data
        is expected (``False``).

        The default implementation simply writes the contents of the buffer to
        the output identified by *key*, and returns ``True`` if the buffer
        flags indicate end of stream. Image encoders will typically override
        the return value to indicate ``True`` on end of frame (as they only
        wish to output a single image). Video encoders will typically override
        this method to determine where key-frames and SPS headers occur.
        """
        if buf[0].length:
            with self.outputs_lock:
                try:
                    written = self.outputs[key][0].write(
                        ct.string_at(buf[0].data, buf[0].length))
                except KeyError:
                    pass
                else:
                    # Ignore None return value; most Python 2 streams have
                    # no return value for write()
                    if (written is not None) and (written != buf[0].length):
                        raise PiCameraError(
                            "Unable to write buffer to output %s" % key)
        return bool(buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_EOS)

    def _callback_recycle(self, port, buf):
        """
        Recycles the buffer on behalf of the encoder callback function.

        This method is called by :meth:`_callback` when there is a buffer to
        recycle (because further output is expected). It is unlikely descendent
        classes will have a need to override this method, but if they override
        the :meth:`_callback` method they may wish to call it.
        """
        new_buf = mmal.mmal_queue_get(self.pool[0].queue)
        if not new_buf:
            raise PiCameraError(
                "Unable to get a buffer to return to the encoder port")
        mmal_check(
            mmal.mmal_port_send_buffer(port, new_buf),
            prefix="Unable to return a buffer to the encoder port")

    def _open_output(self, output, key=PiVideoFrameType.frame):
        """
        Opens *output* and associates it with *key* in :attr:`outputs`.

        If *output* is a string, this method opens it as a filename and keeps
        track of the fact that the encoder was the one to open it (which
        implies that :meth:`_close_output` should eventually close it).
        Otherwise, *output* is assumed to be a file-like object and is used
        verbatim. The opened output is added to the :attr:`outputs` dictionary
        with the specified *key*.
        """
        with self.outputs_lock:
            opened = isinstance(output, (bytes, str))
            if opened:
                # Open files in binary mode with a decent buffer size
                output = io.open(output, 'wb', buffering=65536)
            self.outputs[key] = (output, opened)

    def _close_output(self, key=PiVideoFrameType.frame):
        """
        Closes the output associated with *key* in :attr:`outputs`.

        Closes the output object associated with the specified *key*, and
        removes it from the :attr:`outputs` dictionary (if we didn't open the
        object then we attempt to flush it instead).
        """
        with self.outputs_lock:
            try:
                (output, opened) = self.outputs.pop(key)
            except KeyError:
                pass
            else:
                if opened:
                    output.close()
                else:
                    try:
                        output.flush()
                    except AttributeError:
                        pass

    @property
    def active(self):
        """
        Returns ``True`` if the MMAL encoder exists and is enabled.
        """
        return bool(self.encoder and self.output_port[0].is_enabled)

    def start(self, output):
        """
        Starts the encoder object writing to the specified output.

        This method is called by the camera to start the encoder capturing
        data from the camera to the specified output. The *output* parameter
        is either a filename, or a file-like object (for image and video
        encoders), or an iterable of filenames or file-like objects (for
        multi-image encoders).
        """
        self.event.clear()
        self.stopped = False
        self.exception = None
        self._open_output(output)
        # Stash a pointer to ourselves in the port's userdata so the
        # module-level _encoder_callback can recover the instance
        self.output_port[0].userdata = ct.cast(
            ct.pointer(ct.py_object(self)),
            ct.c_void_p)
        with self.parent._encoders_lock:
            mmal_check(
                mmal.mmal_port_enable(self.output_port, _encoder_callback),
                prefix="Failed to enable encoder output port")
            # Prime the output port with every buffer in the pool
            for q in range(mmal.mmal_queue_length(self.pool[0].queue)):
                buf = mmal.mmal_queue_get(self.pool[0].queue)
                if not buf:
                    raise PiCameraRuntimeError(
                        "Unable to get a required buffer from pool queue")
                mmal_check(
                    mmal.mmal_port_send_buffer(self.output_port, buf),
                    prefix="Unable to send a buffer to encoder output port")
            self.parent._start_capture(self.camera_port)

    def wait(self, timeout=None):
        """
        Waits for the encoder to finish (successfully or otherwise).

        This method is called by the owning camera object to block execution
        until the encoder has completed its task. If the *timeout* parameter
        is None, the method will block indefinitely. Otherwise, the *timeout*
        parameter specifies the (potentially fractional) number of seconds
        to block for. If the encoder finishes successfully within the timeout,
        the method returns ``True``. Otherwise, it returns ``False``.
        """
        result = self.event.wait(timeout)
        if result:
            self.stop()
            # Check whether the callback set an exception
            if self.exception:
                raise self.exception
        return result

    def stop(self):
        """
        Stops the encoder, regardless of whether it's finished.

        This method is called by the camera to terminate the execution of the
        encoder. Typically, this is used with video to stop the recording, but
        can potentially be called in the middle of image capture to terminate
        the capture.
        """
        # The check below is not a race condition; we ignore the EINVAL error
        # in the case the port turns out to be disabled when we disable below.
        # The check exists purely to prevent stderr getting spammed by our
        # continued attempts to disable an already disabled port
        with self.parent._encoders_lock:
            if self.active:
                self.parent._stop_capture(self.camera_port)
                try:
                    mmal_check(
                        mmal.mmal_port_disable(self.output_port),
                        prefix="Failed to disable encoder output port")
                except PiCameraMMALError as e:
                    if e.status != mmal.MMAL_EINVAL:
                        raise
        self.stopped = True
        self.event.set()
        self._close_output()

    def close(self):
        """
        Finalizes the encoder and deallocates all structures.

        This method is called by the camera prior to destroying the encoder (or
        more precisely, letting it go out of scope to permit the garbage
        collector to destroy it at some future time). The method destroys all
        components that the various create methods constructed and resets their
        attributes.
        """
        self.stop()
        if self.encoder_connection:
            mmal.mmal_connection_destroy(self.encoder_connection)
            self.encoder_connection = None
        if self.pool:
            mmal.mmal_port_pool_destroy(self.output_port, self.pool)
            self.pool = None
        if self.resizer_connection:
            mmal.mmal_connection_destroy(self.resizer_connection)
            # Bug fix: reset the attribute after destroying the connection
            # (previously the stale pointer survived, risking a double
            # destroy if close() was called more than once)
            self.resizer_connection = None
        if self.encoder:
            mmal.mmal_component_destroy(self.encoder)
            self.encoder = None
        if self.resizer:
            mmal.mmal_component_destroy(self.resizer)
            self.resizer = None
        self.output_port = None
910 |
911 |
class PiRawMixin(PiEncoder):
    """
    Mixin class for "raw" (unencoded) output.

    This mixin class overrides the initializer of :class:`PiEncoder`, along
    with :meth:`_create_resizer` and :meth:`_create_encoder` to configure the
    pipeline for unencoded output. Specifically, it disables the construction
    of an encoder, and sets the output port to the input port passed to the
    initializer, unless resizing is required (either for actual resizing, or
    for format conversion) in which case the resizer's output is used.
    """

    # NOTE: 'rgb' and 'bgr' deliberately use the alpha-inclusive MMAL
    # encodings (with a bytes-per-pixel of 3); the alpha channel is stripped
    # in _callback_write - see the workaround comments in __init__ below
    RAW_ENCODINGS = {
        # name mmal-encoding bytes-per-pixel
        'yuv': (mmal.MMAL_ENCODING_I420, 1.5),
        'rgb': (mmal.MMAL_ENCODING_RGBA, 3),
        'rgba': (mmal.MMAL_ENCODING_RGBA, 4),
        'bgr': (mmal.MMAL_ENCODING_BGRA, 3),
        'bgra': (mmal.MMAL_ENCODING_BGRA, 4),
        }

    def __init__(
            self, parent, camera_port, input_port, format, resize, **options):
        # If a resize hasn't been requested, check the input_port format. If
        # it requires conversion, force the use of a resizer to perform the
        # conversion
        if not resize:
            if parent.RAW_FORMATS[format] != input_port[0].format[0].encoding.value:
                resize = parent.resolution
        # Workaround: If a non-alpha format is requested when a resizer is
        # required, we use the alpha-inclusive format and set a flag to get the
        # callback to strip the alpha bytes (for some reason the resizer won't
        # work with non-alpha output formats - firmware bug?)
        if resize:
            width, height = resize
            self._strip_alpha = format in ('rgb', 'bgr')
        else:
            width, height = parent.resolution
            self._strip_alpha = False
        # Camera frames are padded to 32-pixel horizontal and 16-pixel
        # vertical boundaries
        width = mmal.VCOS_ALIGN_UP(width, 32)
        height = mmal.VCOS_ALIGN_UP(height, 16)
        # Workaround (#83): when the resizer is used the width and height must
        # be aligned (both the actual and crop values) to avoid an error when
        # the output port format is set
        if resize:
            resize = (width, height)
        # Workaround: Calculate the expected image size, to be used by the
        # callback to decide when a frame ends. This is to work around a
        # firmware bug that causes the raw image to be returned twice when the
        # maximum camera resolution is requested
        self._frame_size = int(width * height * self.RAW_ENCODINGS[format][1])
        super(PiRawMixin, self).__init__(
            parent, camera_port, input_port, format, resize, **options)

    def _create_resizer(self, width, height):
        """
        Overridden to configure the resizer's output with the required
        encoding.
        """
        super(PiRawMixin, self)._create_resizer(width, height)
        encoding = self.RAW_ENCODINGS[self.format][0]
        port = self.resizer[0].output[0]
        port[0].format[0].encoding = encoding
        port[0].format[0].encoding_variant = encoding
        mmal_check(
            mmal.mmal_port_format_commit(port),
            prefix="Failed to set resizer output port format")

    def _create_encoder(self):
        """
        Overridden to skip creating an encoder. Instead, this class simply uses
        the resizer's port as the output port (if a resizer has been
        configured) or the specified input port otherwise.
        """
        if self.resizer:
            self.output_port = self.resizer[0].output[0]
        else:
            self.output_port = self.input_port

    def _create_connections(self):
        """
        Overridden to skip creating an encoder connection; only a resizer
        connection is required (if one has been configured).
        """
        if self.resizer:
            self.resizer_connection = self.parent._connect_ports(
                self.input_port, self.resizer[0].input[0])

    @property
    def active(self):
        # Overridden because no encoder component exists here; the output
        # port's enabled state alone indicates activity
        return bool(self.output_port[0].is_enabled)

    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        """
        Overridden to strip alpha bytes when required.
        """
        if self._strip_alpha:
            # Drop every 4th byte (the alpha channel) from the RGBA/BGRA data
            s = ct.string_at(buf[0].data, buf[0].length)
            s = bytearray(s)
            del s[3::4]
            # All this messing around with buffers is to work around some issue
            # with MMAL or ctypes (I'm not sure which is at fault). Anyway, the
            # upshot is that if you fiddle with buf[0].data in any way
            # whatsoever (even if you make every attempt to restore its value
            # afterward), mmal_port_disable locks up when we call it in stop()
            new_buf = mmal.MMAL_BUFFER_HEADER_T.from_buffer_copy(buf[0])
            new_buf.length = len(s)
            new_buf.data = ct.pointer(ct.c_uint8.from_buffer(s))
            return super(PiRawMixin, self)._callback_write(ct.pointer(new_buf), key)
        else:
            return super(PiRawMixin, self)._callback_write(buf, key)
1023 |
1024 |
1025 | class PiVideoEncoder(PiEncoder):
1026 | """
1027 | Encoder for video recording.
1028 |
1029 | This derivative of :class:`PiEncoder` configures itself for H.264 or MJPEG
1030 | encoding. It also introduces a :meth:`split` method which is used by
1031 | :meth:`~picamera.camera.PiCamera.split_recording` and
1032 | :meth:`~picamera.camera.PiCamera.record_sequence` to redirect future output
1033 | to a new filename or object. Finally, it also extends
1034 | :meth:`PiEncoder.start` and :meth:`PiEncoder._callback_write` to track
1035 | video frame meta-data, and to permit recording motion data to a separate
1036 | output object.
1037 | """
1038 |
1039 | encoder_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER
1040 |
1041 | def __init__(
1042 | self, parent, camera_port, input_port, format, resize, **options):
1043 | super(PiVideoEncoder, self).__init__(
1044 | parent, camera_port, input_port, format, resize, **options)
1045 | self._next_output = []
1046 | self.frame = None
1047 |
    def _create_encoder(
            self, bitrate=17000000, intra_period=None, profile='high',
            quantization=0, quality=0, inline_headers=True, sei=False,
            motion_output=None, intra_refresh=None):
        """
        Extends the base :meth:`~PiEncoder._create_encoder` implementation to
        configure the video encoder for H.264 or MJPEG output.
        """
        super(PiVideoEncoder, self)._create_encoder()

        # XXX Remove quantization in 2.0
        # quantization is the deprecated name for quality; quality wins when
        # both are given
        quality = quality or quantization

        try:
            self.output_port[0].format[0].encoding = {
                'h264': mmal.MMAL_ENCODING_H264,
                'mjpeg': mmal.MMAL_ENCODING_MJPEG,
                }[self.format]
        except KeyError:
            raise PiCameraValueError('Unrecognized format %s' % self.format)

        # NOTE(review): the 25Mbps cap presumably matches the firmware's
        # H.264 level 4 limit - confirm against the MMAL documentation
        if not (0 <= bitrate <= 25000000):
            raise PiCameraValueError('bitrate must be between 0 and 25Mbps')
        self.output_port[0].format[0].bitrate = bitrate
        # Frame rate is set to 0/1 here; presumably the actual rate is
        # dictated upstream by the camera port - confirm
        self.output_port[0].format[0].es[0].video.frame_rate.num = 0
        self.output_port[0].format[0].es[0].video.frame_rate.den = 1
        mmal_check(
            mmal.mmal_port_format_commit(self.output_port),
            prefix="Unable to set format on encoder output port")

        if self.format == 'h264':
            mp = mmal.MMAL_PARAMETER_VIDEO_PROFILE_T(
                mmal.MMAL_PARAMETER_HEADER_T(
                    mmal.MMAL_PARAMETER_PROFILE,
                    ct.sizeof(mmal.MMAL_PARAMETER_VIDEO_PROFILE_T),
                    ),
                )
            try:
                mp.profile[0].profile = {
                    'baseline':    mmal.MMAL_VIDEO_PROFILE_H264_BASELINE,
                    'main':        mmal.MMAL_VIDEO_PROFILE_H264_MAIN,
                    'high':        mmal.MMAL_VIDEO_PROFILE_H264_HIGH,
                    'constrained': mmal.MMAL_VIDEO_PROFILE_H264_CONSTRAINED_BASELINE,
                }[profile]
            except KeyError:
                raise PiCameraValueError("Invalid H.264 profile %s" % profile)
            mp.profile[0].level = mmal.MMAL_VIDEO_LEVEL_H264_4
            mmal_check(
                mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                prefix="Unable to set encoder H.264 profile")

            if inline_headers:
                mmal_check(
                    mmal.mmal_port_parameter_set_boolean(
                        self.output_port,
                        mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER,
                        mmal.MMAL_TRUE),
                    prefix="Unable to set inline_headers")

            if sei:
                mmal_check(
                    mmal.mmal_port_parameter_set_boolean(
                        self.output_port,
                        mmal.MMAL_PARAMETER_VIDEO_ENCODE_SEI_ENABLE,
                        mmal.MMAL_TRUE),
                    prefix="Unable to set SEI")

            if motion_output:
                mmal_check(
                    mmal.mmal_port_parameter_set_boolean(
                        self.output_port,
                        mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_VECTORS,
                        mmal.MMAL_TRUE),
                    prefix="Unable to set inline motion vectors")

            # We need the intra-period to calculate the SPS header timeout in
            # the split method below. If one is not set explicitly, query the
            # encoder's default
            if intra_period is not None:
                mp = mmal.MMAL_PARAMETER_UINT32_T(
                    mmal.MMAL_PARAMETER_HEADER_T(
                        mmal.MMAL_PARAMETER_INTRAPERIOD,
                        ct.sizeof(mmal.MMAL_PARAMETER_UINT32_T),
                        ),
                    intra_period
                    )
                mmal_check(
                    mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                    prefix="Unable to set encoder intra_period")
                self._intra_period = intra_period
            else:
                mp = mmal.MMAL_PARAMETER_UINT32_T(
                    mmal.MMAL_PARAMETER_HEADER_T(
                        mmal.MMAL_PARAMETER_INTRAPERIOD,
                        ct.sizeof(mmal.MMAL_PARAMETER_UINT32_T),
                        ))
                mmal_check(
                    mmal.mmal_port_parameter_get(self.output_port, mp.hdr),
                    prefix="Unable to get encoder intra_period")
                self._intra_period = mp.value

            if intra_refresh is not None:
                # Get the intra-refresh structure first as there are several
                # other fields in it which we don't wish to overwrite
                mp = mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH_T(
                    mmal.MMAL_PARAMETER_HEADER_T(
                        mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH,
                        ct.sizeof(mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH_T),
                        ))
                # Deliberately avoid checking whether this call succeeds
                mmal.mmal_port_parameter_get(self.output_port, mp.hdr)
                try:
                    mp.refresh_mode = {
                        'cyclic':     mmal.MMAL_VIDEO_INTRA_REFRESH_CYCLIC,
                        'adaptive':   mmal.MMAL_VIDEO_INTRA_REFRESH_ADAPTIVE,
                        'both':       mmal.MMAL_VIDEO_INTRA_REFRESH_BOTH,
                        'cyclicrows': mmal.MMAL_VIDEO_INTRA_REFRESH_CYCLIC_MROWS,
                    }[intra_refresh]
                except KeyError:
                    raise PiCameraValueError(
                        "Invalid intra_refresh %s" % intra_refresh)
                mmal_check(
                    mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                    prefix="Unable to set encoder intra_refresh")

        elif self.format == 'mjpeg':
            # MJPEG doesn't have an intra_period setting as such, but as every
            # frame is a full-frame, the intra_period is effectively 1
            self._intra_period = 1

        # quality maps to the H.264 quantization parameter; the same value is
        # applied as initial, minimum, and maximum quantization
        if quality:
            mp = mmal.MMAL_PARAMETER_UINT32_T(
                mmal.MMAL_PARAMETER_HEADER_T(
                    mmal.MMAL_PARAMETER_VIDEO_ENCODE_INITIAL_QUANT,
                    ct.sizeof(mmal.MMAL_PARAMETER_UINT32_T),
                    ),
                quality
                )
            mmal_check(
                mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                prefix="Unable to set initial quality")
            mp = mmal.MMAL_PARAMETER_UINT32_T(
                mmal.MMAL_PARAMETER_HEADER_T(
                    mmal.MMAL_PARAMETER_VIDEO_ENCODE_MIN_QUANT,
                    ct.sizeof(mmal.MMAL_PARAMETER_UINT32_T),
                    ),
                quality,
                )
            mmal_check(
                mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                prefix="Unable to set minimum quality")
            mp = mmal.MMAL_PARAMETER_UINT32_T(
                mmal.MMAL_PARAMETER_HEADER_T(
                    mmal.MMAL_PARAMETER_VIDEO_ENCODE_MAX_QUANT,
                    ct.sizeof(mmal.MMAL_PARAMETER_UINT32_T),
                    ),
                quality,
                )
            mmal_check(
                mmal.mmal_port_parameter_set(self.output_port, mp.hdr),
                prefix="Unable to set maximum quality")

        mmal_check(
            mmal.mmal_port_parameter_set_boolean(
                self.encoder[0].input[0],
                mmal.MMAL_PARAMETER_VIDEO_IMMUTABLE_INPUT,
                1),
            prefix="Unable to set immutable flag on encoder input port")

        mmal_check(
            mmal.mmal_component_enable(self.encoder),
            prefix="Unable to enable video encoder component")
1220 |
1221 | def start(self, output, motion_output=None):
1222 | """
1223 | Extended to initialize video frame meta-data tracking.
1224 | """
1225 | self.frame = PiVideoFrame(
1226 | index=0,
1227 | frame_type=None,
1228 | frame_size=0,
1229 | video_size=0,
1230 | split_size=0,
1231 | timestamp=0,
1232 | complete=False,
1233 | )
1234 | if motion_output is not None:
1235 | self._open_output(motion_output, PiVideoFrameType.motion_data)
1236 | super(PiVideoEncoder, self).start(output)
1237 |
    def stop(self):
        # Stop the underlying encoder first, then release the motion-vector
        # output (if one was opened by start() or split()); _close_output is
        # a no-op when no output is registered under that key.
        super(PiVideoEncoder, self).stop()
        self._close_output(PiVideoFrameType.motion_data)
1241 |
1242 | def split(self, output, motion_output=None):
1243 | """
1244 | Called to switch the encoder's output.
1245 |
1246 | This method is called by
1247 | :meth:`~picamera.camera.PiCamera.split_recording` and
1248 | :meth:`~picamera.camera.PiCamera.record_sequence` to switch the
1249 | encoder's :attr:`output` object to the *output* parameter (which can be
1250 | a filename or a file-like object, as with :meth:`start`).
1251 | """
1252 | with self.outputs_lock:
1253 | outputs = {}
1254 | if output is not None:
1255 | outputs[PiVideoFrameType.frame] = output
1256 | if motion_output is not None:
1257 | outputs[PiVideoFrameType.motion_data] = motion_output
1258 | self._next_output.append(outputs)
1259 | # intra_period / framerate gives the time between I-frames (which
1260 | # should also coincide with SPS headers). We multiply by three to
1261 | # ensure the timeout is deliberately excessive, and clamp the minimum
1262 | # timeout to 1 second (otherwise unencoded formats tend to fail
1263 | # presumably due to I/O capacity)
1264 | timeout = max(1.0, float(self._intra_period / self.parent.framerate) * 3.0)
1265 | if not self.event.wait(timeout):
1266 | raise PiCameraRuntimeError(
1267 | 'Timed out waiting for a split point')
1268 | self.event.clear()
1269 |
    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        """
        Extended to implement video frame meta-data tracking, and to handle
        splitting video recording to the next output when :meth:`split` is
        called.
        """
        # Update the running frame record from this buffer's MMAL header
        # flags; every field is a chained conditional expression
        self.frame = PiVideoFrame(
            index=
                # Only advance the index once the previous frame completed
                self.frame.index + 1
                if self.frame.complete else
                self.frame.index,
            frame_type=
                # Classify the buffer by its header flags (keyframe, SPS
                # header, motion side-data, or a plain frame)
                PiVideoFrameType.key_frame
                if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_KEYFRAME else
                PiVideoFrameType.sps_header
                if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CONFIG else
                PiVideoFrameType.motion_data
                if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO else
                PiVideoFrameType.frame,
            frame_size=
                # Restart the size count on a fresh frame, else accumulate
                buf[0].length
                if self.frame.complete else
                self.frame.frame_size + buf[0].length,
            video_size=
                # Motion side-data does not count towards video size
                self.frame.video_size
                if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO else
                self.frame.video_size + buf[0].length,
            split_size=
                # Likewise for the size since the last split point
                self.frame.split_size
                if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO else
                self.frame.split_size + buf[0].length,
            timestamp=
                # 0 and MMAL_TIME_UNKNOWN both mean "no timestamp available"
                None
                if buf[0].pts in (0, mmal.MMAL_TIME_UNKNOWN) else
                buf[0].pts,
            complete=
                bool(buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END),
            )
        # A pending split may only be serviced at a safe point: any buffer
        # for non-h264 formats, or an SPS header buffer for h264
        if self.format != 'h264' or (buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CONFIG):
            with self.outputs_lock:
                try:
                    new_outputs = self._next_output.pop(0)
                except IndexError:
                    new_outputs = None
            if new_outputs:
                for new_key, new_output in new_outputs.items():
                    self._close_output(new_key)
                    self._open_output(new_output, new_key)
                    if new_key == PiVideoFrameType.frame:
                        # Reset the per-split byte counter for the new output
                        self.frame = PiVideoFrame(
                            index=self.frame.index,
                            frame_type=self.frame.frame_type,
                            frame_size=self.frame.frame_size,
                            video_size=self.frame.video_size,
                            split_size=0,
                            timestamp=self.frame.timestamp,
                            complete=self.frame.complete,
                            )
                # Wake the thread blocked in split()
                self.event.set()
        if buf[0].flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO:
            # Route motion side-data buffers to the motion output
            key = PiVideoFrameType.motion_data
        return super(PiVideoEncoder, self)._callback_write(buf, key)
1332 |
1333 |
class PiCookedVideoEncoder(PiVideoEncoder):
    """
    Video encoder for encoded recordings.

    A trivial derivative of :class:`PiVideoEncoder`; it exists purely to
    mirror the naming scheme used by the image encoder classes.
    """
1341 |
1342 |
class PiRawVideoEncoder(PiRawMixin, PiVideoEncoder):
    """
    Video encoder for unencoded recordings.

    Combines :class:`PiVideoEncoder` with :class:`PiRawMixin`; used by
    :meth:`~picamera.camera.PiCamera.start_recording` when an unencoded
    format is requested.

    .. warning::

        This class creates an inheritance diamond. Take care to determine the
        MRO of super-class calls.
    """

    def _create_encoder(self):
        super(PiRawVideoEncoder, self)._create_encoder()
        # Unencoded formats have no real intra_period setting, but since
        # every frame is a complete frame it is effectively 1
        self._intra_period = 1
1363 |
1364 |
class PiImageEncoder(PiEncoder):
    """
    Encoder for image capture.

    This derivative of :class:`PiEncoder` extends the :meth:`_create_encoder`
    method to configure the encoder for a variety of encoded image outputs
    (JPEG, PNG, etc.).
    """

    # MMAL component name used by the base class when constructing the encoder
    encoder_type = mmal.MMAL_COMPONENT_DEFAULT_IMAGE_ENCODER

    def _create_encoder(self, quality=85, thumbnail=(64, 48, 35), bayer=False):
        """
        Extends the base :meth:`~PiEncoder._create_encoder` implementation to
        configure the image encoder for JPEG, PNG, etc.

        *quality* is the JPEG Q-factor (JPEG only); *thumbnail* is a
        ``(width, height, quality)`` tuple or ``None`` to disable the
        embedded thumbnail (JPEG only); *bayer* requests inclusion of raw
        Bayer data (JPEG only).
        """
        super(PiImageEncoder, self)._create_encoder()

        # Map self.format to the corresponding MMAL encoding constant
        try:
            self.output_port[0].format[0].encoding = {
                'jpeg': mmal.MMAL_ENCODING_JPEG,
                'png': mmal.MMAL_ENCODING_PNG,
                'gif': mmal.MMAL_ENCODING_GIF,
                'bmp': mmal.MMAL_ENCODING_BMP,
                }[self.format]
        except KeyError:
            raise PiCameraValueError("Unrecognized format %s" % self.format)
        mmal_check(
            mmal.mmal_port_format_commit(self.output_port),
            prefix="Unable to set format on encoder output port")

        # The remaining parameters only apply to the JPEG encoder
        if self.format == 'jpeg':
            mmal_check(
                mmal.mmal_port_parameter_set_uint32(
                    self.output_port,
                    mmal.MMAL_PARAMETER_JPEG_Q_FACTOR,
                    quality),
                prefix="Failed to set JPEG quality")

            # Raw (Bayer) capture is toggled on the camera port, not the
            # encoder's output port
            mmal_check(
                mmal.mmal_port_parameter_set_boolean(
                    self.camera_port,
                    mmal.MMAL_PARAMETER_ENABLE_RAW_CAPTURE,
                    int(bool(bayer))),
                prefix="Failed to set raw capture")

            # First struct field after the header enables (1) or disables (0)
            # the embedded thumbnail; the rest are width, height, quality
            if thumbnail is None:
                mp = mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T(
                    mmal.MMAL_PARAMETER_HEADER_T(
                        mmal.MMAL_PARAMETER_THUMBNAIL_CONFIGURATION,
                        ct.sizeof(mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T)
                        ),
                    0, 0, 0, 0)
            else:
                mp = mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T(
                    mmal.MMAL_PARAMETER_HEADER_T(
                        mmal.MMAL_PARAMETER_THUMBNAIL_CONFIGURATION,
                        ct.sizeof(mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T)
                        ),
                    1, *thumbnail)
            mmal_check(
                mmal.mmal_port_parameter_set(self.encoder[0].control, mp.hdr),
                prefix="Failed to set thumbnail configuration")

        mmal_check(
            mmal.mmal_component_enable(self.encoder),
            prefix="Unable to enable encoder component")
1432 |
1433 |
class PiOneImageEncoder(PiImageEncoder):
    """
    Encoder for single image capture.

    Extends :meth:`~PiEncoder._callback_write` so that capture terminates
    as soon as a complete frame (or a failed transmission) is seen.
    """

    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        finished = super(PiOneImageEncoder, self)._callback_write(buf, key)
        end_flags = (
            mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END |
            mmal.MMAL_BUFFER_HEADER_FLAG_TRANSMISSION_FAILED)
        # Returning True terminates the capture
        return finished or bool(buf[0].flags & end_flags)
1450 |
1451 |
class PiMultiImageEncoder(PiImageEncoder):
    """
    Encoder for multiple image capture.

    Handles an iterable of outputs rather than a single output.
    :meth:`~PiEncoder._callback_write` terminates capture once the iterable
    is exhausted, while :meth:`PiEncoder._open_output` is overridden to
    begin iteration, delegating to :meth:`_next_output` to advance to each
    successive item.
    """

    def _open_output(self, outputs, key=PiVideoFrameType.frame):
        # Start iterating over the sequence of outputs
        self._output_iter = iter(outputs)
        self._next_output(key)

    def _next_output(self, key=PiVideoFrameType.frame):
        """
        Advances output to the next item from the iterable passed to
        :meth:`~PiEncoder.start`.
        """
        self._close_output(key)
        super(PiMultiImageEncoder, self)._open_output(
            next(self._output_iter), key)

    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        try:
            frame_done = super(
                PiMultiImageEncoder, self)._callback_write(buf, key)
            frame_done = frame_done or bool(
                buf[0].flags & (
                    mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END |
                    mmal.MMAL_BUFFER_HEADER_FLAG_TRANSMISSION_FAILED))
            if frame_done:
                # Frame finished: move to the next output; exhaustion of
                # the iterable (StopIteration) ends the capture
                self._next_output(key)
            return False
        except StopIteration:
            return True
1489 |
1490 |
class PiCookedOneImageEncoder(PiOneImageEncoder):
    """
    Encoder for "cooked" (encoded) single image output.

    This encoder extends :class:`PiOneImageEncoder` to include Exif tags in the
    output.
    """

    # Exif tags and values are encoded as ASCII byte strings
    exif_encoding = 'ascii'

    def _add_exif_tag(self, tag, value):
        # Format the tag and value into an appropriate bytes string, encoded
        # with the Exif encoding (ASCII)
        if isinstance(tag, str):
            tag = tag.encode(self.exif_encoding)
        if isinstance(value, str):
            value = value.encode(self.exif_encoding)
        elif isinstance(value, datetime.datetime):
            # datetime values are rendered in the Exif timestamp format
            value = value.strftime('%Y:%m:%d %H:%M:%S').encode(self.exif_encoding)
        # MMAL_PARAMETER_EXIF_T is a variable sized structure, hence all the
        # mucking about with string buffers here...
        buf = ct.create_string_buffer(
            ct.sizeof(mmal.MMAL_PARAMETER_EXIF_T) + len(tag) + len(value) + 1)
        mp = ct.cast(buf, ct.POINTER(mmal.MMAL_PARAMETER_EXIF_T))
        mp[0].hdr.id = mmal.MMAL_PARAMETER_EXIF
        mp[0].hdr.size = len(buf)
        if (b'=' in tag or b'\x00' in value):
            # The default 'tag=value' encoding would be ambiguous here, so
            # pass explicit key/value offsets and lengths instead
            data = tag + value
            mp[0].keylen = len(tag)
            mp[0].value_offset = len(tag)
            mp[0].valuelen = len(value)
        else:
            data = tag + b'=' + value
        ct.memmove(mp[0].data, data, len(data))
        mmal_check(
            mmal.mmal_port_parameter_set(self.output_port, mp[0].hdr),
            prefix="Failed to set Exif tag %s" % tag)

    def start(self, output):
        timestamp = datetime.datetime.now()
        timestamp_tags = (
            'EXIF.DateTimeDigitized',
            'EXIF.DateTimeOriginal',
            'IFD0.DateTime')
        # Timestamp tags are always included with the value calculated
        # above, but the user may choose to override the value in the
        # exif_tags mapping
        for tag in timestamp_tags:
            self._add_exif_tag(tag, self.parent.exif_tags.get(tag, timestamp))
        # All other tags are just copied in verbatim
        for tag, value in self.parent.exif_tags.items():
            if not tag in timestamp_tags:
                self._add_exif_tag(tag, value)
        super(PiCookedOneImageEncoder, self).start(output)
1545 |
1546 |
class PiCookedMultiImageEncoder(PiMultiImageEncoder):
    """
    Encoder for "cooked" (encoded) multiple image output.

    Derives from :class:`PiMultiImageEncoder` without adding anything:
    video-port based captures (the only use of this class) do not support
    Exif tag output, so no tag handling is required here.
    """
1556 |
1557 |
class PiRawImageMixin(PiRawMixin, PiImageEncoder):
    """
    Mixin class for "raw" (unencoded) image capture.

    Overrides :meth:`_callback_write` to work out for itself when output
    should terminate, by counting down the expected frame size.
    """

    def __init__(
            self, parent, camera_port, input_port, format, resize, **options):
        super(PiRawImageMixin, self).__init__(
            parent, camera_port, input_port, format, resize, **options)
        # Bytes still expected for the current image; set by start()
        self._image_size = 0

    def _callback_write(self, buf, key=PiVideoFrameType.frame):
        """
        Overridden to manually calculate when to terminate capture (see
        comments in :meth:`__init__`).
        """
        remaining = self._image_size
        if remaining > 0:
            super(PiRawImageMixin, self)._callback_write(buf, key)
        self._image_size = remaining - buf[0].length
        # Capture terminates once a full frame's worth of bytes has arrived
        return self._image_size <= 0

    def start(self, output):
        # Expect exactly one frame of data
        self._image_size = self._frame_size
        super(PiRawImageMixin, self).start(output)
1585 |
1586 |
class PiRawOneImageEncoder(PiOneImageEncoder, PiRawImageMixin):
    """
    Single image encoder for unencoded capture.

    Combines :class:`PiOneImageEncoder` with :class:`PiRawImageMixin`; used
    by :meth:`~picamera.camera.PiCamera.capture` (et al) when an unencoded
    image format is requested.

    .. warning::

        This class creates an inheritance diamond. Take care to determine the
        MRO of super-class calls.
    """
1602 |
1603 |
class PiRawMultiImageEncoder(PiMultiImageEncoder, PiRawImageMixin):
    """
    Multiple image encoder for unencoded capture.

    Combines :class:`PiMultiImageEncoder` with :class:`PiRawImageMixin`;
    used by :meth:`~picamera.camera.PiCamera.capture_sequence` when an
    unencoded image format is requested.

    .. warning::

        This class creates an inheritance diamond. Take care to determine the
        MRO of super-class calls.
    """
    def _next_output(self, key=PiVideoFrameType.frame):
        super(PiRawMultiImageEncoder, self)._next_output(key)
        # Each new output expects a fresh frame's worth of data
        self._image_size = self._frame_size
1621 |
1622 |
--------------------------------------------------------------------------------
/picamera/exc.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python camera library for the Raspberry Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | This module defines the exceptions used by picamera. All exception classes
32 | utilize multiple inheritance in order to make testing for exception types more
33 | intuitive. For example, :exc:`PiCameraValueError` derives from both
34 | :exc:`PiCameraError` and :exc:`ValueError`. Hence it will be caught by blocks
35 | intended to catch any error specific to the picamera library::
36 |
37 | try:
38 | camera.brightness = int(some_user_value)
39 | except PiCameraError:
40 | print('Something went wrong with the camera')
41 |
42 | Or by blocks intended to catch value errors::
43 |
44 | try:
45 | camera.contrast = int(some_user_value)
46 | except ValueError:
47 | print('Invalid value')
48 |
49 | .. note::
50 |
51 | All classes in this module are available from the :mod:`picamera` namespace
52 | without having to import :mod:`picamera.exc` directly.
53 |
54 | The following classes are defined in the module:
55 |
56 |
57 | .. autoexception:: PiCameraWarning
58 |
59 | .. autoexception:: PiCameraDeprecated
60 |
61 | .. autoexception:: PiCameraFallback
62 |
63 | .. autoexception:: PiCameraError
64 |
65 | .. autoexception:: PiCameraValueError
66 |
67 | .. autoexception:: PiCameraRuntimeError
68 |
69 | .. autoexception:: PiCameraClosed
70 |
71 | .. autoexception:: PiCameraNotRecording
72 |
73 | .. autoexception:: PiCameraAlreadyRecording
74 |
75 | .. autoexception:: PiCameraMMALError
76 |
77 | .. autofunction:: mmal_check
78 |
79 | """
80 |
81 | from __future__ import (
82 | unicode_literals,
83 | print_function,
84 | division,
85 | absolute_import,
86 | )
87 |
88 | # Make Py2's str equivalent to Py3's
89 | str = type('')
90 |
91 |
92 | import picamera.mmal as mmal
93 |
94 |
class PiCameraWarning(Warning):
    """
    Root of the picamera warning hierarchy.
    """
99 |
100 |
class PiCameraDeprecated(PiCameraWarning, DeprecationWarning):
    """
    Warns that deprecated picamera functionality has been used.
    """
105 |
106 |
class PiCameraFallback(PiCameraWarning, RuntimeWarning):
    """
    Warns that picamera has fallen back on old functionality.
    """
111 |
112 |
class PiCameraError(Exception):
    """
    Root of the picamera exception hierarchy.
    """
117 |
118 |
class PiCameraRuntimeError(PiCameraError, RuntimeError):
    """
    Signals an invalid sequence of operations attempted with a
    :class:`~picamera.camera.PiCamera` object.
    """
124 |
125 |
class PiCameraClosed(PiCameraRuntimeError):
    """
    Signals that a method was called on a camera which has already been
    closed.
    """
130 |
131 |
class PiCameraNotRecording(PiCameraRuntimeError):
    """
    Signals that :meth:`~picamera.camera.PiCamera.stop_recording` or
    :meth:`~picamera.camera.PiCamera.split_recording` was called against a
    port with no active recording.
    """
138 |
139 |
class PiCameraAlreadyRecording(PiCameraRuntimeError):
    """
    Signals that :meth:`~picamera.camera.PiCamera.start_recording` or
    :meth:`~picamera.camera.PiCamera.record_sequence` was called against a
    port that already has an active recording.
    """
146 |
147 |
class PiCameraValueError(PiCameraError, ValueError):
    """
    Signals that an invalid value was fed to a
    :class:`~picamera.camera.PiCamera` object.
    """
153 |
154 |
class PiCameraMMALError(PiCameraError):
    """
    Raised when an MMAL operation fails for whatever reason.

    The failing MMAL status code is preserved on the :attr:`status`
    attribute; the message translates it to a human-readable description,
    optionally prefixed with context supplied by the caller.
    """
    def __init__(self, status, prefix=""):
        self.status = status
        # Translate the MMAL status code into readable text
        descriptions = {
            mmal.MMAL_ENOMEM: "Out of memory",
            mmal.MMAL_ENOSPC: "Out of resources (other than memory)",
            mmal.MMAL_EINVAL: "Argument is invalid",
            mmal.MMAL_ENOSYS: "Function not implemented",
            mmal.MMAL_ENOENT: "No such file or directory",
            mmal.MMAL_ENXIO: "No such device or address",
            mmal.MMAL_EIO: "I/O error",
            mmal.MMAL_ESPIPE: "Illegal seek",
            mmal.MMAL_ECORRUPT: "Data is corrupt #FIXME not POSIX",
            mmal.MMAL_ENOTREADY: "Component is not ready #FIXME not POSIX",
            mmal.MMAL_ECONFIG: "Component is not configured #FIXME not POSIX",
            mmal.MMAL_EISCONN: "Port is already connected",
            mmal.MMAL_ENOTCONN: "Port is disconnected",
            mmal.MMAL_EAGAIN: "Resource temporarily unavailable; try again later",
            mmal.MMAL_EFAULT: "Bad address",
            }
        description = descriptions.get(status, "Unknown status error")
        separator = ": " if prefix else ""
        PiCameraError.__init__(
            self, "%s%s%s" % (prefix, separator, description))
178 |
179 |
def mmal_check(status, prefix=""):
    """
    Checks the return status of an mmal call and raises an exception on
    failure.

    The *status* parameter is the result of an MMAL call. If *status* is
    anything other than MMAL_SUCCESS, a :exc:`PiCameraMMALError` exception is
    raised. The optional *prefix* parameter specifies a prefix message to place
    at the start of the exception's message to provide some context.
    """
    if status == mmal.MMAL_SUCCESS:
        return
    raise PiCameraMMALError(status, prefix)
192 |
193 |
--------------------------------------------------------------------------------
/picamera/renderers.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
3 | # Python camera library for the Raspberry Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The renderers module defines the renderer classes used by the camera to provide
32 | preview and overlay output on the Pi's display. Users will rarely need to
33 | construct instances of these classes directly
34 | (:meth:`~picamera.camera.PiCamera.start_preview` and
35 | :meth:`~picamera.camera.PiCamera.add_overlay` are generally used instead) but
36 | may find the attribute references for them useful.
37 |
38 | .. note::
39 |
40 | All classes in this module are available from the :mod:`picamera` namespace
41 | without having to import :mod:`picamera.renderers` directly.
42 |
43 | The following classes are defined in the module:
44 |
45 |
46 | PiRenderer
47 | ==========
48 |
49 | .. autoclass:: PiRenderer
50 | :members:
51 |
52 |
53 | PiOverlayRenderer
54 | =================
55 |
56 | .. autoclass:: PiOverlayRenderer
57 | :members:
58 |
59 |
60 | PiPreviewRenderer
61 | =================
62 |
63 | .. autoclass:: PiPreviewRenderer
64 | :members:
65 |
66 |
67 | PiNullSink
68 | ==========
69 |
70 | .. autoclass:: PiNullSink
71 | :members:
72 |
73 | """
74 |
75 | from __future__ import (
76 | unicode_literals,
77 | print_function,
78 | division,
79 | absolute_import,
80 | )
81 |
82 | # Make Py2's str and range equivalent to Py3's
83 | str = type('')
84 |
85 | import ctypes as ct
86 |
87 | import picamera.mmal as mmal
88 | from picamera.exc import (
89 | PiCameraRuntimeError,
90 | PiCameraValueError,
91 | mmal_check,
92 | )
93 |
94 |
def _overlay_callback(port, buf):
    # Overlay buffers need no processing; just return the buffer to its pool
    mmal.mmal_buffer_header_release(buf)
# Rebind the Python function as the ctypes callback type MMAL expects
_overlay_callback = mmal.MMAL_PORT_BH_CB_T(_overlay_callback)
98 |
99 |
100 | class PiRenderer(object):
101 | """
102 | Base implementation of an MMAL video renderer for use by PiCamera.
103 |
104 | The *parent* parameter specifies the :class:`~picamera.camera.PiCamera`
105 | instance that has constructed this renderer. The *layer* parameter
106 | specifies the layer that the renderer will inhabit. Higher numbered layers
107 | obscure lower numbered layers (unless they are partially transparent). The
108 | initial opacity of the renderer is specified by the *alpha* parameter
109 | (which defaults to 255, meaning completely opaque). The *fullscreen*
110 | parameter which defaults to ``True`` indicates whether the renderer should
111 | occupy the entire display. Finally, the *window* parameter (which only has
112 | meaning when *fullscreen* is ``False``) is a four-tuple of ``(x, y, width,
113 | height)`` which gives the screen coordinates that the renderer should
114 | occupy when it isn't full-screen.
115 |
116 | This base class isn't directly used by :class:`~picamera.camera.PiCamera`,
117 | but the two derivatives defined below, :class:`PiOverlayRenderer` and
118 | :class:`PiPreviewRenderer`, are used to produce overlays and the camera
119 | preview respectively.
120 | """
121 |
122 | def __init__(
123 | self, parent, layer=0, alpha=255, fullscreen=True, window=None,
124 | crop=None, rotation=0, vflip=False, hflip=False):
125 | # Create and enable the renderer component
126 | self._rotation = 0
127 | self._vflip = False
128 | self._hflip = False
129 | self.parent = parent
130 | self.renderer = ct.POINTER(mmal.MMAL_COMPONENT_T)()
131 | mmal_check(
132 | mmal.mmal_component_create(
133 | mmal.MMAL_COMPONENT_DEFAULT_VIDEO_RENDERER, self.renderer),
134 | prefix="Failed to create renderer component")
135 | try:
136 | if not self.renderer[0].input_num:
137 | raise PiCameraError("No input ports on renderer component")
138 |
139 | self.layer = layer
140 | self.alpha = alpha
141 | self.fullscreen = fullscreen
142 | if window is not None:
143 | self.window = window
144 | if crop is not None:
145 | self.crop = crop
146 | self.rotation = rotation
147 | self.vflip = vflip
148 | self.hflip = hflip
149 |
150 | mmal_check(
151 | mmal.mmal_component_enable(self.renderer),
152 | prefix="Renderer component couldn't be enabled")
153 | except:
154 | mmal.mmal_component_destroy(self.renderer)
155 | raise
156 |
157 | def close(self):
158 | """
159 | Finalizes the renderer and deallocates all structures.
160 |
161 | This method is called by the camera prior to destroying the renderer
162 | (or more precisely, letting it go out of scope to permit the garbage
163 | collector to destroy it at some future time).
164 | """
165 | if self.renderer:
166 | mmal.mmal_component_destroy(self.renderer)
167 | self.renderer = None
168 |
    def __enter__(self):
        # Context manager entry; all setup already happened in __init__
        return self
171 |
    def __exit__(self, exc_type, exc_value, exc_tb):
        # Context manager exit: destroy the renderer component
        self.close()
174 |
175 | def _get_alpha(self):
176 | mp = mmal.MMAL_DISPLAYREGION_T(
177 | mmal.MMAL_PARAMETER_HEADER_T(
178 | mmal.MMAL_PARAMETER_DISPLAYREGION,
179 | ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
180 | ))
181 | mmal_check(
182 | mmal.mmal_port_parameter_get(self.renderer[0].input[0], mp.hdr),
183 | prefix="Failed to get alpha")
184 | return mp.alpha
185 | def _set_alpha(self, value):
186 | try:
187 | if not (0 <= value <= 255):
188 | raise PiCameraValueError(
189 | "Invalid alpha value: %d (valid range 0..255)" % value)
190 | except TypeError:
191 | raise PiCameraValueError("Invalid alpha value: %s" % value)
192 | mp = mmal.MMAL_DISPLAYREGION_T(
193 | mmal.MMAL_PARAMETER_HEADER_T(
194 | mmal.MMAL_PARAMETER_DISPLAYREGION,
195 | ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
196 | ),
197 | set=mmal.MMAL_DISPLAY_SET_ALPHA,
198 | alpha=value
199 | )
200 | mmal_check(
201 | mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
202 | prefix="Failed to set alpha")
203 | alpha = property(_get_alpha, _set_alpha, doc="""
204 | Retrieves or sets the opacity of the renderer.
205 |
206 | When queried, the :attr:`alpha` property returns a value between 0 and
207 | 255 indicating the opacity of the renderer, where 0 is completely
208 | transparent and 255 is completely opaque. The default value is 255. The
209 | property can be set while recordings or previews are in progress.
210 | """)
211 |
212 | def _get_layer(self):
213 | mp = mmal.MMAL_DISPLAYREGION_T(
214 | mmal.MMAL_PARAMETER_HEADER_T(
215 | mmal.MMAL_PARAMETER_DISPLAYREGION,
216 | ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
217 | ))
218 | mmal_check(
219 | mmal.mmal_port_parameter_get(self.renderer[0].input[0], mp.hdr),
220 | prefix="Failed to get layer")
221 | return mp.layer
222 | def _set_layer(self, value):
223 | try:
224 | if not (0 <= value <= 255):
225 | raise PiCameraValueError(
226 | "Invalid layer value: %d (valid range 0..255)" % value)
227 | except TypeError:
228 | raise PiCameraValueError("Invalid layer value: %s" % value)
229 | mp = mmal.MMAL_DISPLAYREGION_T(
230 | mmal.MMAL_PARAMETER_HEADER_T(
231 | mmal.MMAL_PARAMETER_DISPLAYREGION,
232 | ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
233 | ),
234 | set=mmal.MMAL_DISPLAY_SET_LAYER,
235 | layer=value
236 | )
237 | mmal_check(
238 | mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
239 | prefix="Failed to set layer")
240 | layer = property(
241 | _get_layer, _set_layer, doc="""
242 | Retrieves of sets the layer of the renderer.
243 |
244 | The :attr:`layer` property is an integer which controls the layer that
245 | the renderer occupies. Higher valued layers obscure lower valued layers
246 | (with 0 being the "bottom" layer). The default value is 2. The property
247 | can be set while recordings or previews are in progress.
248 | """)
249 |
    def _get_fullscreen(self):
        # Query the renderer's DISPLAYREGION parameter and report whether
        # the fullscreen flag is currently set on the input port
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ))
        mmal_check(
            mmal.mmal_port_parameter_get(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to get fullscreen")
        # MMAL booleans are ints; normalize to a Python bool
        return mp.fullscreen != mmal.MMAL_FALSE
    def _set_fullscreen(self, value):
        # Only the fullscreen field is written; the "set" mask tells MMAL
        # which fields of the DISPLAYREGION structure are valid
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ),
            set=mmal.MMAL_DISPLAY_SET_FULLSCREEN,
            fullscreen=bool(value)
            )
        mmal_check(
            mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to set fullscreen")
    fullscreen = property(
        _get_fullscreen, _set_fullscreen, doc="""
        Retrieves or sets whether the renderer appears full-screen.

        The :attr:`fullscreen` property is a bool which controls whether the
        renderer takes up the entire display or not. When set to ``False``, the
        :attr:`window` property can be used to control the precise size of the
        renderer display. The property can be set while recordings or previews
        are active.
        """)
282 |
    def _get_window(self):
        # Read back the DISPLAYREGION parameter and return the destination
        # rectangle as an (x, y, width, height) tuple
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ))
        mmal_check(
            mmal.mmal_port_parameter_get(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to get window")
        return (
            mp.dest_rect.x,
            mp.dest_rect.y,
            mp.dest_rect.width,
            mp.dest_rect.height,
            )
    def _set_window(self, value):
        # value must be an (x, y, width, height) sequence; anything else
        # (wrong arity, non-iterable) is reported as a value error
        try:
            x, y, w, h = value
        except (TypeError, ValueError) as e:
            raise PiCameraValueError(
                "Invalid window rectangle (x, y, w, h) tuple: %s" % value)
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ),
            set=mmal.MMAL_DISPLAY_SET_DEST_RECT,
            dest_rect=mmal.MMAL_RECT_T(x, y, w, h),
            )
        mmal_check(
            mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to set window")
    window = property(_get_window, _set_window, doc="""
        Retrieves or sets the size of the renderer.

        When the :attr:`fullscreen` property is set to ``False``, the
        :attr:`window` property specifies the size and position of the renderer
        on the display. The property is a 4-tuple consisting of ``(x, y, width,
        height)``. The property can be set while recordings or previews are
        active.
        """)
324 |
    def _get_crop(self):
        # Read back the DISPLAYREGION parameter and return the source
        # rectangle as an (x, y, width, height) tuple
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ))
        mmal_check(
            mmal.mmal_port_parameter_get(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to get crop")
        return (
            mp.src_rect.x,
            mp.src_rect.y,
            mp.src_rect.width,
            mp.src_rect.height,
            )
    def _set_crop(self, value):
        # value must be an (x, y, width, height) sequence; anything else
        # (wrong arity, non-iterable) is reported as a value error
        try:
            x, y, w, h = value
        except (TypeError, ValueError) as e:
            raise PiCameraValueError(
                "Invalid crop rectangle (x, y, w, h) tuple: %s" % value)
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ),
            set=mmal.MMAL_DISPLAY_SET_SRC_RECT,
            src_rect=mmal.MMAL_RECT_T(x, y, w, h),
            )
        mmal_check(
            mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to set crop")
    crop = property(_get_crop, _set_crop, doc="""
        Retrieves or sets the area to read from the source.

        The :attr:`crop` property specifies the rectangular area that the
        renderer will read from the source as a 4-tuple of ``(x, y, width,
        height)``. The special value ``(0, 0, 0, 0)`` (which is also the
        default) means to read entire area of the source. The property can be
        set while recordings or previews are active.

        For example, if the camera's resolution is currently configured as
        1280x720, setting this attribute to ``(160, 160, 640, 400)`` will
        crop the preview to the center 640x400 pixels of the input. Note that
        this property does not affect the size of the output rectangle,
        which is controlled with :attr:`fullscreen` and :attr:`window`.

        .. note::

            This property only affects the renderer; it has no bearing on image
            captures or recordings (unlike the
            :attr:`~picamera.camera.PiCamera.zoom` property of the
            :class:`~picamera.camera.PiCamera` class).
        """)
379 |
380 | def _get_rotation(self):
381 | return self._rotation
382 | def _set_rotation(self, value):
383 | try:
384 | value = ((int(value) % 360) // 90) * 90
385 | except ValueError:
386 | raise PiCameraValueError("Invalid rotation angle: %s" % value)
387 | self._set_transform(
388 | self._get_transform(value, self._vflip, self._hflip))
389 | self._rotation = value
390 | rotation = property(_get_rotation, _set_rotation, doc="""
391 | Retrieves of sets the current rotation of the renderer.
392 |
393 | When queried, the :attr:`rotation` property returns the rotation
394 | applied to the renderer. Valid values are 0, 90, 180, and 270.
395 |
396 | When set, the property changes the rotation applied to the renderer's
397 | output. The property can be set while recordings or previews are
398 | active. The default is 0.
399 |
400 | .. note::
401 |
402 | This property only affects the renderer; it has no bearing on image
403 | captures or recordings (unlike the
404 | :attr:`~picamera.camera.PiCamera.rotation` property of the
405 | :class:`~picamera.camera.PiCamera` class).
406 | """)
407 |
408 | def _get_vflip(self):
409 | return self._vflip
410 | def _set_vflip(self, value):
411 | value = bool(value)
412 | self._set_transform(
413 | self._get_transform(self._rotation, value, self._hflip))
414 | self._vflip = value
415 | vflip = property(_get_vflip, _set_vflip, doc="""
416 | Retrieves of sets whether the renderer's output is vertically flipped.
417 |
418 | When queried, the :attr:`vflip` property returns a boolean indicating
419 | whether or not the renderer's output is vertically flipped. The
420 | property can be set while recordings or previews are in progress. The
421 | default is ``False``.
422 |
423 | .. note::
424 |
425 | This property only affects the renderer; it has no bearing on image
426 | captures or recordings (unlike the
427 | :attr:`~picamera.camera.PiCamera.vflip` property of the
428 | :class:`~picamera.camera.PiCamera` class).
429 | """)
430 |
431 | def _get_hflip(self):
432 | return self._hflip
433 | def _set_hflip(self, value):
434 | value = bool(value)
435 | self._set_transform(
436 | self._get_transform(self._rotation, self._vflip, value))
437 | self._hflip = value
438 | hflip = property(_get_hflip, _set_hflip, doc="""
439 | Retrieves of sets whether the renderer's output is horizontally
440 | flipped.
441 |
442 | When queried, the :attr:`vflip` property returns a boolean indicating
443 | whether or not the renderer's output is horizontally flipped. The
444 | property can be set while recordings or previews are in progress. The
445 | default is ``False``.
446 |
447 | .. note::
448 |
449 | This property only affects the renderer; it has no bearing on image
450 | captures or recordings (unlike the
451 | :attr:`~picamera.camera.PiCamera.hflip` property of the
452 | :class:`~picamera.camera.PiCamera` class).
453 | """)
454 |
455 | def _get_transform(self, rotate, vflip, hflip):
456 | # Use a (horizontally) mirrored transform if one of vflip or hflip is
457 | # set. If vflip is set, rotate by an extra 180 degrees to make up for
458 | # the lack of a "true" vertical flip
459 | mirror = vflip ^ hflip
460 | if vflip:
461 | rotate = (rotate + 180) % 360
462 | return {
463 | (0, False): mmal.MMAL_DISPLAY_ROT0,
464 | (90, False): mmal.MMAL_DISPLAY_ROT90,
465 | (180, False): mmal.MMAL_DISPLAY_ROT180,
466 | (270, False): mmal.MMAL_DISPLAY_ROT270,
467 | (0, True): mmal.MMAL_DISPLAY_MIRROR_ROT0,
468 | (90, True): mmal.MMAL_DISPLAY_MIRROR_ROT90,
469 | (180, True): mmal.MMAL_DISPLAY_MIRROR_ROT180,
470 | (270, True): mmal.MMAL_DISPLAY_MIRROR_ROT270,
471 | }[(rotate, mirror)]
472 |
    def _set_transform(self, value):
        # Push the given MMAL display transform constant to the renderer's
        # input port via the DISPLAYREGION parameter; the "set" mask marks
        # the transform field as the only valid one
        mp = mmal.MMAL_DISPLAYREGION_T(
            mmal.MMAL_PARAMETER_HEADER_T(
                mmal.MMAL_PARAMETER_DISPLAYREGION,
                ct.sizeof(mmal.MMAL_DISPLAYREGION_T)
            ),
            set=mmal.MMAL_DISPLAY_SET_TRANSFORM,
            transform=value,
            )
        mmal_check(
            mmal.mmal_port_parameter_set(self.renderer[0].input[0], mp.hdr),
            prefix="Failed to set transform")
485 |
486 |
class PiOverlayRenderer(PiRenderer):
    """
    Represents an MMAL renderer with a static source for overlays.

    This class descends from :class:`PiRenderer` and adds a static source for
    the MMAL renderer. The optional *size* parameter specifies the size of the
    source image as a ``(width, height)`` tuple. If this is omitted or ``None``
    then the size is assumed to be the same as the parent camera's current
    :attr:`~picamera.camera.PiCamera.resolution`.

    The *source* must be an object that supports the :ref:`buffer protocol
    ` which has the same length as an image in `RGB`_ format
    (colors represented as interleaved unsigned bytes) with the specified
    *size* after the width has been rounded up to the nearest multiple of 32,
    and the height has been rounded up to the nearest multiple of 16.

    For example, if *size* is ``(1280, 720)``, then *source* must be a buffer
    with length 1280 x 720 x 3 bytes, or 2,764,800 bytes (because 1280 is a
    multiple of 32, and 720 is a multiple of 16 no extra rounding is required).
    However, if *size* is ``(97, 57)``, then *source* must be a buffer with
    length 128 x 64 x 3 bytes, or 24,576 bytes (pixels beyond column 97 and row
    57 in the source will be ignored).

    The *layer*, *alpha*, *fullscreen*, and *window* parameters are the same
    as in :class:`PiRenderer`.

    .. _RGB: http://en.wikipedia.org/wiki/RGB
    """

    def __init__(
            self, parent, source, size=None, layer=0, alpha=255,
            fullscreen=True, window=None, crop=None, rotation=0, vflip=False,
            hflip=False):
        super(PiOverlayRenderer, self).__init__(
            parent, layer, alpha, fullscreen, window, crop,
            rotation, vflip, hflip)

        # Copy format from camera's preview port, then adjust the encoding to
        # RGB888 and optionally adjust the resolution and size
        port = self.renderer[0].input[0]
        fmt = port[0].format
        mmal.mmal_format_copy(
            fmt, parent._camera[0].output[parent.CAMERA_PREVIEW_PORT][0].format)
        fmt[0].encoding = mmal.MMAL_ENCODING_RGB24
        fmt[0].encoding_variant = mmal.MMAL_ENCODING_RGB24
        if size is not None:
            # MMAL requires the frame dimensions aligned up to 32x16; the
            # crop rectangle records the caller's actual (unpadded) size
            w, h = size
            fmt[0].es[0].video.width = mmal.VCOS_ALIGN_UP(w, 32)
            fmt[0].es[0].video.height = mmal.VCOS_ALIGN_UP(h, 16)
            fmt[0].es[0].video.crop.width = w
            fmt[0].es[0].video.crop.height = h
        mmal_check(
            mmal.mmal_port_format_commit(port),
            prefix="Overlay format couldn't be set")
        # Use the minimum number of buffers the port will accept (the source
        # is static, so throughput is not a concern)
        port[0].buffer_num = port[0].buffer_num_min
        port[0].buffer_size = port[0].buffer_size_recommended

        mmal_check(
            mmal.mmal_component_enable(self.renderer),
            prefix="Overlay couldn't be enabled")

        mmal_check(
            mmal.mmal_port_enable(port, _overlay_callback),
            prefix="Overlay input port couldn't be enabled")

        # Pool of buffers used to feed the static source into the renderer
        self.pool = mmal.mmal_port_pool_create(
            port, port[0].buffer_num, port[0].buffer_size)
        if not self.pool:
            raise PiCameraRuntimeError("Couldn't create pool for overlay")

        # Send the initial source image to the renderer
        self.update(source)

    def close(self):
        # Destroy the renderer (superclass) before releasing the buffer pool
        # that fed it
        super(PiOverlayRenderer, self).close()
        if self.pool:
            mmal.mmal_pool_destroy(self.pool)
            self.pool = None

    def update(self, source):
        """
        Update the overlay with a new source of data.

        The new *source* buffer must have the same size as the original buffer
        used to create the overlay. There is currently no method for changing
        the size of an existing overlay (remove and recreate the overlay if you
        require this).
        """
        port = self.renderer[0].input[0]
        fmt = port[0].format
        # View the source as a flat array of width * height * 3 RGB bytes
        bp = ct.c_uint8 * (fmt[0].es[0].video.width * fmt[0].es[0].video.height * 3)
        try:
            # Zero-copy when the source supports a writable buffer interface;
            # fall back to copying for read-only sources (e.g. bytes)
            sp = bp.from_buffer(source)
        except TypeError:
            sp = bp.from_buffer_copy(source)
        buf = mmal.mmal_queue_get(self.pool[0].queue)
        if not buf:
            raise PiCameraRuntimeError(
                "Couldn't get a buffer from the overlay's pool")
        # NOTE(review): copies alloc_size bytes from the source; this assumes
        # alloc_size never exceeds the source buffer's length — confirm
        ct.memmove(buf[0].data, sp, buf[0].alloc_size)
        buf[0].length = buf[0].alloc_size
        mmal_check(
            mmal.mmal_port_send_buffer(port, buf),
            prefix="Unable to send a buffer to the overlay's port")
590 |
591 |
class PiPreviewRenderer(PiRenderer):
    """
    Represents an MMAL renderer which uses the camera's preview as a source.

    This class descends from :class:`PiRenderer` and adds an MMAL connection to
    connect the renderer to an MMAL port. The *source* parameter specifies the
    MMAL port to connect to the renderer.

    The *layer*, *alpha*, *fullscreen*, and *window* parameters are the same
    as in :class:`PiRenderer`.
    """

    def __init__(
            self, parent, source, layer=2, alpha=255, fullscreen=True,
            window=None, crop=None, rotation=0, vflip=False, hflip=False):
        super(PiPreviewRenderer, self).__init__(
            parent, layer, alpha, fullscreen, window, crop,
            rotation, vflip, hflip)
        # Connect the source port to this renderer's input; the parent
        # camera owns the port-connection logic
        self.connection = self.parent._connect_ports(
            source, self.renderer[0].input[0])

    def close(self):
        # Tear down the connection before the renderer component it feeds
        # (the component itself is destroyed by the superclass close)
        if self.connection:
            mmal.mmal_connection_destroy(self.connection)
            self.connection = None
        super(PiPreviewRenderer, self).close()
618 |
619 |
class PiNullSink(object):
    """
    Implements an MMAL null-sink which can be used in place of a renderer.

    The *parent* parameter specifies the :class:`~picamera.camera.PiCamera`
    instance which constructed this null-sink. The *source* parameter specifies
    the MMAL port which the null-sink should connect to its input.

    The null-sink can act as a drop-in replacement for :class:`PiRenderer` in
    most cases, but obviously doesn't implement attributes like ``alpha``,
    ``layer``, etc. as it simply dumps any incoming frames. This is also the
    reason that this class doesn't derive from :class:`PiRenderer` like all
    other classes in this module.
    """

    def __init__(self, parent, source):
        self.parent = parent
        # Create the null-sink component; the attribute is named "renderer"
        # for symmetry with PiRenderer so callers can treat both alike
        self.renderer = ct.POINTER(mmal.MMAL_COMPONENT_T)()
        mmal_check(
            mmal.mmal_component_create(
                mmal.MMAL_COMPONENT_DEFAULT_NULL_SINK, self.renderer),
            prefix="Failed to create null sink component")
        try:
            if not self.renderer[0].input_num:
                raise PiCameraError("No input ports on null sink component")
            mmal_check(
                mmal.mmal_component_enable(self.renderer),
                prefix="Null sink component couldn't be enabled")
        except:
            # Destroy the component on any failure after creation, then
            # re-raise the original exception
            mmal.mmal_component_destroy(self.renderer)
            raise
        # Connect the source port to the null-sink's input
        self.connection = self.parent._connect_ports(
            source, self.renderer[0].input[0])

    def close(self):
        """
        Finalizes the null-sink and deallocates all structures.

        This method is called by the camera prior to destroying the null-sink
        (or more precisely, letting it go out of scope to permit the garbage
        collector to destroy it at some future time).
        """
        # Destroy the connection before the component it feeds
        if self.connection:
            mmal.mmal_connection_destroy(self.connection)
            self.connection = None
        if self.renderer:
            mmal.mmal_component_destroy(self.renderer)
            self.renderer = None

    def __enter__(self):
        # Support use as a context manager; close() runs on exit
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
674 |
675 |
676 |
--------------------------------------------------------------------------------
/picamera/streams.py:
--------------------------------------------------------------------------------
1 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
2 | #
# Python camera library for the Raspberry Pi camera module
4 | # Copyright (c) 2013-2015 Dave Jones
5 | #
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright
10 | # notice, this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the copyright holder nor the
15 | # names of its contributors may be used to endorse or promote products
16 | # derived from this software without specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 |
30 | """
31 | The streams module defines stream classes suited to generating certain types of
32 | camera output (beyond those provided by Python by default). Currently, this
33 | consists primarily of :class:`~PiCameraCircularIO`.
34 |
35 | .. note::
36 |
37 | All classes in this module are available from the :mod:`picamera` namespace
38 | without having to import :mod:`picamera.streams` directly.
39 |
40 | The following classes are defined in the module:
41 |
42 |
43 | PiCameraCircularIO
44 | ==================
45 |
46 | .. autoclass:: PiCameraCircularIO
47 | :members:
48 |
49 |
50 | CircularIO
51 | ==========
52 |
53 | .. autoclass:: CircularIO
54 | :members:
55 |
56 | """
57 |
58 | from __future__ import (
59 | unicode_literals,
60 | print_function,
61 | division,
62 | absolute_import,
63 | )
64 |
65 | # Make Py2's str equivalent to Py3's
66 | str = type('')
67 |
68 |
69 | import io
70 | from threading import RLock
71 | from collections import deque
72 |
73 | from picamera.exc import PiCameraValueError
74 | from picamera.encoders import PiVideoFrame
75 |
76 |
77 | __all__ = [
78 | 'CircularIO',
79 | 'PiCameraCircularIO',
80 | ]
81 |
82 |
class CircularIO(io.IOBase):
    """
    A thread-safe stream which uses a ring buffer for storage.

    CircularIO provides an in-memory stream similar to the :class:`io.BytesIO`
    class. However, unlike BytesIO its underlying storage is a `ring buffer`_
    with a fixed maximum size. Once the maximum size is reached, writing
    effectively loops round to the beginning of the ring and starts
    overwriting the oldest content.

    The *size* parameter specifies the maximum size of the stream in bytes. The
    :meth:`read`, :meth:`tell`, and :meth:`seek` methods all operate
    equivalently to those in :class:`io.BytesIO` whilst :meth:`write` only
    differs in the wrapping behaviour described above. A :meth:`read1` method
    is also provided for efficient reading of the underlying ring buffer in
    write-sized chunks (or less).

    A re-entrant threading lock guards all operations, and is accessible for
    external use via the :attr:`lock` attribute.

    The performance of the class is geared toward faster writing than reading
    on the assumption that writing will be the common operation and reading the
    rare operation (a reasonable assumption for the camera use-case, but not
    necessarily for more general usage).

    .. _ring buffer: http://en.wikipedia.org/wiki/Circular_buffer
    """
    def __init__(self, size):
        if size < 1:
            raise ValueError('size must be a positive integer')
        self._lock = RLock()    # guards all state below
        self._data = deque()    # chunks of bytes, oldest first
        self._size = size       # maximum total length of all chunks
        self._length = 0        # current total length of all chunks
        self._pos = 0           # current stream position
        self._pos_index = 0     # index of the chunk containing _pos
        self._pos_offset = 0    # offset of _pos within that chunk

    @property
    def lock(self):
        """
        A re-entrant threading lock which is used to guard all operations.
        """
        return self._lock

    @property
    def size(self):
        """
        Return the maximum size of the buffer in bytes.
        """
        return self._size

    def readable(self):
        """
        Returns ``True``, indicating that the stream supports :meth:`read`.
        """
        return True

    def writable(self):
        """
        Returns ``True``, indicating that the stream supports :meth:`write`.
        """
        return True

    def seekable(self):
        """
        Returns ``True``, indicating the stream supports :meth:`seek` and
        :meth:`tell`.
        """
        return True

    def getvalue(self):
        """
        Return ``bytes`` containing the entire contents of the buffer.
        """
        with self.lock:
            return b''.join(self._data)

    def _set_pos(self, value):
        # Reposition the stream: locate the chunk containing byte *value*
        # and record its index and the offset within it. If *value* lies at
        # or beyond the end of the stream, the index ends up one past the
        # last chunk with the remaining distance in the offset
        self._pos = value
        self._pos_index = -1
        self._pos_offset = chunk_pos = 0
        for self._pos_index, chunk in enumerate(self._data):
            if chunk_pos + len(chunk) > value:
                self._pos_offset = value - chunk_pos
                return
            else:
                chunk_pos += len(chunk)
        self._pos_index += 1
        self._pos_offset = value - chunk_pos

    def tell(self):
        """
        Return the current stream position.
        """
        return self._pos

    def seek(self, offset, whence=io.SEEK_SET):
        """
        Change the stream position to the given byte *offset*. *offset* is
        interpreted relative to the position indicated by *whence*. Values for
        *whence* are:

        * ``SEEK_SET`` or ``0`` – start of the stream (the default); *offset*
          should be zero or positive

        * ``SEEK_CUR`` or ``1`` – current stream position; *offset* may be
          negative

        * ``SEEK_END`` or ``2`` – end of the stream; *offset* is usually
          negative

        Return the new absolute position.
        """
        with self.lock:
            if whence == io.SEEK_CUR:
                offset = self._pos + offset
            elif whence == io.SEEK_END:
                offset = self._length + offset
            if offset < 0:
                raise ValueError(
                    'New position is before the start of the stream')
            self._set_pos(offset)
            return self._pos

    def read(self, n=-1):
        """
        Read up to *n* bytes from the stream and return them. As a convenience,
        if *n* is unspecified or -1, :meth:`readall` is called. Fewer than *n*
        bytes may be returned if there are fewer than *n* bytes from the
        current stream position to the end of the stream.

        If 0 bytes are returned, and *n* was not 0, this indicates end of the
        stream.
        """
        if n == -1:
            return self.readall()
        else:
            with self.lock:
                # Positions at or beyond the end always read as EOF (the
                # position may legitimately exceed the length after a seek
                # past the end of the stream)
                if self._pos >= self._length:
                    return b''
                # Clamp the count to the bytes actually available so the
                # position never advances beyond the end of the stream
                # (previously an unclamped read left _pos past _length,
                # making a subsequent read raise IndexError)
                n = min(n, self._length - self._pos)
                from_index, from_offset = self._pos_index, self._pos_offset
                self._set_pos(self._pos + n)
                result = self._data[from_index][from_offset:from_offset + n]
                # Bah ... can't slice a deque
                for i in range(from_index + 1, self._pos_index):
                    result += self._data[i]
                if from_index < self._pos_index < len(self._data):
                    result += self._data[self._pos_index][:self._pos_offset]
                return result

    def readall(self):
        """
        Read and return all bytes from the stream until EOF, using multiple
        calls to the stream if necessary.
        """
        # The count is clamped to zero so that a position past the end of
        # the stream yields b'' (an unclamped -1 here would recurse through
        # read() back into readall() forever)
        return self.read(max(0, self._length - self._pos))

    def read1(self, n=-1):
        """
        Read up to *n* bytes from the stream using only a single call to the
        underlying object.

        In the case of :class:`CircularIO` this roughly corresponds to
        returning the content from the current position up to the end of the
        write that added that content to the stream (assuming no subsequent
        writes overwrote the content). :meth:`read1` is particularly useful
        for efficient copying of the stream's content.
        """
        with self.lock:
            # >= (not ==) so a position seeked past the end reads as EOF
            # instead of indexing a non-existent chunk
            if self._pos >= self._length:
                return b''
            chunk = self._data[self._pos_index]
            if n == -1:
                n = len(chunk) - self._pos_offset
            result = chunk[self._pos_offset:self._pos_offset + n]
            self._pos += len(result)
            self._pos_offset += n
            if self._pos_offset >= len(chunk):
                self._pos_index += 1
                self._pos_offset = 0
            return result

    def truncate(self, size=None):
        """
        Resize the stream to the given *size* in bytes (or the current position
        if *size* is not specified). This resizing can extend or reduce the
        current stream size. In case of extension, the contents of the new file
        area will be NUL (``\\x00``) bytes. The new stream size is returned.

        The current stream position isn’t changed unless the resizing is
        expanding the stream, in which case it may be set to the maximum stream
        size if the expansion causes the ring buffer to loop around.
        """
        with self.lock:
            if size is None:
                size = self._pos
            if size < 0:
                raise ValueError('size must be zero, or a positive integer')
            if size > self._length:
                # Backfill the space between stream end and current position
                # with NUL bytes
                fill = b'\x00' * (size - self._length)
                self._set_pos(self._length)
                self.write(fill)
            elif size < self._length:
                # Lop off chunks until we get to the last one at the truncation
                # point, and slice that one
                save_pos = self._pos
                self._set_pos(size)
                while self._pos_index < len(self._data) - 1:
                    self._data.pop()
                self._data[self._pos_index] = self._data[self._pos_index][:self._pos_offset]
                self._length = size
                self._pos_index += 1
                self._pos_offset = 0
                if self._pos != save_pos:
                    self._set_pos(save_pos)

    def write(self, b):
        """
        Write the given bytes or bytearray object, *b*, to the underlying
        stream and return the number of bytes written.
        """
        b = bytes(b)
        with self.lock:
            # Special case: stream position is beyond the end of the stream.
            # Call truncate to backfill space first
            if self._pos > self._length:
                self.truncate()
            result = len(b)
            if self._pos == self._length:
                # Fast path: stream position is at the end of the stream so
                # just append a new chunk
                self._data.append(b)
                self._length += len(b)
                self._pos = self._length
                self._pos_index = len(self._data)
                self._pos_offset = 0
            else:
                # Slow path: stream position is somewhere in the middle;
                # overwrite bytes in the current (and if necessary, subsequent)
                # chunk(s), without extending them. If we reach the end of the
                # stream, call ourselves recursively to continue down the fast
                # path
                while b and (self._pos < self._length):
                    chunk = self._data[self._pos_index]
                    head = b[:len(chunk) - self._pos_offset]
                    assert head
                    b = b[len(head):]
                    self._data[self._pos_index] = b''.join((
                        chunk[:self._pos_offset],
                        head,
                        chunk[self._pos_offset + len(head):]
                        ))
                    self._pos += len(head)
                    if self._pos_offset + len(head) >= len(chunk):
                        self._pos_index += 1
                        self._pos_offset = 0
                    else:
                        self._pos_offset += len(head)
                if b:
                    self.write(b)
            # If the stream is now beyond the specified size limit, remove
            # chunks (or part of a chunk) until the size is within the limit
            # again
            while self._length > self._size:
                chunk = self._data[0]
                if self._length - len(chunk) >= self._size:
                    # Need to remove the entire chunk
                    self._data.popleft()
                    self._length -= len(chunk)
                    self._pos -= len(chunk)
                    self._pos_index -= 1
                    # no need to adjust self._pos_offset
                else:
                    # need to remove the head of the chunk
                    self._data[0] = chunk[self._length - self._size:]
                    self._pos -= self._length - self._size
                    self._length = self._size
            return result
364 |
365 |
class PiCameraDequeHack(deque):
    """
    A deque sub-class which pairs camera frame meta-data with each chunk
    appended to it.

    Internally every element is stored as a ``(chunk, frame)`` pair, where
    *frame* is the encoder's frame meta-data when the chunk completes a
    frame and ``None`` otherwise. All the usual access methods are
    overridden to hide the pairing and hand back just the chunk.
    """

    def __init__(self, camera, splitter_port=1):
        super(PiCameraDequeHack, self).__init__()
        self.camera = camera
        self.splitter_port = splitter_port

    def append(self, item):
        # Pair the chunk with the encoder's frame meta-data when this chunk
        # ends a frame; otherwise pair it with None
        encoder = self.camera._encoders[self.splitter_port]
        frame = encoder.frame if encoder.frame.complete else None
        return super(PiCameraDequeHack, self).append((item, frame))

    def pop(self):
        item, _ = super(PiCameraDequeHack, self).pop()
        return item

    def popleft(self):
        item, _ = super(PiCameraDequeHack, self).popleft()
        return item

    def __getitem__(self, index):
        item, _ = super(PiCameraDequeHack, self).__getitem__(index)
        return item

    def __setitem__(self, index, value):
        # Preserve the frame meta-data already associated with this slot
        _, frame = super(PiCameraDequeHack, self).__getitem__(index)
        return super(PiCameraDequeHack, self).__setitem__(index, (value, frame))

    def __iter__(self):
        for item, _ in super(PiCameraDequeHack, self).__iter__():
            yield item
397 |
398 |
class PiCameraDequeFrames(object):
    """
    An iterable view of the frame meta-data held in a stream's
    :class:`PiCameraDequeHack`.

    The *stream* parameter is the stream whose ``_data`` deque stores
    ``(chunk, frame)`` pairs. Iterating (forwards or in reverse) yields
    :class:`~picamera.encoders.PiVideoFrame` tuples whose ``video_size`` and
    ``split_size`` fields are rewritten to reflect each frame's current
    position within the ring buffer.
    """
    def __init__(self, stream):
        super(PiCameraDequeFrames, self).__init__()
        self.stream = stream

    def __iter__(self):
        # Hold the stream's lock so the deque cannot mutate mid-iteration
        with self.stream.lock:
            pos = 0
            # Invoke the plain deque __iter__ (deliberately bypassing
            # PiCameraDequeHack's override) to get the raw (chunk, frame)
            # pairs rather than bare chunks
            for item, frame in super(PiCameraDequeHack, self.stream._data).__iter__():
                pos += len(item)
                if frame:
                    # Rewrite the video_size and split_size attributes according
                    # to the current position of the chunk
                    frame = PiVideoFrame(
                        index=frame.index,
                        frame_type=frame.frame_type,
                        frame_size=frame.frame_size,
                        video_size=pos,
                        split_size=pos,
                        timestamp=frame.timestamp,
                        complete=frame.complete,
                        )
                    # Only yield the frame meta-data if the start of the frame
                    # still exists in the stream
                    if pos - frame.frame_size >= 0:
                        yield frame

    def __reversed__(self):
        # As __iter__, but walking from the newest chunk backwards; pos
        # tracks the end position of the current chunk within the stream
        with self.stream.lock:
            pos = self.stream._length
            for item, frame in super(PiCameraDequeHack, self.stream._data).__reversed__():
                if frame:
                    frame = PiVideoFrame(
                        index=frame.index,
                        frame_type=frame.frame_type,
                        frame_size=frame.frame_size,
                        video_size=pos,
                        split_size=pos,
                        timestamp=frame.timestamp,
                        complete=frame.complete,
                        )
                    # Skip frames whose start has already been overwritten
                    if pos - frame.frame_size >= 0:
                        yield frame
                pos -= len(item)
443 |
444 |
class PiCameraCircularIO(CircularIO):
    """
    A derivative of :class:`CircularIO` which tracks camera frames.

    PiCameraCircularIO provides an in-memory stream based on a ring buffer. It
    is a specialization of :class:`CircularIO` which associates video frame
    meta-data with the recorded stream, accessible from the :attr:`frames`
    property.

    .. warning::

        The class makes a couple of assumptions which will cause the frame
        meta-data tracking to break if they are not adhered to:

        * the stream is only ever appended to - no writes ever start from
          the middle of the stream

        * the stream is never truncated (from the right; being ring buffer
          based, left truncation will occur automatically)

    The *camera* parameter specifies the :class:`~picamera.camera.PiCamera`
    instance that will be recording video to the stream. If specified, the
    *size* parameter determines the maximum size of the stream in bytes. If
    *size* is not specified (or ``None``), then *seconds* must be specified
    instead. This provides the maximum length of the stream in seconds,
    assuming a data rate in bits-per-second given by the *bitrate* parameter
    (which defaults to ``17000000``, or 17Mbps, which is also the default
    bitrate used for video recording by :class:`~picamera.camera.PiCamera`).
    You cannot specify both *size* and *seconds*.

    The *splitter_port* parameter specifies the port of the built-in splitter
    that the video encoder will be attached to. This defaults to ``1`` and most
    users will have no need to specify anything different. If you do specify
    something else, ensure it is equal to the *splitter_port* parameter of the
    corresponding call to :meth:`~picamera.camera.PiCamera.start_recording`.
    For example::

        import picamera

        with picamera.PiCamera() as camera:
            with picamera.PiCameraCircularIO(camera, splitter_port=2) as stream:
                camera.start_recording(stream, format='h264', splitter_port=2)
                camera.wait_recording(10, splitter_port=2)
                camera.stop_recording(splitter_port=2)

    .. attribute:: frames

        Returns an iterator over the frame meta-data.

        As the camera records video to the stream, the class captures the
        meta-data associated with each frame (in the form of a
        :class:`~picamera.encoders.PiVideoFrame` tuple), discarding meta-data
        for frames which are no longer fully stored within the underlying ring
        buffer. You can use the frame meta-data to locate, for example, the
        first keyframe present in the stream in order to determine an
        appropriate range to extract.
    """
    def __init__(
            self, camera, size=None, seconds=None, bitrate=17000000,
            splitter_port=1):
        # Exactly one of *size* / *seconds* must be supplied; when *seconds*
        # is used, the buffer size is derived from the duration and bitrate
        # (bits-per-second -> bytes).
        if seconds is not None:
            if size is not None:
                raise PiCameraValueError('You cannot specify both size and seconds')
            size = bitrate * seconds // 8
        elif size is None:
            raise PiCameraValueError('You must specify either size, or seconds')
        super(PiCameraCircularIO, self).__init__(size)
        # Deque subclass that records (chunk, frame-meta) pairs as the
        # encoder writes, plus the public frames iterator over it.
        self._data = PiCameraDequeHack(camera, splitter_port)
        self.frames = PiCameraDequeFrames(self)
514 |
515 |
--------------------------------------------------------------------------------
/test_ivport.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import os
4 | import ivport
5 | import time
6 |
def raspistill_capture():
    """Capture one still per camera using the legacy raspistill tool."""
    def capture(cam_no):
        "Shell out to raspistill for a single still image"
        os.system("raspistill -t 10 -o still_CAM%d.jpg" % cam_no)

    iv = ivport.IVPort(ivport.TYPE_DUAL2)
    # Select each multiplexer channel in turn and grab a frame.
    for cam_no in (1, 2):
        iv.camera_change(cam_no)
        capture(cam_no)
    iv.close()
20 |
def libcamera_still_capture():
    # libcamera capture (Raspberry Pi OS Bullseye and later)
    def capture(camera):
        "This system command for libcamera-still capture"
        # Bug fix: the Bullseye command-line tool is "libcamera-still"
        # (hyphen); the original "libcamera_still" (underscore) is not a
        # valid command, so os.system failed with "command not found".
        cmd = "libcamera-still -t 10 -o still_CAM%d.jpg" % camera
        os.system(cmd)

    iv = ivport.IVPort(ivport.TYPE_DUAL2)
    iv.camera_change(1)
    capture(1)
    iv.camera_change(2)
    capture(2)
    iv.close()
34 |
# Main capture examples. Only one is enabled in main() below; the picam_*
# examples referenced there are defined in test_ivport_quad.py.
def main():
    """Entry point: run the capture example for the current camera stack."""
    #raspistill_capture() # legacy camera
    libcamera_still_capture() # after Raspberry OS Bullseye
    #picam_capture()
    #picam_sequence()
42 |
# Run the example only when executed as a script (not on import).
if __name__ == "__main__":
    main()
45 |
--------------------------------------------------------------------------------
/test_ivport_quad.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import os
4 | import ivport
5 | import time
6 |
def picam_sequence():
    """Capture an alternating sequence of stills from cameras 1 and 2.

    Feeds a filename generator to IVPort's sequence capture; the generator
    switches the multiplexer channel (1, 2, 1, 2, ...) before each frame.
    """
    FRAMES = 30

    def sequence_outputs(iv):
        # One output filename per frame. Using range() replaces the
        # original manual while-loop counter; the unused CAM local was
        # removed as dead code.
        for frame in range(FRAMES):
            camera = (frame % 2) + 1
            time.sleep(0.2)  # SD Card Bandwidth Correction Delay
            iv.camera_change(camera)
            time.sleep(0.2)  # SD Card Bandwidth Correction Delay
            yield 'sequence_%02d.jpg' % frame
            # Runs after the capture of this frame completes (generator
            # resumes here), logging which camera was used.
            print(camera)

    iv = ivport.IVPort(ivport.TYPE_QUAD2, iv_jumper='A')
    iv.camera_open(camera_v2=True, resolution=(640, 480), framerate=60)
    iv.camera_sequence(outputs=sequence_outputs(iv), use_video_port=True)
    iv.close()
28 |
def picam_capture():
    """Capture one still per camera through the picamera interface."""
    iv = ivport.IVPort(ivport.TYPE_QUAD2, iv_jumper='A')
    iv.camera_open()
    # Full-resolution (non-video-port) capture on each channel in turn.
    for cam_no in (1, 2):
        iv.camera_change(cam_no)
        iv.camera_capture("picam", use_video_port=False)
    iv.close()
37 |
def still_capture():
    """Capture one still per camera using the legacy raspistill tool."""
    def capture(cam_no):
        "Shell out to raspistill for a single still image"
        os.system("raspistill -t 10 -o still_CAM%d.jpg" % cam_no)

    iv = ivport.IVPort(ivport.TYPE_QUAD2, iv_jumper='A')
    # Select each multiplexer channel in turn and grab a frame.
    for cam_no in (1, 2):
        iv.camera_change(cam_no)
        capture(cam_no)
    iv.close()
51 |
# Main capture examples. Each function below works on its own; uncomment
# the one you want to run in main().
def main():
    """Entry point: run the selected capture example."""
    still_capture()
    #picam_capture()
    #picam_sequence()
58 |
# Run the example only when executed as a script (not on import).
if __name__ == "__main__":
    main()
61 |
--------------------------------------------------------------------------------