30 |
31 |
32 |
--------------------------------------------------------------------------------
/modes/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Decoders for Mode S responses and ADS-B extended squitter messages.
21 | """
22 |
23 | __all__ = ['altitude', 'cpr', 'crc', 'message', 'squawk']
24 |
25 | from . import altitude
26 | from . import cpr
27 | from . import crc
28 | from . import message
29 | from . import squawk
30 |
--------------------------------------------------------------------------------
/mlat-server:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3.4
2 | # -*- mode: python; indent-tabs-mode: nil -*-
3 |
4 | # mlat-server: a Mode S multilateration server
5 | # Copyright (C) 2015 Oliver Jowett
6 |
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU Affero General Public License as
9 | # published by the Free Software Foundation, either version 3 of the
10 | # License, or (at your option) any later version.
11 |
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Affero General Public License for more details.
16 |
17 | # You should have received a copy of the GNU Affero General Public License
18 | # along with this program. If not, see .
19 |
20 | import logging
21 | import mlat.server.main
22 |
if __name__ == '__main__':
    # Log to stderr with millisecond-resolution timestamps, e.g.
    # "20150101 12:00:00.123 INFO     mlat.server       message".
    logging.basicConfig(level=logging.INFO,
                        style='{',
                        format='{asctime}.{msecs:03.0f} {levelname:8s} {name:20s} {message}',
                        datefmt='%Y%m%d %H:%M:%S')

    # Construct and run the server; blocks until shutdown.
    mlat.server.main.MlatServer().run()
30 |
--------------------------------------------------------------------------------
/mlat/constants.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
18 |
19 | """
20 | Useful constants for unit conversion.
21 | """
22 |
import math

# signal propagation speed in metres per second
# (speed of light in vacuum divided by an approximate refractive index of air)
Cair = 299792458 / 1.0003

# degrees to radians
DTOR = math.pi / 180.0
# radians to degrees
RTOD = 180.0 / math.pi

# feet to metres
# (international foot: exactly 0.3048 m; the previous value 0.3038 was a
# typo, roughly 0.3% off, which skewed every altitude conversion)
FTOM = 0.3048
# metres to feet
MTOF = 1.0/FTOM

# m/s to knots
MS_TO_KTS = 1.9438

# m/s to fpm (feet per minute)
MS_TO_FPM = MTOF * 60
43 |
--------------------------------------------------------------------------------
/mlat/server/config.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """Poor man's configuration system, because I'm lazy.."""
20 |
from mlat import constants

# Location at which _this copy_ of the server code may be found. This URL will
# be sent to network clients. Remember to uncomment this after updating it.
#
# See COPYING and README.md - the AGPL requires that you make your
# modified version of the server source code available to users that interact
# with the server over a network.
#
# Please remember that this needs to be _the code that the server is running_.
#
# AGPL_SERVER_CODE_URL = "https://github.com/mutability/mlat-server"

# minimum NUCp value to accept as a sync message
# (NUCp = ADS-B Navigation Uncertainty Category for position; higher values
# mean a tighter position uncertainty bound)
MIN_NUC = 6

# absolute maximum receiver range for sync messages, metres
MAX_RANGE = 500e3

# maximum distance between even/odd DF17 messages, metres
MAX_INTERMESSAGE_RANGE = 10e3

# absolute maximum altitude, metres
MAX_ALT = 50000 * constants.FTOM

# how long to wait to accumulate messages before doing multilateration, seconds
MLAT_DELAY = 2.5

# maxfev (maximum function evaluations) for the solver
SOLVER_MAXFEV = 50

# Deliberate failure at import time until the operator has set
# AGPL_SERVER_CODE_URL above: the AGPL compliance link sent to clients must
# point at the code this server is actually running.
if 'AGPL_SERVER_CODE_URL' not in globals():
    raise RuntimeError('Please update AGPL_SERVER_CODE_URL in mlat/server/config.py')
54 |
--------------------------------------------------------------------------------
/mlat/server/util.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Random utilities that don't fit elsewhere.
21 | """
22 |
23 | import random
24 | import asyncio
25 | import logging
26 |
27 |
def fuzzy(t):
    """Return *t* randomly perturbed by up to +/-10%, rounded to a whole number.

    The result is a float (round() with ndigits=0 preserves the float type).
    """
    low = 0.9 * t
    high = 1.1 * t
    return round(random.uniform(low, high), 0)
30 |
# A single already-resolved future, handed out by safe_wait() when there is
# nothing to wait for, so callers always get something awaitable back.
# NOTE(review): created at import time with no running event loop - newer
# Python versions may warn about this; confirm against the target runtime.
completed_future = asyncio.Future()
completed_future.set_result(True)
33 |
34 |
def safe_wait(coros_or_futures, **kwargs):
    """Wait for all of the given coroutines/futures to complete.

    Behaves like asyncio.wait, except that the input list may safely
    contain None entries (they are ignored) or be entirely empty; when
    nothing remains to wait for, an already-completed future is returned
    instead of calling asyncio.wait with an empty set."""

    pending = [item for item in coros_or_futures if item is not None]
    if not pending:
        return completed_future
    return asyncio.wait(pending, **kwargs)
50 |
51 |
class TaggingLogger(logging.LoggerAdapter):
    """LoggerAdapter that prefixes messages with '[tag]' when the adapter's
    extra dict contains a 'tag' entry; otherwise passes messages through."""

    def process(self, msg, kwargs):
        if 'tag' not in self.extra:
            return (msg, kwargs)
        return ('[{0}] {1}'.format(self.extra['tag'], msg), kwargs)
58 |
59 |
def setproctitle(title):
    """Set the process title. No-op fallback used when the real
    setproctitle package is not installed."""


try:
    # Shadow the fallback with the real implementation when available.
    from setproctitle import setproctitle  # noqa
except ImportError:
    pass
70 |
--------------------------------------------------------------------------------
/tools/sync/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | sync stats
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | This table shows the current clock synchronization state between all receivers. It will automatically update about every 30 seconds.
15 |
16 |
17 |
18 | Each receiver pair has two main values: the number of synchronizations done in the last approx 30 seconds (larger is better)
19 | and the estimated synchronization error in microseconds (smaller is better)
20 |
21 |
22 |
23 | The third value (at the bottom of each cell) is the relative frequency offset of the receiver clocks, in PPM.
This is mostly just for interest. However, values approaching 200 can be a problem: the server rejects differences of more than 200 PPM -
if yours are that high, fix your dump1090 --ppm setting!
26 |
27 |
28 |
29 | Green cells are good, yellow cells are OK, red cells are bad. Grey cells mean there is no synchronization available between that pair of receivers.
30 | (Sorry if you're colorblind! The colors are all configured in the stylesheet - send me better suggestions?)
31 |
32 |
33 |
34 | It's normal to have quite a few yellow cells for number of synchronizations (usually this is infrequent synchronization between distant receivers).
35 |
36 |
37 |
38 | Yellow cells for synchronization errors are uncommon - usually either the clock error is very good or very bad, there's not much middle ground.
39 |
40 |
41 |
42 | Red cells for synchronization errors usually indicate a clock problem.
43 | If there's only one in a row/column it's probably a one-off outlier.
44 | If a whole row/column goes red that usually indicates clock instability in that receiver.
45 | Receiver pairs that have red cells are not used for multilateration.
46 |
47 |
48 |
49 | The PPM values are relative values. They don't tell you anything about the true offset. However: the Vsky receiver is a GPS-synchronized
50 | Radarcape that should have an accurate frequency; so the offsets between Vsky and other receivers should be close to the true offsets.
51 |
52 |
53 |
--------------------------------------------------------------------------------
/mlat/server/connection.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Connection interface description.
21 | """
22 |
23 |
class Connection(object):
    """Interface for receiver connections.

    A receiver connection is something that can send messages (filter requests,
    multilateration results) to a particular receiver. A single connection
    may handle only a single receiver, or may multiplex multiple receivers.

    This is a duck-typed interface, implementations are not required to inherit
    this class as long as they provide methods with equivalent signatures.

    Both methods raise NotImplementedError here; implementations must
    override them.
    """

    def request_traffic(self, receiver, icao_set):
        """Request that a receiver starts sending traffic for exactly
        the given set of aircraft only.

        receiver: the handle of the concerned receiver
        icao_set: a set of ICAO addresses (as ints) to send (_not_ copied, don't modify afterwards!)
        """
        raise NotImplementedError

    def report_mlat_position(self, receiver,
                             receive_timestamp, address, ecef, ecef_cov, receivers, distinct):
        """Report a multilaterated position result.

        receiver: the handle of the concerned receiver
        receive_timestamp: the approx UTC time of the position
        address: the ICAO address of the aircraft (as an int)
        ecef: an (x,y,z) tuple giving the position in ECEF coordinates
        ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
        receivers: the set of receivers that contributed to the result
        distinct: the number of distinct receivers (<= len(receivers))
        """
        raise NotImplementedError
57 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # mlat-server
2 |
3 | This is a Mode S multilateration server that is designed to operate with
4 | clients that do _not_ have synchronized clocks.
5 |
6 | It uses ADS-B aircraft that are transmitting DF17 extended squitter position
7 | messages as reference beacons and uses the relative arrival times of those
8 | messages to model the clock characteristics of each receiver.
9 |
10 | Then it does multilateration of aircraft that are transmitting only Mode S
11 | using the same receivers.
12 |
13 | ## License
14 |
15 | It is important that you read this section before using or modifying the server!
16 |
17 | The server code is licensed under the Affero GPL v3. This license is similar
18 | to the GPL v3, but it has an additional requirement that you must provide
19 | source code to _users who access the server over a network_.
20 |
21 | So if you are planning to operate a copy of this server, you must release any
22 | modifications you make to the source code to your users, even if you wouldn't
23 | normally distribute it.
24 |
25 | If you are not willing to distribute your changes, you have three options:
26 |
27 | * Contact the copyright holder (Oliver) to discuss a separate license for
28 | the server code; or
29 | * Don't allow anyone else to connect to your server, i.e. run only your
30 | own receivers; or
31 | * Don't use this server as a basis for your work at all.
32 |
33 | The server will automatically provide details of the AGPL license and a link
to the server code, to each client that connects. This is configured in
mlat/server/config.py. If you make modifications, the suggested process is:
36 |
37 | * Put the modified source code somewhere public (github may be simplest).
* Update the URL configured in mlat/server/config.py to point to your modified code.
39 |
None of this requires that you make your server publicly accessible. If you
41 | want to run a private server with a closed user group, that's fine. But you
42 | must still make the source code for your modified server available to your
43 | users, and they may redistribute it further if they wish.
44 |
45 | ## Prerequisites
46 |
47 | * Python 3.4 or later. You need the asyncio module which was introduced in 3.4.
48 | * Numpy and Scipy
49 | * pygraph (https://github.com/pmatiello/python-graph)
50 | * pykalman (https://github.com/pykalman/pykalman)
51 | * optionally, objgraph (https://mg.pov.lt/objgraph/) for leak checking
52 |
53 | ## Developer-ware
54 |
55 | It's all poorly documented and you need to understand quite a bit of the
56 | underlying mathematics of multilateration to make sense of it. Don't expect
57 | to just fire this up and have it all work perfectly first time. You will have
58 | to hack on the code.
59 |
60 | ## Running
61 |
62 | $ mlat-server --help
63 |
64 | ## Clients
65 |
66 | You need a bunch of receivers running mlat-client:
67 | https://github.com/mutability/mlat-client
68 |
69 | ## Output
70 |
71 | Results get passed back to the clients that contributed to the positions.
72 | You can also emit all positions to a local feed, see the command-line help.
73 |
--------------------------------------------------------------------------------
/modes/crc.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Calculates the 24-bit CRC used in Mode S messages.
21 | """
22 |
23 |
24 | # on my system, the generic version is fractionally slower than an unrolled
25 | # version, but the difference is eaten by the cost of having a wrapper to
26 | # decide which version to use. So let's just do this the simple way.
def residual(payload):
    """Computes the 24-bit Mode S CRC residual for a message.

    The residual is the CRC computed over all but the final 3 bytes of the
    message, XOR-ed with the CRC value stored in those final 3 bytes.

    Expected residuals:
      * Address/Parity messages: the transmitter's address
      * Parity/Interrogator messages: the interrogator ID
      * extended squitter / DF11 acquisition squitter: zero

    Errors in the message or in the stored CRC value show up as errors in
    the residual.
    """

    table = _crc_table
    rem = table[payload[0]]
    for byte in payload[1:-3]:
        rem = ((rem & 0xFFFF) << 8) ^ table[byte ^ (rem >> 16)]

    # The three CRC bytes occupy disjoint bit positions, so combining them
    # with OR and then XOR-ing once is equivalent to XOR-ing each in turn.
    stored = (payload[-3] << 16) | (payload[-2] << 8) | payload[-1]
    return rem ^ stored
53 |
54 |
55 | def _make_table():
56 | # precompute the CRC table
57 | t = []
58 |
59 | poly = 0xfff409
60 | for i in range(256):
61 | c = i << 16
62 | for j in range(8):
63 | if c & 0x800000:
64 | c = (c << 1) ^ poly
65 | else:
66 | c = (c << 1)
67 |
68 | t.append(c & 0xffffff)
69 |
70 | return t
71 |
if __name__ == '__main__':
    # Run as a script (python3 -m modes.crc): regenerate and print the
    # contents of crc_lookup.py so the table can be precomputed offline.
    _crc_table = _make_table()

    print('# -*- mode: python; indent-tabs-mode: nil -*-')
    print('# generated by modes.crc: python3 -m modes.crc')
    print()
    print('table = (')
    for i in range(0, 256, 8):
        print(' ' + ', '.join(['0x{0:06x}'.format(c) for c in _crc_table[i:i+8]]) + ',')
    print(')')
else:
    # Normal import: prefer the precomputed table, fall back to building it
    # at import time if crc_lookup.py has not been generated.
    try:
        from .crc_lookup import table as _crc_table
    except ImportError:
        _crc_table = _make_table()
87 |
--------------------------------------------------------------------------------
/modes/squawk.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
Decoder for 13-bit squawk (identity) fields contained in some Mode S messages.
21 | """
22 |
23 | __all__ = ('decode_id13',)
24 |
25 |
26 | def _make_upper_table():
27 | ut = []
28 | for i in range(64):
29 | v = 0
30 | id13 = i << 7
31 | if id13 & 0x1000:
32 | v |= 0x0010 # C1
33 | if id13 & 0x0800:
34 | v |= 0x1000 # A1
35 | if id13 & 0x0400:
36 | v |= 0x0020 # C2
37 | if id13 & 0x0200:
38 | v |= 0x2000 # A2
39 | if id13 & 0x0100:
40 | v |= 0x0040 # C4
41 | if id13 & 0x0080:
42 | v |= 0x4000 # A4
43 | ut.append(v)
44 | return ut
45 |
46 |
47 | def _make_lower_table():
48 | lt = []
49 | for id13 in range(64):
50 | v = 0
51 | # 0040 unused (M/X)
52 | if id13 & 0x0020:
53 | v |= 0x0100 # B1
54 | if id13 & 0x0010:
55 | v |= 0x0001 # D1/Q
56 | if id13 & 0x0008:
57 | v |= 0x0200 # B2
58 | if id13 & 0x0004:
59 | v |= 0x0002 # D2
60 | if id13 & 0x0002:
61 | v |= 0x0400 # B4
62 | if id13 & 0x0001:
63 | v |= 0x0004 # D4
64 | lt.append(v)
65 |
66 | return lt
67 |
68 |
def decode_id13(id13):
    """Decode a 13-bit Mode A squawk.

    The expected ordering is that from Annex 10 vol 4 3.1.2.6.7.1:

     C1, A1, C2, A2, C4, A4, ZERO, B1, D1, B2, D2, B4, D4

    Returns the squawk as a 4-character string."""

    # The two precomputed tables produce disjoint bits of the result: _ut
    # covers the top 6 input bits (A/C pulses), _lt the bottom 6 (B/D pulses);
    # the middle M/X bit is ignored. Each output nibble is 0-7, so formatting
    # the combined value with %04x yields the four octal squawk digits.
    return '{0:04x}'.format(_lt[id13 & 63] | _ut[id13 >> 7])
79 |
80 |
if __name__ == '__main__':
    # Run as a script (python3 -m modes.squawk): regenerate and print the
    # contents of squawk_lookup.py so the tables can be precomputed offline.
    _lt = _make_lower_table()
    _ut = _make_upper_table()

    print('# -*- mode: python; indent-tabs-mode: nil -*-')
    print('# generated by modes.squawk: python3 -m modes.squawk')
    print()
    print('lt = (')
    for i in range(0, len(_lt), 8):
        print(' ' + ', '.join(['0x{0:04x}'.format(v) for v in _lt[i:i+8]]) + ',')
    print(')')
    print()
    print('ut = (')
    for i in range(0, len(_ut), 8):
        print(' ' + ', '.join(['0x{0:04x}'.format(v) for v in _ut[i:i+8]]) + ',')
    print(')')

else:
    # Normal import: prefer the precomputed tables, fall back to building
    # them at import time if squawk_lookup.py has not been generated.
    try:
        from .squawk_lookup import ut as _ut, lt as _lt
    except ImportError:
        _lt = _make_lower_table()
        _ut = _make_upper_table()
104 |
--------------------------------------------------------------------------------
/mlat/geodesy.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
18 |
19 | """
20 | Utility functions to convert between coordinate systems and calculate distances.
21 | """
22 |
23 | import math
24 | from . import constants
25 |
# WGS84 ellipsoid Earth parameters
WGS84_A = 6378137.0  # semi-major axis, metres
WGS84_F = 1.0/298.257223563  # flattening
WGS84_B = WGS84_A * (1 - WGS84_F)  # semi-minor axis, metres
WGS84_ECC_SQ = 1 - WGS84_B * WGS84_B / (WGS84_A * WGS84_A)  # first eccentricity squared
WGS84_ECC = math.sqrt(WGS84_ECC_SQ)  # first eccentricity

# Average radius for a spherical Earth
SPHERICAL_R = 6371e3

# Some derived values used by ecef2llh()
_wgs84_ep = math.sqrt((WGS84_A**2 - WGS84_B**2) / WGS84_B**2)  # second eccentricity
_wgs84_ep2_b = _wgs84_ep**2 * WGS84_B
_wgs84_e2_a = WGS84_ECC_SQ * WGS84_A
40 |
41 |
def llh2ecef(llh):
    """Converts from WGS84 lat/lon/height to ellipsoid-earth ECEF.

    llh: (latitude_degrees, longitude_degrees, height_metres)
    Returns an (x, y, z) tuple in metres.
    """

    lat_r = llh[0] * constants.DTOR
    lon_r = llh[1] * constants.DTOR
    height = llh[2]

    sin_lat = math.sin(lat_r)
    cos_lat = math.cos(lat_r)
    sin_lon = math.sin(lon_r)
    cos_lon = math.cos(lon_r)

    # prime vertical radius of curvature at this latitude
    rn = WGS84_A / math.sqrt(1 - (sin_lat * sin_lat * WGS84_ECC_SQ))

    return ((rn + height) * cos_lat * cos_lon,
            (rn + height) * cos_lat * sin_lon,
            (rn * (1 - WGS84_ECC_SQ) + height) * sin_lat)
62 |
63 |
def ecef2llh(ecef):
    "Converts from ECEF to WGS84 lat/lon/height"

    x, y, z = ecef

    lon = math.atan2(y, x)

    # Closed-form (non-iterative, Bowring-style) latitude recovery via the
    # parametric latitude th. NOTE(review): single pass, not iterated -
    # assumed accurate enough for this application; confirm if reused.
    p = math.sqrt(x**2 + y**2)  # distance from the Earth's rotation axis
    th = math.atan2(WGS84_A * z, WGS84_B * p)
    lat = math.atan2(z + _wgs84_ep2_b * math.sin(th)**3,
                     p - _wgs84_e2_a * math.cos(th)**3)

    # prime vertical radius of curvature, then height above the ellipsoid.
    # NOTE(review): p / cos(lat) degenerates near the poles where cos(lat)
    # approaches zero.
    N = WGS84_A / math.sqrt(1 - WGS84_ECC_SQ * math.sin(lat)**2)
    alt = p / math.cos(lat) - N

    return (lat * constants.RTOD, lon * constants.RTOD, alt)
80 |
81 |
def greatcircle(p0, p1):
    """Returns a great-circle distance in metres between two LLH points,
    _assuming spherical earth_ and _ignoring altitude_. Don't use this if you
    need a distance accurate to better than 1%.

    p0, p1: (lat, lon, ...) tuples in degrees; only the first two elements
    are used.
    """

    lat0 = p0[0] * constants.DTOR
    lon0 = p0[1] * constants.DTOR
    lat1 = p1[0] * constants.DTOR
    lon1 = p1[1] * constants.DTOR
    cos_angle = (math.sin(lat0) * math.sin(lat1) +
                 math.cos(lat0) * math.cos(lat1) * math.cos(abs(lon0 - lon1)))
    # Clamp to acos's domain: for identical (or antipodal) points, floating
    # point rounding can push the value fractionally outside [-1, 1], which
    # would raise "math domain error".
    return SPHERICAL_R * math.acos(max(-1.0, min(1.0, cos_angle)))
94 |
95 |
# direct implementation here turns out to be _much_ faster (10-20x) compared to
# scipy.spatial.distance.euclidean or numpy-based approaches
def ecef_distance(p0, p1):
    """Returns the straight-line distance in metres between two ECEF points."""
    dx = p0[0] - p1[0]
    dy = p0[1] - p1[1]
    dz = p0[2] - p1[2]
    return math.sqrt(dx**2 + dy**2 + dz**2)
101 |
--------------------------------------------------------------------------------
/mlat/profile.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-client - an ADS-B multilateration client.
4 | # Copyright 2015, Oliver Jowett
5 | #
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see .
18 |
19 | import os
20 |
21 | # NB: This requires Python 3.3 when MLAT_CPU_PROFILE is set.
22 |
23 |
if not int(os.environ.get('MLAT_CPU_PROFILE', '0')):
    # Profiling disabled (the default): provide no-op stand-ins so callers
    # can apply trackcpu / call dump_cpu_profiles unconditionally.
    enabled = False

    def trackcpu(f, **kwargs):
        # Identity decorator: return the function unwrapped.
        return f

    def dump_cpu_profiles(tofile=None):
        pass
else:
    import sys
    import time
    import operator
    import functools

    # One [name, call_count, total_cpu_seconds] entry per tracked function;
    # mutated in place by the wrappers created in trackcpu().
    _cpu_tracking = []
    print('CPU profiling enabled', file=sys.stderr)
    enabled = True
    # Baselines taken at import so dump_cpu_profiles can report CPU use as a
    # fraction of wall-clock time since startup.
    # NOTE(review): CLOCK_THREAD_CPUTIME_ID measures this thread only and is
    # not available on every platform - confirm on the deployment target.
    baseline_cpu = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
    baseline_wall = time.monotonic()

    def trackcpu(f, name=None, **kwargs):
        # Decorator that accumulates call count and thread CPU time for f.
        if name is None:
            name = f.__module__ + '.' + f.__qualname__

        print('Profiling:', name, file=sys.stderr)
        tracking = [name, 0, 0.0]
        _cpu_tracking.append(tracking)

        @functools.wraps(f)
        def cpu_measurement_wrapper(*args, **kwargs):
            start = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
            try:
                return f(*args, **kwargs)
            finally:
                # finally: the call is counted even when f raises.
                end = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
                tracking[1] += 1
                tracking[2] += (end - start)

        return cpu_measurement_wrapper

    def dump_cpu_profiles(tofile=sys.stderr):
        # Print a table of all tracked functions, sorted by total CPU time.
        elapsed_cpu = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID) - baseline_cpu
        elapsed_wall = time.monotonic() - baseline_wall

        print('Elapsed: {wall:.1f} CPU: {cpu:.1f} ({percent:.0f}%)'.format(
            wall=elapsed_wall,
            cpu=elapsed_cpu,
            percent=100.0 * elapsed_cpu / elapsed_wall), file=tofile)
        print('{rank:4s} {name:60s} {count:6s} {persec:6s} {total:8s} {each:8s} {fraction:6s}'.format(
            rank='#',
            name='Function',
            count='Calls',
            persec='(/sec)',
            total='Total(s)',
            each='Each(us)',
            fraction="Frac"), file=tofile)

        rank = 1
        for name, count, total in sorted(_cpu_tracking, key=operator.itemgetter(2), reverse=True):
            if count == 0:
                # Never-called entries sort last (0.0 total), so stop here.
                break

            print('{rank:4d} {name:60s} {count:6d} {persec:6.1f} {total:8.3f} {each:8.0f} {fraction:6.1f}'.format(
                rank=rank,
                name=name,
                count=count,
                persec=1.0 * count / elapsed_wall,
                total=total,
                each=total * 1e6 / count,
                fraction=100.0 * total / elapsed_cpu), file=tofile)
            rank += 1

        tofile.flush()
97 |
--------------------------------------------------------------------------------
/tools/coverage/overlay.js:
--------------------------------------------------------------------------------
1 | var overlays = {};
2 | var markers = {};
3 | var map = null;
4 |
function addButtonFor(station, val) {
    // Clone the hidden template row, label its button with the station name,
    // and wire the button to select that station's overlay.
    var row = document.getElementById("station-template").cloneNode(true);
    row.classList.remove("hidden");
    var btn = row.querySelector("button");
    btn.innerHTML = station;
    btn.addEventListener("click", selectOverlay.bind(undefined, val));
    document.getElementById("station-table-body").appendChild(row);
}
14 |
function addBlankRow() {
    // Insert a spacer row: a clone of the template with its button removed.
    var row = document.getElementById("station-template").cloneNode(true);
    row.classList.remove("hidden");
    var btn = row.querySelector("button");
    btn.parentNode.removeChild(btn);
    document.getElementById("station-table-body").appendChild(row);
}
23 |
function initialize() {
    // Fill in the summary fields; first_position / last_position /
    // num_positions and the coverage object are globals defined by the
    // generated data script loaded alongside this page.
    document.getElementById("date_start").innerHTML = first_position;
    document.getElementById("date_end").innerHTML = last_position;
    document.getElementById("num_pos").innerHTML = num_positions;

    map = new google.maps.Map(document.getElementById('map-canvas'));

    // Union of all overlay bounds, used to fit the initial viewport.
    var absbounds = null;

    // Fixed aggregate views first, then one button per station (added in
    // the loop below).
    addButtonFor("All coverage", "all");
    addBlankRow();
    addButtonFor("4+ station overlap", "4plus");
    addButtonFor("5+ station overlap", "5plus");
    addButtonFor("6+ station overlap", "6plus");
    addButtonFor("Below 18000ft", "below18000");
    addButtonFor("Below 10000ft", "below10000");
    addButtonFor("Min altitude seen", "byalt");
    addBlankRow();

    var names = Object.keys(coverage).sort();
    for (var i = 0; i < names.length; ++i) {
        var k = names[i];
        var s = coverage[k];
        var bounds = new google.maps.LatLngBounds(
            new google.maps.LatLng(s.min_lat, s.min_lon),
            new google.maps.LatLng(s.max_lat, s.max_lon));

        overlays[k] = new google.maps.GroundOverlay(
            s.image,
            bounds,
            { opacity : 1.0 });

        if (absbounds === null) {
            // Copy rather than alias: union() below mutates absbounds.
            absbounds = new google.maps.LatLngBounds(bounds.getSouthWest(), bounds.getNorthEast());
        } else {
            absbounds.union(bounds);
        }

        if (s.lat !== null) {
            // marker jitter is just to separate markers that would otherwise be overlapping
            markers[k] = new google.maps.Marker({
                map : map,
                position : new google.maps.LatLng(s.lat + Math.random()*0.02-0.01, s.lon + Math.random()*0.02-0.01),
                title : s.name
            });

            google.maps.event.addListener(markers[k], 'click', selectOverlay.bind(undefined, k))
        }

        if (s.is_station) {
            addButtonFor(k, k);
        }
    }

    // Show the combined coverage by default, zoomed to include everything.
    overlays['all'].setMap(map);
    map.fitBounds(absbounds);
}
81 |
var currentOverlay = "all";
function selectOverlay(stationname) {
    // Hide whatever overlay is currently displayed.
    overlays[currentOverlay].setMap(null);

    // Selecting the already-active overlay toggles back to "all".
    var next = (stationname === currentOverlay) ? "all" : stationname;

    overlays[next].setMap(map);
    currentOverlay = next;
}
93 |
94 | google.maps.event.addDomListener(window, 'load', initialize);
95 |
--------------------------------------------------------------------------------
/test/kalman_test.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | import os
4 | import json
5 | import sys
6 | import numpy
7 | import tempfile
8 | import subprocess
9 |
10 | from contextlib import closing
11 |
12 | import mlat.geodesy
13 | import mlat.constants
14 | import mlat.kalman
15 |
16 |
class DummyReceiver(object):
    """Minimal stand-in for a server receiver object.

    The Kalman filter only reads the receiver's .position attribute, so
    that is all this dummy provides.
    """

    def __init__(self, position):
        # ECEF position tuple of the receiver
        self.position = position
20 |
21 |
def load_data(f, icao):
    """Load filter input records for a single aircraft from a JSON-lines file.

    f:    an open text file; each line should be a JSON object with keys
          icao, time, altitude, distinct, ecef, ecef_cov, cluster
    icao: aircraft address (int) whose records should be selected

    Unparseable lines are reported and skipped. Returns a list of
    (timestamp, cluster, altitude, ecef, ecef_cov, distinct) tuples, where
    cluster is a list of (DummyReceiver, timestamp_seconds, variance_s2)
    tuples.
    """
    data = []
    for line in f:
        try:
            state = json.loads(line)
        except ValueError:
            # Malformed line: report and move on. (The previous version
            # fell through after the except clause and re-processed the
            # prior line's state — or raised NameError on the first line.)
            print('skipped: ' + line)
            continue

        if int(state['icao'], 16) != icao:
            continue

        timestamp = state['time']
        altitude = state['altitude']
        distinct = state['distinct']
        ecef = numpy.array(state['ecef'])
        ecef_cov = numpy.array(state['ecef_cov']).reshape((3, 3))
        # Cluster timestamps arrive in microseconds and variances in us^2;
        # convert to seconds and seconds^2 respectively.
        cluster = [(DummyReceiver((x, y, z)), t/1e6, v/1e12)
                   for x, y, z, t, v in state['cluster']]

        data.append((timestamp, cluster, altitude, ecef, ecef_cov, distinct))

    return data
46 |
47 |
def run_filter(data, filterstate, outpng):
    """Run one Kalman filter over loaded data and plot the results.

    data:        list of records as produced by load_data()
    filterstate: a mlat.kalman filter state object to feed
    outpng:      base name for the output PNG files; err_<outpng> and
                 speed_<outpng> are also written

    Writes raw least-squares and filtered tracks to temporary TSV files,
    renders them with gnuplot (which must be on PATH), then removes the
    temporary files.
    """
    # delete=False so the files survive the 'with' blocks and can be read
    # by gnuplot below; they are unlinked explicitly at the end.
    with closing(tempfile.NamedTemporaryFile(mode='wt', prefix='raw_', suffix='.tsv', delete=False)) as raw_tsv:
        with closing(tempfile.NamedTemporaryFile(mode='wt', prefix='filter_', suffix='.tsv', delete=False)) as filter_tsv:
            for timestamp, cluster, altitude, ecef, ecef_cov, distinct in data:
                # Raw (unfiltered) least-squares position, as lat/lon/alt.
                llh = mlat.geodesy.ecef2llh(ecef)
                print('{t}\t{llh[0]:.4f}\t{llh[1]:.4f}\t{llh[2]:.0f}'.format(t=timestamp, llh=llh), file=raw_tsv)
                filterstate.update(timestamp, cluster, altitude, ecef, ecef_cov, distinct)
                # The filter reports no position until it has converged.
                if filterstate.position is not None:
                    print('{t}\t{llh[0]:.4f}\t{llh[1]:.4f}\t{llh[2]:.0f}\t{speed:.0f}\t{pe:.0f}\t{ve:.0f}'.format(
                        t=timestamp,
                        llh=filterstate.position_llh,
                        speed=filterstate.ground_speed,
                        pe=filterstate.position_error,
                        ve=filterstate.velocity_error),
                        file=filter_tsv)

    # Generate a gnuplot script referencing the two TSV files by name.
    with closing(tempfile.NamedTemporaryFile(mode='wt', prefix='gnuplot_', suffix='.cmd', delete=False)) as gnuplot_script:
        print("""
set terminal png size 800,800;
set output "{outpng}";

plot "{raw_tsv.name}" using 3:2 title "least-squares", "{filter_tsv.name}" using 3:2 with lines lt 3 title "Kalman";

set output "err_{outpng}";
plot [:] [0:2000] "{filter_tsv.name}" using 1:6 with lines title "position error", "" using 1:7 with lines lt 3 title "velocity error";

set output "speed_{outpng}";
plot [:] [0:] "{filter_tsv.name}" using 1:5 with lines title "groundspeed";
""".format(outpng=outpng,
           raw_tsv=raw_tsv,
           filter_tsv=filter_tsv),
              file=gnuplot_script)

    subprocess.check_call(["gnuplot", gnuplot_script.name])

    # Clean up the temporaries now that the plots have been rendered.
    os.unlink(gnuplot_script.name)
    os.unlink(filter_tsv.name)
    os.unlink(raw_tsv.name)
86 |
if __name__ == '__main__':
    # Usage: kalman_test.py <icao-hex>
    # Reads pseudoranges.json from the current directory.
    icao = int(sys.argv[1], 16)
    with closing(open('pseudoranges.json')) as f:
        data = load_data(f, icao)

    # Sweep a range of process-noise settings, producing plots for both
    # the constant-acceleration (CA) and constant-velocity (CV) filters.
    for pn in (0.01, 0.02, 0.04, 0.06, 0.08, 0.10, 0.12, 0.14, 0.16, 0.18, 0.20):
        print(pn)

        filt = mlat.kalman.KalmanStateCA(icao)
        filt.process_noise = pn
        filt.min_tracking_receivers = 4
        filt.outlier_mahalanobis_distance = 10.0
        run_filter(data, filt, "kalman_ca_{pn:.2f}.png".format(pn=pn))

        filt = mlat.kalman.KalmanStateCV(icao)
        filt.process_noise = pn
        filt.min_tracking_receivers = 4
        filt.outlier_mahalanobis_distance = 10.0
        run_filter(data, filt, "kalman_cv_{pn:.2f}.png".format(pn=pn))
106 |
--------------------------------------------------------------------------------
/modes/altitude.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Decoders for the 12- and 13-bit altitude encodings used in Mode S responses
21 | and ADS-B extended squitter messages.
22 | """
23 |
24 | __all__ = ('decode_ac12', 'decode_ac13')
25 |
26 |
27 | def _decode_ac13(ac13):
28 | if ac13 is None or ac13 == 0: # no data
29 | return None
30 | if ac13 & 0x0040: # M bit set
31 | return None
32 | if ac13 & 0x0010: # Q bit set
33 | n = ((ac13 & 0x1f80) >> 2) | ((ac13 & 0x0020) >> 1) | (ac13 & 0x000f)
34 | return n * 25 - 1000
35 |
36 | # convert from Gillham code
37 | if not (ac13 & 0x1500):
38 | return None # illegal C bits
39 |
40 | h = 0
41 | if ac13 & 0x1000:
42 | h ^= 7 # C1
43 | if ac13 & 0x0400:
44 | h ^= 3 # C2
45 | if ac13 & 0x0100:
46 | h ^= 1 # C4
47 | if h & 5:
48 | h ^= 5
49 | if h > 5:
50 | return None # illegal C bits
51 |
52 | f = 0
53 | if ac13 & 0x0010:
54 | f ^= 0x1ff # D1
55 | if ac13 & 0x0004:
56 | f ^= 0x0ff # D2
57 | if ac13 & 0x0001:
58 | f ^= 0x07f # D4
59 | if ac13 & 0x0800:
60 | f ^= 0x03f # A1
61 | if ac13 & 0x0200:
62 | f ^= 0x01f # A2
63 | if ac13 & 0x0080:
64 | f ^= 0x00f # A4
65 | if ac13 & 0x0020:
66 | f ^= 0x007 # B1
67 | if ac13 & 0x0008:
68 | f ^= 0x003 # B2
69 | if ac13 & 0x0002:
70 | f ^= 0x001 # B4
71 |
72 | if f & 1:
73 | h = (6 - h)
74 |
75 | a = 500 * f + 100 * h - 1300
76 | if a < -1200:
77 | return None # illegal value
78 |
79 | return a
80 |
81 |
def decode_ac13(ac13):
    """Decode a Mode S 13-bit altitude field via the precomputed table.

    Bit ordering follows Annex 10 §3.1.2.6.5.4:

      C1, A1, C2, A2, C4, A4, (M), B1, (Q), B2, D2, B4, D4

    Returns the signed altitude in feet, or None if not decodable.
    """

    return None if ac13 is None else _alt_table[ac13 & 0x1fff]
95 |
96 |
def decode_ac12(ac12):
    """Decode a 12-bit AC altitude field from an extended squitter.

    Per Doc 9871 Table A-2-5 this is the Annex 10 §3.1.2.6.5.4 altitude
    code with the M bit removed; reinsert a zero M bit and reuse the
    13-bit lookup table.

    Returns the signed altitude in feet, or None if not a valid altitude.
    """

    if ac12 is None:
        return None
    # Shift the top six bits left by one to open a zero M-bit slot.
    ac13 = ((ac12 & 0x0fc0) << 1) | (ac12 & 0x003f)
    return _alt_table[ac13]
110 |
111 |
def _make_table():
    """Build the full 8192-entry AC13 -> altitude-in-feet lookup table."""
    return list(map(_decode_ac13, range(2**13)))
115 |
116 |
if __name__ == '__main__':
    # Run as a script (python3 -m modes.altitude): regenerate the
    # precomputed lookup-table module source on stdout.
    _alt_table = _make_table()

    print('# -*- mode: python; indent-tabs-mode: nil -*-')
    print('# generated by modes.altitude: python3 -m modes.altitude')
    print()
    print('table = (')
    # Eight entries per line, each repr padded to a fixed width.
    for i in range(0, 2**13, 8):
        print('    ' + ''.join(['{0:8s}'.format(repr(a)+', ') for a in _alt_table[i:i+8]]))
    print(')')

else:
    # Imported as a module: prefer the pregenerated table, falling back
    # to computing it at import time if the generated module is missing.
    try:
        from .altitude_lookup import table as _alt_table
    except ImportError:
        _alt_table = _make_table()
133 |
--------------------------------------------------------------------------------
/mlat/server/leakcheck.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 |
20 | # Derived from (and uses) objgraph, which has licence:
21 |
22 | # Copyright (c) 2008-2015 Marius Gedminas and contributors
23 | # Released under the MIT licence.
24 |
25 | # Permission is hereby granted, free of charge, to any person obtaining a
26 | # copy of this software and associated documentation files (the "Software"),
27 | # to deal in the Software without restriction, including without limitation
28 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
29 | # and/or sell copies of the Software, and to permit persons to whom the
30 | # Software is furnished to do so, subject to the following conditions:
31 | #
32 | # The above copyright notice and this permission notice shall be included in
33 | # all copies or substantial portions of the Software.
34 | #
35 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
36 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
37 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
38 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
39 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
40 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
41 | # DEALINGS IN THE SOFTWARE.
42 |
43 | """
44 | Simple periodic memory leak checker.
45 | """
46 |
47 | import asyncio
48 | import gc
49 | import logging
50 | import operator
51 |
52 | from mlat.server import util
53 |
54 | try:
55 | import objgraph
56 | except ImportError:
57 | objgraph = None
58 |
59 |
class LeakChecker(object):
    """Periodically logs object-count growth and memory hogs via objgraph.

    Intended as a coarse leak detector: it records the peak instance
    count seen for every type and logs any type whose count exceeds its
    previous peak.
    """

    def __init__(self):
        self.logger = logging.getLogger("leaks")
        # The running checker() task, or None if not started/disabled.
        self._task = None
        # Highest instance count seen so far, keyed by type name.
        self.peak = {}

    def start(self):
        """Start the background checker; no-op if objgraph is missing."""
        if objgraph is None:
            self.logger.warning("Leak checking disabled (objgraph not available)")
        else:
            # asyncio.async is the pre-3.4.4 spelling of ensure_future.
            self._task = asyncio.async(self.checker())

        return util.completed_future

    def close(self):
        if self._task:
            self._task.cancel()

    @asyncio.coroutine
    def wait_closed(self):
        yield from util.safe_wait([self._task])

    @asyncio.coroutine
    def checker(self):
        """Main loop: baseline after startup, then check every hour."""
        yield from asyncio.sleep(120.0)  # let startup settle

        # Establish the baseline peaks without logging them.
        gc.collect()
        self.check_leaks(suppress=True)

        while True:
            yield from asyncio.sleep(3600.0)

            try:
                # Collect first so transient garbage doesn't count as growth.
                gc.collect()
                self.show_hogs()
                self.check_leaks()
            except Exception:
                self.logger.exception("leak checking failed")

    def check_leaks(self, suppress=False, limit=20):
        """Log the types whose instance counts set a new peak.

        suppress: if True, update the peaks but log nothing.
        limit:    log at most this many types, largest increase first.
        """
        stats = objgraph.typestats(shortnames=False)
        deltas = {}
        for name, count in stats.items():
            old_count = self.peak.get(name, 0)
            if count > old_count:
                deltas[name] = count - old_count
                self.peak[name] = count

        # Sort by size of increase, descending, and truncate.
        deltas = sorted(deltas.items(), key=operator.itemgetter(1), reverse=True)
        deltas = deltas[:limit]

        if not suppress:
            if deltas:
                self.logger.info("Peak memory usage change:")
                # Pad type names to the widest for aligned columns.
                width = max(len(name) for name, count in deltas)
                for name, delta in deltas:
                    self.logger.info('  %-*s%9d %+9d' % (width, name, stats[name], delta))

    def show_hogs(self, limit=20):
        """Log the types with the most live instances."""
        self.logger.info("Top memory hogs:")
        stats = objgraph.most_common_types(limit=limit, shortnames=False)
        width = max(len(name) for name, count in stats)
        for name, count in stats:
            self.logger.info('  %-*s %i' % (width, name, count))
124 |
--------------------------------------------------------------------------------
/modes/cpr.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Decoder for the Compact Position Reporting (CPR) position encoding used in
21 | ADS-B extended squitter messages.
22 | """
23 |
24 | import math
25 | import bisect
26 |
27 | __all__ = ['decode']
28 |
29 |
# NL (number of longitude zones) transition latitudes for airborne CPR.
# Each entry is (transition latitude in degrees, NL value that applies
# up to and including that latitude); see Doc 9871 §A.1.7.2 NL(lat).
nl_table = (
    (10.47047130, 59),
    (14.82817437, 58),
    (18.18626357, 57),
    (21.02939493, 56),
    (23.54504487, 55),
    (25.82924707, 54),
    (27.93898710, 53),
    (29.91135686, 52),
    (31.77209708, 51),
    (33.53993436, 50),
    (35.22899598, 49),
    (36.85025108, 48),
    (38.41241892, 47),
    (39.92256684, 46),
    (41.38651832, 45),
    (42.80914012, 44),
    (44.19454951, 43),
    (45.54626723, 42),
    (46.86733252, 41),
    (48.16039128, 40),
    (49.42776439, 39),
    (50.67150166, 38),
    (51.89342469, 37),
    (53.09516153, 36),
    (54.27817472, 35),
    (55.44378444, 34),
    (56.59318756, 33),
    (57.72747354, 32),
    (58.84763776, 31),
    (59.95459277, 30),
    (61.04917774, 29),
    (62.13216659, 28),
    (63.20427479, 27),
    (64.26616523, 26),
    (65.31845310, 25),
    (66.36171008, 24),
    (67.39646774, 23),
    (68.42322022, 22),
    (69.44242631, 21),
    (70.45451075, 20),
    (71.45986473, 19),
    (72.45884545, 18),
    (73.45177442, 17),
    (74.43893416, 16),
    (75.42056257, 15),
    (76.39684391, 14),
    (77.36789461, 13),
    (78.33374083, 12),
    (79.29428225, 11),
    (80.24923213, 10),
    (81.19801349, 9),
    (82.13956981, 8),
    (83.07199445, 7),
    (83.99173563, 6),
    (84.89166191, 5),
    (85.75541621, 4),
    (86.53536998, 3),
    (87.00000000, 2),
    (90.00000000, 1)
)

# Parallel arrays for bisect-based lookup in NL() below.
nl_lats = [x[0] for x in nl_table]
nl_vals = [x[1] for x in nl_table]
94 |
95 |
def NL(lat):
    """Return the number of longitude zones at the given latitude.

    The table is symmetric about the equator, so only the magnitude of
    the latitude matters.
    """
    return nl_vals[bisect.bisect_left(nl_lats, abs(lat))]
102 |
103 |
def MOD(a, b):
    """Modulo that always yields a result in [0, b) for positive b.

    Spelled out explicitly so the decode matches the CPR specification's
    MOD definition regardless of the host language's remainder semantics.
    """
    return ((a % b) + b) % b
109 |
110 |
def decode(latE, lonE, latO, lonO):
    """Perform globally unambiguous position decoding for a pair of
    airborne CPR messages.

    latE, lonE: the raw latitude and longitude values of the even message
    latO, lonO: the raw latitude and longitude values of the odd message

    Raw values are 17-bit fractions of a zone (0..131071).

    Return a tuple of (even latitude, even longitude, odd latitude, odd longitude)

    Raises ValueError if the messages do not produce a useful position."""

    # Compute the Latitude Index "j"
    # (60 and 59 are the even/odd latitude zone counts NZ*4 per Doc 9871.)
    j = math.floor(((59 * latE - 60 * latO) / 131072.0) + 0.5)
    rlatE = (360.0 / 60.0) * (MOD(j, 60) + latE / 131072.0)
    rlatO = (360.0 / 59.0) * (MOD(j, 59) + latO / 131072.0)

    # adjust for southern hemisphere values, which are in the range (270,360)
    if rlatE >= 270:
        rlatE -= 360
    if rlatO >= 270:
        rlatO -= 360

    # Check to see that the latitude is in range: -90 .. +90
    if rlatE < -90 or rlatE > 90 or rlatO < -90 or rlatO > 90:
        raise ValueError('latitude out of range')

    # Find latitude zone, abort if the two positions are not in the same zone
    # (a zone straddle means the pair cannot be decoded consistently).
    nl = NL(rlatE)
    if nl != NL(rlatO):
        raise ValueError('messages lie in different latitude zones')

    # Compute n(i): the longitude zone counts for even and odd encodings.
    nE = nl
    nO = max(1, nl - 1)

    # Compute the Longitude Index "m"
    m = math.floor((((lonE * (nl - 1)) - (lonO * nl)) / 131072.0) + 0.5)

    # Compute global longitudes
    rlonE = (360.0 / nE) * (MOD(m, nE) + lonE / 131072.0)
    rlonO = (360.0 / nO) * (MOD(m, nO) + lonO / 131072.0)

    # Renormalize to -180 .. +180
    rlonE -= math.floor((rlonE + 180) / 360) * 360
    rlonO -= math.floor((rlonO + 180) / 360) * 360

    return (rlatE, rlonE, rlatO, rlonO)
158 |
--------------------------------------------------------------------------------
/mlat/server/solver.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | The bit where all the magic happens: take a list of receive timestamps and
21 | produce a position.
22 | """
23 |
24 | import logging
25 | import math
26 |
27 | import scipy.optimize
28 |
29 | from mlat import geodesy, constants, profile
30 | from mlat.server import config
31 |
32 | # The core of it all. Not very big, is it?
33 | # (Admittedly the entire least-squares solver is hidden within scipy..)
34 |
35 | glogger = logging.getLogger("solver")
36 |
37 |
def _residuals(x_guess, pseudorange_data, altitude, altitude_error):
    """Return an array of residuals for a position guess at x_guess versus
    actual measurements pseudorange_data and altitude.

    x_guess is (x, y, z, clock_offset); each residual is normalized by
    the corresponding measurement error.
    """

    position_guess = list(x_guess[:3])
    offset = x_guess[3]

    # Predicted-vs-measured pseudorange for each receiver, scaled by error.
    residuals = [
        (pseudorange - (geodesy.ecef_distance(receiver_position, position_guess) - offset)) / error
        for receiver_position, pseudorange, error in pseudorange_data
    ]

    # If an altitude measurement is available, include its residual too.
    if altitude is not None:
        altitude_guess = geodesy.ecef2llh(position_guess)[2]
        residuals.append((altitude - altitude_guess) / altitude_error)

    return residuals
57 |
58 |
@profile.trackcpu
def solve(measurements, altitude, altitude_error, initial_guess):
    """Given a set of receive timestamps, multilaterate the position of the transmitter.

    measurements: a list of (receiver, timestamp, variance) tuples. Should be sorted by timestamp.
      receiver.position should be the ECEF position of the receiver
      timestamp should be a reception time in seconds (with an arbitrary epoch)
      variance should be the estimated variance of timestamp
    altitude: the reported altitude of the transmitter in _meters_, or None
    altitude_error: the estimated error in altitude in meters, or None
    initial_guess: an ECEF position to start the solver from

    Returns None on failure, or (ecef, ecef_cov) on success, with:

    ecef: the multilaterated ECEF position of the transmitter
    ecef_cov: an estimate of the covariance matrix of ecef

    Raises ValueError if fewer than four constraints (measurements plus
    optional altitude) are available.
    """

    if len(measurements) + (0 if altitude is None else 1) < 4:
        raise ValueError('Not enough measurements available')

    # Convert timestamps to pseudoranges (meters) relative to the first
    # (earliest) measurement; errors are standard deviations in meters.
    base_timestamp = measurements[0][1]
    pseudorange_data = [(receiver.position,
                         (timestamp - base_timestamp) * constants.Cair,
                         math.sqrt(variance) * constants.Cair)
                        for receiver, timestamp, variance in measurements]
    # Solve for (x, y, z, clock_offset); offset absorbs the unknown
    # transmit time relative to base_timestamp.
    x_guess = [initial_guess[0], initial_guess[1], initial_guess[2], 0.0]
    x_est, cov_x, infodict, mesg, ler = scipy.optimize.leastsq(
        _residuals,
        x_guess,
        args=(pseudorange_data, altitude, altitude_error),
        full_output=True,
        maxfev=config.SOLVER_MAXFEV)

    # leastsq reports success via ler codes 1-4.
    if ler in (1, 2, 3, 4):
        #glogger.info("solver success: {0} {1}".format(ler, mesg))

        # Solver found a result. Validate that it makes
        # some sort of physical sense.
        (*position_est, offset_est) = x_est

        if offset_est < 0 or offset_est > config.MAX_RANGE:
            #glogger.info("solver: bad offset: {0}".formaT(offset_est))
            # implausible range offset to closest receiver
            return None

        for receiver, timestamp, variance in measurements:
            d = geodesy.ecef_distance(receiver.position, position_est)
            if d > config.MAX_RANGE:
                # too far from this receiver
                #glogger.info("solver: bad range: {0}".format(d))
                return None

        # cov_x can be None when the Jacobian is singular at the solution;
        # return only the position part of the covariance otherwise.
        if cov_x is None:
            return position_est, None
        else:
            return position_est, cov_x[0:3, 0:3]

    else:
        # Solver failed
        #glogger.info("solver: failed: {0} {1}".format(ler, mesg))
        return None
121 |
--------------------------------------------------------------------------------
/mlat/server/net.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Some common networking bits.
21 | """
22 |
23 | import asyncio
24 | import logging
25 | import socket
26 |
27 | from mlat.server import util
28 |
29 |
30 | glogger = logging.getLogger("net")
31 |
32 |
class MonitoringListener(object):
    """A TCP listener that tracks its accepted client connections.

    Each accepted connection is wrapped by factory(reader, writer); the
    listener keeps the client list up to date by monitoring each client's
    wait_closed() and cancels/cleans up everything on close().
    """

    def __init__(self, host, port, factory, logger=glogger, description=None):
        if not description:
            description = self.__class__.__name__

        self.description = description
        self.logger = logger
        self.started = False
        self.host = host
        self.port = port
        # Called with (reader, writer) for each new connection.
        self.factory = factory
        self.tcp_server = None
        # Currently-connected client objects.
        self.clients = []
        # One monitor_client() task per connected client.
        self.monitoring = []

    @asyncio.coroutine
    def start(self):
        """Start listening; idempotent. Returns self."""
        if not self.started:
            yield from self._start()
            self.started = True

        return self

    # override as needed:

    @asyncio.coroutine
    def _start(self):
        self.tcp_server = yield from asyncio.start_server(self.start_client,
                                                          host=self.host,
                                                          port=self.port)
        # Log every bound socket (there may be several, e.g. IPv4 + IPv6).
        for s in self.tcp_server.sockets:
            name = s.getsockname()
            self.logger.info("{what} listening on {host}:{port} (TCP)".format(host=name[0],
                                                                              port=name[1],
                                                                              what=self.description))

    def _new_client(self, r, w):
        return self.factory(r, w)

    def _close(self):
        if self.tcp_server:
            self.tcp_server.close()
        for client in self.clients:
            client.close()
        self.clients.clear()

    # shouldn't need modifying:

    def start_client(self, r, w):
        """asyncio.start_server callback: wrap and track a new connection."""
        newclient = self._new_client(r, w)
        self.clients.append(newclient)
        # asyncio.async is the pre-3.4.4 spelling of ensure_future.
        self.monitoring.append(asyncio.async(self.monitor_client(newclient)))

    @asyncio.coroutine
    def monitor_client(self, client):
        """Remove the client (and this task) once the client closes."""
        yield from client.wait_closed()
        self.clients.remove(client)
        self.monitoring.remove(asyncio.Task.current_task())

    def close(self):
        if not self.started:
            return

        self.started = False
        self._close()

        # Cancel the monitor tasks for clients that were still connected.
        for m in self.monitoring:
            m.cancel()
        self.monitoring.clear()

    @asyncio.coroutine
    def wait_closed(self):
        yield from util.safe_wait(self.monitoring)
        if self.tcp_server:
            yield from self.tcp_server.wait_closed()
108 |
109 |
class MonitoringConnector(object):
    """An outbound TCP connection that automatically reconnects.

    Maintains at most one live connection, wrapping it with
    factory(reader, writer); when the connection closes (or fails to
    open) it waits reconnect_interval seconds and tries again.
    """

    def __init__(self, host, port, reconnect_interval, factory):
        self.started = False
        self.host = host
        self.port = port
        # Seconds to wait between connection attempts.
        self.reconnect_interval = reconnect_interval
        self.factory = factory
        self.reconnect_task = None
        # The current client object, or None when disconnected.
        self.client = None

    def start(self):
        """Start the reconnect loop; idempotent."""
        if not self.started:
            self.started = True
            # asyncio.async is the pre-3.4.4 spelling of ensure_future.
            self.reconnect_task = asyncio.async(self.reconnect())

        return util.completed_future

    @asyncio.coroutine
    def reconnect(self):
        """Connect, wait for the connection to die, back off, repeat."""
        while True:
            try:
                reader, writer = yield from asyncio.open_connection(self.host, self.port)
            except socket.error:
                # Connection refused / unreachable: retry after the interval.
                yield from asyncio.sleep(self.reconnect_interval)
                continue

            self.client = self.factory(reader, writer)
            yield from self.client.wait_closed()
            self.client = None
            yield from asyncio.sleep(self.reconnect_interval)

    def close(self):
        if not self.started:
            return

        self.started = False
        self.reconnect_task.cancel()
        if self.client:
            self.client.close()

    @asyncio.coroutine
    def wait_closed(self):
        yield from util.safe_wait([self.reconnect_task])
        if self.client:
            yield from self.client.wait_closed()
155 |
--------------------------------------------------------------------------------
/tools/sync/syncstats.js:
--------------------------------------------------------------------------------
function refresh() {
    // Fetch the latest sync state and rebuild the table when it arrives.
    var req = new XMLHttpRequest();
    req.onreadystatechange = function () {
        if (req.readyState == 4) {
            rebuildTable(JSON.parse(req.responseText));
        }
    };

    // Append the current time so intermediaries can't serve a cached copy.
    req.open("GET", "sync.json?" + new Date().getTime(), true);
    req.send();
}
14 |
function rebuildTable(state) {
    // Rebuild the receiver-vs-receiver sync matrix from scratch.
    // state maps receiver name -> { peers: { name: [count, err, ppm] } }.
    var table = document.getElementById("syncstatstable");
    while (table.firstChild) {
        table.removeChild(table.firstChild);
    }

    var receivers = Object.keys(state);
    receivers.sort();

    // Header row: blank corner, then a two-column header per receiver.
    var header_row = document.createElement('tr');

    var header_td = document.createElement('td');
    header_td.innerHTML = "&nbsp;";
    header_row.appendChild(header_td);

    for (var i = 0; i < receivers.length; ++i) {
        header_td = document.createElement('td');
        header_td.colSpan = "2";
        header_td.innerHTML = receivers[i];
        header_row.appendChild(header_td);
    }

    header_td = document.createElement('td');
    header_td.innerHTML = "&nbsp;";
    header_row.appendChild(header_td);

    table.appendChild(header_row);

    // Two <tr> per receiver: row 1 holds sync count + error, row 2 the
    // frequency offset (ppm) spanning both columns.
    for (var i = 0; i < receivers.length; ++i) {
        var data_row_1 = document.createElement('tr');
        var data_row_2 = document.createElement('tr');

        var header_col = document.createElement('td');
        header_col.innerHTML = receivers[i];
        header_col.rowSpan = "2";
        data_row_1.appendChild(header_col);

        var receiver_state = state[receivers[i]].peers;
        for (var j = 0; j < receivers.length; ++j) {
            var data_cell;
            if (i == j) {
                // Diagonal: a receiver never syncs with itself.
                data_cell = document.createElement('td');
                data_cell.colSpan = "2";
                data_cell.rowSpan = "2";
                data_cell.className = "sync_count sync_omit";
                data_cell.innerHTML = receivers[i];
                data_row_1.appendChild(data_cell);
            } else if (receivers[j] in receiver_state) {
                // syncstate is [pair count, sync error, clock offset ppm].
                var syncstate = receiver_state[receivers[j]];

                data_cell = document.createElement('td');
                data_cell.innerHTML = syncstate[0];
                if (syncstate[0] >= 10) {
                    data_cell.className = "sync_count sync_good";
                } else {
                    data_cell.className = "sync_count sync_ok";
                }
                data_row_1.appendChild(data_cell);

                data_cell = document.createElement('td');
                data_cell.innerHTML = syncstate[1].toFixed(1);
                if (syncstate[1] <= 2.0) {
                    data_cell.className = "sync_err sync_good";
                } else if (syncstate[1] <= 4.0) {
                    data_cell.className = "sync_err sync_ok";
                } else {
                    data_cell.className = "sync_err sync_bad";
                }
                data_row_1.appendChild(data_cell);

                data_cell = document.createElement('td')
                data_cell.innerHTML = syncstate[2].toFixed(2);
                if (Math.abs(syncstate[2]) <= 50.0) {
                    data_cell.className = "sync_ppm sync_good";
                } else if (Math.abs(syncstate[2]) <= 180.0) {
                    data_cell.className = "sync_ppm sync_ok";
                } else {
                    data_cell.className = "sync_ppm sync_bad";
                }
                data_cell.colSpan = "2";
                data_row_2.appendChild(data_cell);

            } else {
                // No sync data for this receiver pair: empty grey cells.
                data_cell = document.createElement('td');
                data_cell.innerHTML = "&nbsp;";
                data_cell.className = "sync_count sync_nodata";
                data_row_1.appendChild(data_cell);

                data_cell = document.createElement('td');
                data_cell.innerHTML = "&nbsp;";
                data_cell.className = "sync_err sync_nodata";
                data_row_1.appendChild(data_cell);

                data_cell = document.createElement('td')
                data_cell.innerHTML = "&nbsp;";
                data_cell.className = "sync_ppm sync_nodata";
                data_cell.colSpan = "2";
                data_row_2.appendChild(data_cell);
            }
        }

        // Repeat the receiver name at the right edge for readability.
        header_col = document.createElement('td');
        header_col.innerHTML = receivers[i];
        header_col.rowSpan = "2";
        data_row_1.appendChild(header_col);

        table.appendChild(data_row_1);
        table.appendChild(data_row_2);
    }

    // Footer row mirrors the header.
    var footer_row = document.createElement('tr');

    var footer_td = document.createElement('td');
    footer_td.innerHTML = "&nbsp;";
    footer_row.appendChild(footer_td);

    for (var i = 0; i < receivers.length; ++i) {
        footer_td = document.createElement('td');
        footer_td.colSpan = "2";
        footer_td.innerHTML = receivers[i];
        footer_row.appendChild(footer_td);
    }

    footer_td = document.createElement('td');
    footer_td.innerHTML = "&nbsp;";
    footer_row.appendChild(footer_td);

    table.appendChild(footer_row);


}
145 |
146 | window.setInterval(refresh, 5000);
147 |
--------------------------------------------------------------------------------
/mlat/server/tracker.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Maintains state for all aircraft known to some client.
21 | Works out the set of "interesting" aircraft and arranges for clients to
22 | send us traffic for these.
23 | """
24 |
25 | import asyncio
26 | from mlat import profile
27 | from mlat.server import kalman
28 |
29 |
class TrackedAircraft(object):
    """State for one aircraft as seen by the set of connected receivers."""

    def __init__(self, icao, allow_mlat):
        # Mode S (ICAO) address of this aircraft
        self.icao = icao

        # may this aircraft be multilaterated by this server?
        # (set from Tracker.in_local_partition at creation time)
        self.allow_mlat = allow_mlat

        # Receiver sets. Each mirrors a matching set on the receiver side,
        # i.e. r in a.tracking iff a in r.tracking (and likewise for the
        # two interest sets):
        #   tracking:        receivers currently seeing this aircraft
        #   sync_interest:   receivers wanting it for clock synchronization;
        #                    non-empty makes the aircraft interesting
        #   mlat_interest:   receivers wanting it for multilateration;
        #                    three or more make the aircraft interesting
        #   successful_mlat: receivers that contributed to at least one
        #                    multilateration result (used to decide who to
        #                    forward results to)
        self.tracking = set()
        self.sync_interest = set()
        self.mlat_interest = set()
        self.successful_mlat = set()

        # counters: resolves attempted / valid least-squares results /
        # valid Kalman-filter updates
        self.mlat_message_count = 0
        self.mlat_result_count = 0
        self.mlat_kalman_count = 0

        # most recent reported altitude and when we saw it (time.monotonic())
        self.altitude = None
        self.last_altitude_time = None

        # most recent multilateration outcome
        self.last_result_time = None      # monotonic time of the fix
        self.last_result_position = None  # ECEF position
        self.last_result_var = None       # solution variance
        self.last_result_distinct = None  # number of distinct receivers

        # Kalman filter state for this aircraft
        self.kalman = kalman.KalmanStateCA(self.icao)

        self.callsign = None
        self.squawk = None

    @property
    def interesting(self):
        """True if traffic for this aircraft should be forwarded to us:
        someone wants it for sync, or mlat is allowed and at least three
        receivers want it for multilateration."""
        if self.sync_interest:
            return True
        return bool(self.allow_mlat and len(self.mlat_interest) >= 3)

    def __lt__(self, other):
        # order by address so aircraft compare deterministically when they
        # appear inside sorted tuples
        return self.icao < other.icao
91 |
92 |
class Tracker(object):
    """Tracks which receivers can see which aircraft, and asks receivers to
    forward traffic accordingly."""

    def __init__(self, partition):
        """Create a tracker.

        partition: a (partition_number, partition_count) tuple, 1-based;
          aircraft are hashed across partitions so several server processes
          can split the multilateration workload.
        """
        # icao -> TrackedAircraft for every aircraft seen by any receiver
        self.aircraft = {}
        # stored 0-based so it can be compared directly with the hash
        # remainder in in_local_partition()
        self.partition_id = partition[0] - 1
        self.partition_count = partition[1]

    def in_local_partition(self, icao):
        """Return True if this server's partition is responsible for
        multilaterating the aircraft with ICAO address 'icao'."""
        if self.partition_count == 1:
            return True

        # mix the address a bit
        # (integer hash finalizer: spreads consecutive ICAO addresses
        # uniformly over the partitions)
        h = icao
        h = (((h >> 16) ^ h) * 0x45d9f3b) & 0xFFFFFFFF
        h = (((h >> 16) ^ h) * 0x45d9f3b) & 0xFFFFFFFF
        h = ((h >> 16) ^ h)
        return bool((h % self.partition_count) == self.partition_id)

    def add(self, receiver, icao_set):
        """Record that 'receiver' now sees every aircraft in 'icao_set',
        creating TrackedAircraft entries on first sight."""
        for icao in icao_set:
            ac = self.aircraft.get(icao)
            if ac is None:
                ac = self.aircraft[icao] = TrackedAircraft(icao, self.in_local_partition(icao))

            # maintain the invariant: r in a.tracking iff a in r.tracking
            ac.tracking.add(receiver)
            receiver.tracking.add(ac)

    def remove(self, receiver, icao_set):
        """Record that 'receiver' no longer sees the aircraft in 'icao_set';
        forget aircraft that nobody tracks any more."""
        for icao in icao_set:
            ac = self.aircraft.get(icao)
            if not ac:
                continue

            ac.tracking.discard(receiver)
            ac.successful_mlat.discard(receiver)
            receiver.tracking.discard(ac)
            if not ac.tracking:
                del self.aircraft[icao]

    def remove_all(self, receiver):
        """Remove 'receiver' from all aircraft state, e.g. on disconnect."""
        for ac in receiver.tracking:
            ac.tracking.discard(receiver)
            ac.successful_mlat.discard(receiver)
            ac.sync_interest.discard(receiver)
            ac.mlat_interest.discard(receiver)
            if not ac.tracking:
                del self.aircraft[ac.icao]

        receiver.tracking.clear()
        receiver.sync_interest.clear()
        receiver.mlat_interest.clear()

    @profile.trackcpu
    def update_interest(self, receiver):
        """Update the interest sets of one receiver based on the
        latest tracking and rate report data."""

        if receiver.last_rate_report is None:
            # Legacy client, no rate report, we cannot be very selective.
            new_sync = {ac for ac in receiver.tracking if len(ac.tracking) > 1}
            new_mlat = {ac for ac in receiver.tracking if ac.allow_mlat}
            receiver.update_interest_sets(new_sync, new_mlat)
            asyncio.get_event_loop().call_later(15.0, receiver.refresh_traffic_requests)
            return

        # Work out the aircraft that are transmitting ADS-B that this
        # receiver wants to use for synchronization.
        ac_to_ratepair_map = {}
        ratepair_list = []
        for icao, rate in receiver.last_rate_report.items():
            if rate < 0.20:
                # too few messages to be a useful sync target
                continue

            ac = self.aircraft.get(icao)
            if not ac:
                continue

            ac_to_ratepair_map[ac] = l = []  # list of (rateproduct, receiver, ac) tuples for this aircraft
            for r1 in ac.tracking:
                if receiver is r1:
                    continue

                if r1.last_rate_report is None:
                    # Receiver that does not produce rate reports, just take a guess.
                    rate1 = 1.0
                else:
                    rate1 = r1.last_rate_report.get(icao, 0.0)

                # expected rate of usable paired messages between this
                # receiver and r1 for this aircraft
                rp = rate * rate1 / 4.0
                if rp < 0.10:
                    continue

                ratepair = (rp, r1, ac)
                l.append(ratepair)
                ratepair_list.append(ratepair)

        # NOTE(review): this sorts ascending on rate-product, so the loop
        # below considers the lowest products first; also, ties on rp fall
        # through to comparing receiver objects — confirm both are intended.
        ratepair_list.sort()

        # Greedily pick sync aircraft until each peer receiver has an
        # accumulated rate-product of at least 1.0.
        ntotal = {}
        new_sync_set = set()
        for rp, r1, ac in ratepair_list:
            if ac in new_sync_set:
                continue  # already added

            if ntotal.get(r1, 0.0) < 1.0:
                # use this aircraft for sync
                new_sync_set.add(ac)
                # update rate-product totals for all receivers that see this aircraft
                for rp2, r2, ac2 in ac_to_ratepair_map[ac]:
                    ntotal[r2] = ntotal.get(r2, 0.0) + rp2

        # for multilateration we are interesting in
        # all aircraft that we are tracking but for
        # which we have no ADS-B rate (i.e. are not
        # transmitting positions)
        new_mlat_set = set()
        for ac in receiver.tracking:
            if ac.icao not in receiver.last_rate_report and ac.allow_mlat:
                new_mlat_set.add(ac)

        receiver.update_interest_sets(new_sync_set, new_mlat_set)
        asyncio.get_event_loop().call_later(15.0, receiver.refresh_traffic_requests)
217 |
--------------------------------------------------------------------------------
/tools/coverage/plot-coverage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python2
2 |
3 | import cairo, colorsys
4 | import csv, math, json, time, sys, gzip
5 | from contextlib import closing
6 |
class StationCoverage:
    """Accumulates multilateration positions into a lat/lon grid and renders
    the grid as a colour-coded PNG heatmap plus a javascript metadata stanza.

    NOTE: this is Python 2 code (print-statement syntax, see the shebang)."""

    def __init__(self, name, lat, lon, privacy=False, binsize=0.05, alt_mode=False, is_station=True):
        # name: label used in output filenames and metadata
        # lat/lon: station position; None/falsy for aggregate pseudo-stations
        # privacy: if True, do not publish the station position
        # binsize: grid cell size in degrees
        # alt_mode: if True, cells record minimum altitude instead of error
        # is_station: False for aggregate layers like 'all' or '4plus'
        self.name = name
        self.alt_mode = alt_mode
        if lat:
            if privacy:
                self.station_lat = self.station_lon = None
            else:
                # round the published position to half a bin so we don't
                # imply more precision than the plot has
                self.station_lat = round(lat / (binsize/2)) * (binsize/2)
                self.station_lon = round(lon / (binsize/2)) * (binsize/2)
        else:
            self.station_lat = self.station_lon = None

        self.binsize = binsize
        # (bin_lat, bin_lon) -> [count, total_err], or [1, min_alt] in alt_mode
        self.bins = {}
        # bounding box of all occupied cells, grown in add_position()
        self.min_lat = self.max_lat = self.min_lon = self.max_lon = None
        self.max_count = None
        self.is_station = is_station

    def add_position(self, lat, lon, alt, err_est):
        # Accumulate one position report into its grid cell.
        # Snap to the south-west corner of the containing cell.
        bin_lat = math.floor(lat / self.binsize) * self.binsize
        bin_lon = math.floor(lon / self.binsize) * self.binsize

        # grow the bounding box
        if self.min_lat is None or bin_lat < self.min_lat: self.min_lat = bin_lat
        if self.min_lon is None or bin_lon < self.min_lon: self.min_lon = bin_lon
        if self.max_lat is None or bin_lat > self.max_lat: self.max_lat = bin_lat
        if self.max_lon is None or bin_lon > self.max_lon: self.max_lon = bin_lon

        bin_key = (bin_lat, bin_lon)
        if self.alt_mode:
            # track the lowest altitude seen in this cell
            data = self.bins.setdefault(bin_key, [1,99999.0])
            data[1] = min(data[1], alt)
        else:
            # track position count and summed error estimate per cell
            data = self.bins.setdefault(bin_key, [0,0.0])
            data[0] += 1
            data[1] += err_est
            if self.max_count is None or data[0] > self.max_count: self.max_count = data[0]

    def write(self, basedir, pngfile, metafile, pixels_per_degree=None):
        # Render the accumulated grid to basedir/pngfile and append a
        # javascript 'coverage' entry describing it to the open 'metafile'.
        if len(self.bins) == 0: return

        if not pixels_per_degree: pixels_per_degree = math.ceil(4.0 / self.binsize)

        min_lon = self.min_lon
        min_lat = self.min_lat
        # extend by one bin so the outermost cells are fully inside the image
        max_lat = self.max_lat + self.binsize
        max_lon = self.max_lon + self.binsize
        binsize = self.binsize

        lon_span = (max_lon-min_lon)
        lat_span = (max_lat-min_lat)
        xsize = int(math.ceil(lon_span*pixels_per_degree))
        ysize = int(math.ceil(lat_span*pixels_per_degree))

        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, xsize, ysize)

        cc = cairo.Context(surface)

        cc.set_operator(cairo.OPERATOR_SOURCE)
        cc.set_antialias(cairo.ANTIALIAS_NONE)
        # map (lon, lat) user space onto image pixels; flip Y so north is up
        cc.scale(1.0 * xsize / lon_span, -1.0 * ysize / lat_span)
        cc.translate(-min_lon, -max_lat)

        # transparent background
        cc.set_source(cairo.SolidPattern(0,0,0,0))
        cc.paint()

        # draw bins
        for (bin_lat,bin_lon), (count,val) in self.bins.items():
            a = 0.75

            if self.alt_mode:
                # hue keyed to minimum altitude in the cell
                if val < 2000:
                    hue = 20
                elif val < 10000:
                    hue = 20 + 120.0 * (val-2000) / 8000.0
                elif val < 40000:
                    hue = 140 + 160 * (val-10000) / 30000.0
                else:
                    hue = 300
            else:
                # hue keyed to mean error estimate, clamped at the red end
                err_est = val / count
                hue = 150.0 - 150 * ((err_est / 5000.0)**0.5)
                if hue < 0:
                    hue = 0

            r,g,b = colorsys.hls_to_rgb(hue/360.0, 0.5, 1.0)
            cc.set_source(cairo.SolidPattern(r,g,b,a))

            # one filled rectangle per cell
            cc.move_to(bin_lon, bin_lat)
            cc.line_to(bin_lon + binsize, bin_lat)
            cc.line_to(bin_lon + binsize, bin_lat + binsize)
            cc.line_to(bin_lon, bin_lat + binsize)
            cc.close_path()
            cc.fill()

        surface.write_to_png(basedir + '/' + pngfile)

        print >>metafile, """
coverage['{name}'] = {{
  name: '{name}',
  lat: {station_lat},
  lon: {station_lon},
  min_lat: {min_lat},
  min_lon: {min_lon},
  max_lat: {max_lat},
  max_lon: {max_lon},
  image: '{pngfile}',
  is_station: {is_station}
}};""".format(name = self.name,
              station_lat = (self.station_lat is None) and 'null' or self.station_lat,
              station_lon = (self.station_lon is None) and 'null' or self.station_lon,
              min_lat = self.min_lat,
              min_lon = self.min_lon,
              max_lat = max_lat,
              max_lon = max_lon,
              pngfile = pngfile,
              is_station = self.is_station and "true" or "false")
125 |
def multiopen(path):
    """Open *path* for binary reading, transparently decompressing
    gzip files (recognized by a '.gz' suffix)."""
    opener = gzip.open if path.endswith('.gz') else open
    return opener(path, 'rb')
131 |
def plot_from_datafile(csvfile, jsonfile, outdir):
    # Read a (possibly gzipped) CSV of multilateration results plus a JSON
    # station-position map, accumulate coverage layers, and write one PNG
    # per layer plus a data.js metadata file into outdir.
    # (Python 2 code: print-statement syntax.)

    # aggregate pseudo-station layers; per-station layers are added below
    station_coverage = {
        '*' : StationCoverage('all', None, None, is_station=False),
        '4+' : StationCoverage('4plus', None, None, is_station=False),
        '5+' : StationCoverage('5plus', None, None, is_station=False),
        '6+' : StationCoverage('6plus', None, None, is_station=False),
        '10000-' : StationCoverage('below10000', None, None, is_station=False),
        '18000-' : StationCoverage('below18000', None, None, is_station=False),
        'byalt' : StationCoverage('byalt', None, None, alt_mode=True, is_station=False)
    }

    with closing(multiopen(jsonfile)) as f:
        station_data = json.load(f)

    # one layer per known station, honouring its privacy flag
    for station_name, station_pos in station_data.items():
        station_coverage[station_name] = StationCoverage(station_name, station_pos['lat'], station_pos['lon'], privacy=station_pos['privacy'])

    first = last = None
    num_positions = 0
    with closing(multiopen(csvfile)) as f:
        reader = csv.reader(f)
        for row in reader:
            try:
                # columns: time, address, callsign, squawk, position, error
                # estimate, station counts, contributing station list
                t,addr,callsign,squawk,lat,lon,alt,err_est,nstations,ndistinct,stationlist = row[:11]
            except ValueError as e:
                # malformed row: report it and keep going
                print 'row', reader.line_num, 'failed: ', str(e)
                print repr(row)
                continue

            t = float(t)
            lat = float(lat)
            lon = float(lon)
            alt = float(alt)
            err_est = max(0, float(err_est))
            nstations = int(nstations)
            ndistinct = int(ndistinct)

            # track the overall time span of the data
            if not first:
                first = last = t
            else:
                first = min(first,t)
                last = max(last,t)

            # feed the aggregate layers
            station_coverage['*'].add_position(lat,lon,alt,err_est)
            station_coverage['byalt'].add_position(lat,lon,alt,err_est)
            if nstations >= 4:
                station_coverage['4+'].add_position(lat,lon,alt,err_est)
            if nstations >= 5:
                station_coverage['5+'].add_position(lat,lon,alt,err_est)
            if nstations >= 6:
                station_coverage['6+'].add_position(lat,lon,alt,err_est)
            if alt <= 10000:
                station_coverage['10000-'].add_position(lat,lon,alt,err_est)
            if alt <= 18000:
                station_coverage['18000-'].add_position(lat,lon,alt,err_est)

            # feed the per-station layers, creating anonymous layers for
            # stations missing from the JSON map
            for s in stationlist.split(','):
                sc = station_coverage.get(s)
                if not sc:
                    sc = station_coverage[s] = StationCoverage(s, None, None)
                sc.add_position(lat, lon, alt, err_est)

            num_positions += 1

    # emit the metadata header, then render each layer (which appends its
    # own stanza to data.js)
    with closing(open(outdir + '/data.js', 'w')) as metafile:
        print >>metafile, "var first_position = '{d}';".format(d = time.strftime("%Y/%m/%d %H:%M:%S UTC", time.gmtime(first)))
        print >>metafile, "var last_position = '{d}';".format(d = time.strftime("%Y/%m/%d %H:%M:%S UTC", time.gmtime(last)))
        print >>metafile, "var num_positions = {n};".format(n = num_positions)
        print >>metafile, "var coverage = {};"
        for sc in station_coverage.values():
            pngfile = 'coverage_{n}.png'.format(n=sc.name)
            #print 'Writing', pngfile
            sc.write(basedir = outdir, pngfile = pngfile, metafile = metafile)
205 |
if __name__ == '__main__':
    # usage: plot-coverage.py <results.csv[.gz]> <stations.json[.gz]> <output-dir>
    plot_from_datafile(csvfile=sys.argv[1], jsonfile=sys.argv[2], outdir=sys.argv[3])
208 |
--------------------------------------------------------------------------------
/mlat/server/clocknorm.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Clock normalization routines.
21 | """
22 |
23 | import pygraph.classes.graph
24 | import pygraph.algorithms.minmax
25 |
26 | from mlat import profile
27 |
28 |
class _Predictor(object):
    """Simple object for holding prediction state"""
    def __init__(self, predict, variance):
        # callable mapping a timestamp in one clock's units to another's
        self.predict = predict
        # estimated variance introduced by applying this conversion
        self.variance = variance
34 |
35 |
def _identity_predict(x):
    """Identity conversion, used when two clocks share a common epoch."""
    return x
38 |
39 |
def _make_predictors(clocktracker, station0, station1):
    """Build a (p_01, p_10) predictor pair for a station pair:

    p_01 predicts a station1 timestamp given a station0 timestamp
    p_10 predicts a station0 timestamp given a station1 timestamp

    Returns None if no suitable clock sync model is available for
    this pair of stations.
    """

    if station0 is station1:
        return None

    if station0.clock.epoch is not None and station0.clock.epoch == station1.clock.epoch:
        # Both clocks are disciplined to the same epoch, so assume they are
        # closely synchronized to each other: the conversion is the identity,
        # with the combined jitter of the two clocks as its variance.
        shared = _Predictor(_identity_predict,
                            station0.clock.jitter ** 2 + station1.clock.jitter ** 2)
        return (shared, shared)

    # Clock pairings are keyed on the lesser station of the pair; remember
    # whether we looked the pair up in reverse order.
    if station0 < station1:
        pair = clocktracker.clock_pairs.get((station0, station1))
        flipped = False
    else:
        pair = clocktracker.clock_pairs.get((station1, station0))
        flipped = True

    if pair is None or not pair.valid:
        return None

    to_peer = _Predictor(pair.predict_peer, pair.variance)
    to_base = _Predictor(pair.predict_base, pair.variance)
    return (to_base, to_peer) if flipped else (to_peer, to_base)
70 |
71 |
72 | def _label_heights(g, node, heights):
73 | """Label each node in the tree with a root of 'node'
74 | with its height, filling the map 'heights' which
75 | should be initially empty."""
76 |
77 | # we use heights as a visited-map too.
78 | heights[node] = 0
79 | for each in g.neighbors(node):
80 | if each not in heights:
81 | _label_heights(g, each, heights)
82 | mn = heights[each] + g.edge_weight((node, each))
83 | if mn > heights[node]:
84 | heights[node] = mn
85 |
86 |
87 | def _tallest_branch(g, node, heights, ignore=None):
88 | """Find the edge in the tree rooted at 'node' that is part of
89 | the tallest branch. If ignore is not None, ignore that neighbour.
90 | Returns (pathlen,node)"""
91 | tallest = (0, None)
92 |
93 | for each in g.neighbors(node):
94 | if each is ignore:
95 | continue
96 |
97 | eh = heights[each] + g.edge_weight((node, each))
98 | if eh > tallest[0]:
99 | tallest = (eh, each)
100 |
101 | return tallest
102 |
103 |
104 | def _convert_timestamps(g, timestamp_map, predictor_map, node, results, conversion_chain, variance):
105 | """Rewrite node and all unvisited nodes reachable from node using the
106 | chain of clocksync objects in conversion_chain, populating the results dict.
107 |
108 | node: the root node to convert
109 | timestamp_map: dict of node -> [(timestamp, utc), ...] to convert
110 | results: dict of node -> (variance, [(converted timestamp, utc), ...])
111 | conversion_chain: list of predictor tuples to apply to node, in order
112 | variance: the total error introduced by chain: sum([p.variance for p in chain])
113 | """
114 |
115 | # convert our own timestamp using the provided chain
116 | r = []
117 | results[node] = (variance, r) # also used as a visited-map
118 | for ts, utc in timestamp_map[node]:
119 | for predictor in conversion_chain:
120 | ts = predictor.predict(ts)
121 | r.append((ts, utc))
122 |
123 | # convert all reachable unvisited nodes using a conversion to our timestamp
124 | # followed by the provided chain
125 | for neighbor in g.neighbors(node):
126 | if neighbor not in results:
127 | predictor = predictor_map[(neighbor, node)]
128 | _convert_timestamps(g, timestamp_map, predictor_map,
129 | neighbor,
130 | results,
131 | [predictor] + conversion_chain, variance + predictor.variance)
132 |
133 |
@profile.trackcpu
def normalize(clocktracker, timestamp_map):
    """
    Given {receiver: [(timestamp, utc), ...]}

    return [{receiver: (variance, [(timestamp, utc), ...])}, ...]
    where timestamps are normalized to some arbitrary base timescale within each map;
    one map is returned per connected subgraph."""

    # Represent the stations as a weighted graph where there
    # is an edge between S0 and S1 with weight W if we have a
    # sufficiently recent clock correlation between S0 and S1 with
    # estimated variance W.
    #
    # This graph may have multiple disconnected components. Treat
    # each separately and do this:
    #
    # Find the minimal spanning tree of the component. This will
    # give us the edges to use to convert between timestamps with
    # the lowest total error.
    #
    # Pick a central node of the MST to use as the timestamp
    # basis, where a central node is a node that minimizes the maximum
    # path cost from the central node to any other node in the spanning
    # tree.
    #
    # Finally, convert all timestamps in the tree to the basis of the
    # central node.

    # populate initial graph
    g = pygraph.classes.graph.graph()
    g.add_nodes(timestamp_map.keys())

    # build a weighted graph where edges represent usable clock
    # synchronization paths, and the weight of each edge represents
    # the estimated variance introducted by converting a timestamp
    # across that clock synchronization.

    # also build a map of predictor objects corresponding to the
    # edges for later use

    predictor_map = {}
    for si in timestamp_map.keys():
        for sj in timestamp_map.keys():
            # si < sj ensures each unordered pair is considered exactly once
            if si < sj:
                predictors = _make_predictors(clocktracker, si, sj)
                if predictors:
                    predictor_map[(si, sj)] = predictors[0]
                    predictor_map[(sj, si)] = predictors[1]
                    g.add_edge((si, sj), wt=predictors[0].variance)

    # find a minimal spanning tree for each component of the graph
    mst_forest = pygraph.algorithms.minmax.minimal_spanning_tree(g)

    # rebuild the graph with only the spanning edges, retaining weights
    # also note the roots of each tree as we go
    # (minimal_spanning_tree maps node -> parent; a None parent marks the
    # root of one spanning tree, i.e. one connected component)
    g = pygraph.classes.graph.graph()
    g.add_nodes(mst_forest.keys())
    roots = []
    for edge in mst_forest.items():
        if edge[1] is None:
            roots.append(edge[0])
        else:
            g.add_edge(edge, wt=predictor_map[edge].variance)

    # for each spanning tree, find a central node and convert timestamps
    components = []
    for root in roots:
        # label heights of nodes, where the height of a node is
        # the length of the most expensive path to a child of the node
        heights = {}
        _label_heights(g, root, heights)

        # Find the longest path in the spanning tree; we want to
        # resolve starting at the center of this path, as this minimizes
        # the maximum path length to any node

        # find the two tallest branches leading from the root
        tall1 = _tallest_branch(g, root, heights)
        tall2 = _tallest_branch(g, root, heights, ignore=tall1[1])

        # Longest path is TALL1 - ROOT - TALL2
        # We want to move along the path into TALL1 until the distances to the two
        # tips of the path are equal length. This is the same as finding a node on
        # the path within TALL1 with a height of about half the longest path.
        target = (tall1[0] + tall2[0]) / 2
        central = root
        step = tall1[1]
        # NOTE(review): ignore=central is a no-op here (a node is never its
        # own neighbour); it may have been intended to ignore the node we
        # arrived from — confirm before changing.
        while step and abs(heights[central] - target) > abs(heights[step] - target):
            central = step
            _, step = _tallest_branch(g, central, heights, ignore=central)

        # Convert timestamps so they are using the clock units of "central"
        # by walking the spanning tree edges. Then finally convert to wallclock
        # times as the last step by dividing by the final clock's frequency
        results = {}
        conversion_chain = [_Predictor(lambda x: x/central.clock.freq, central.clock.jitter**2)]
        _convert_timestamps(g, timestamp_map, predictor_map, central, results,
                            conversion_chain, central.clock.jitter**2)

        components.append(results)

    return components
237 |
--------------------------------------------------------------------------------
/mlat/server/main.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Top level server object, arg parsing, etc.
21 | """
22 |
23 | import asyncio
24 | import logging
25 | import signal
26 | import argparse
27 |
28 | from mlat.server import jsonclient, output, coordinator, leakcheck
29 |
30 |
def hostport(s):
    """argparse type converter: parse 'host:port' into a (host, port) tuple.

    Raises argparse.ArgumentTypeError for malformed input. Previously a
    non-numeric port escaped as a bare ValueError with an unhelpful message;
    now it is reported consistently with the other converters in this file.
    """
    try:
        # tuple unpacking raises ValueError when there are not exactly two
        # fields; int() raises ValueError for a non-numeric port
        host, port = s.split(':')
        return (host, int(port))
    except ValueError:
        raise argparse.ArgumentTypeError("{} should be in 'host:port' format".format(s))
36 |
37 |
def port_or_hostport(s):
    """argparse type converter: parse 'port' or 'host:port' into a
    (host, port) tuple, defaulting the host to '0.0.0.0'.

    Raises argparse.ArgumentTypeError for malformed input. Previously a
    non-numeric port escaped as a bare ValueError with an unhelpful message;
    now it is reported consistently with the other converters in this file.
    """
    try:
        parts = s.split(':')
        if len(parts) == 1:
            # bare port: listen on all interfaces
            return ('0.0.0.0', int(parts[0]))
        if len(parts) == 2:
            return (parts[0], int(parts[1]))
    except ValueError:
        pass  # non-numeric port: fall through to the uniform error

    raise argparse.ArgumentTypeError("{} should be in 'port' or 'host:port' format".format(s))
46 |
47 |
def host_and_ports(s):
    """argparse type converter accepting 'tcp_port', 'host:tcp_port',
    'tcp_port:udp_port' or 'host:tcp_port:udp_port'; returns a
    (host, tcp_port, udp_port) tuple with None for omitted fields."""
    try:
        parts = s.split(':')
        n = len(parts)

        if n == 1:
            return (None, int(parts[0]), None)
        if n == 3:
            return (parts[0], int(parts[1]), int(parts[2]))
        if n != 2:
            raise ValueError()  # handled by the ArgumentTypeError below

        # Two fields are ambiguous: prefer tcp_port:udp_port when both are
        # numeric, otherwise treat the input as host:tcp_port.
        try:
            return (None, int(parts[0]), int(parts[1]))
        except ValueError:
            return (parts[0], int(parts[1]), None)
    except ValueError:
        raise argparse.ArgumentTypeError("{} should be in one of these formats: 'tcp_port', 'host:tcp_port', 'tcp_port:udp_port', 'host:tcp_port:udp_port'".format(s))  # noqa
67 |
68 |
def partition_id_and_count(s):
    """argparse type converter: parse 'I/N' into an (I, N) tuple, where N is
    the total number of partitions and I (1..N) is this server's partition."""
    try:
        first, second = s.split('/')
        index, count = int(first), int(second)
        if not (1 <= index <= count):
            raise ValueError()  # out of range: report via the message below
        return (index, count)
    except ValueError:
        raise argparse.ArgumentTypeError("{} should be in the form I/N, where N is the total number of partitions and I is the partition for this server (1..N)".format(s))  # noqa
83 |
84 |
85 | class MlatServer(object):
86 | """The multilateration server.
87 |
88 | Derive from this if you want to add options, etc.
89 | """
90 |
91 | def __init__(self):
92 | self.loop = asyncio.get_event_loop()
93 | self.coordinator = None
94 |
95 | def add_client_args(self, parser):
96 | parser.add_argument('--client-listen',
97 | help="listen on a [host:]tcp_port[:udp_port] for connections from multilateration clients. You should have at least one of these!", # noqa
98 | type=host_and_ports,
99 | action='append',
100 | default=[])
101 | parser.add_argument('--motd',
102 | type=str,
103 | help="set the server MOTD sent to clients.",
104 | default="In-development v2 server. Expect odd behaviour.")
105 |
106 | def add_output_args(self, parser):
107 | parser.add_argument('--write-csv',
108 | help="write results in CSV format to a local file.",
109 | action='append',
110 | default=[])
111 |
112 | parser.add_argument('--basestation-connect',
113 | help="connect to a host:port and send Basestation-format results.",
114 | action='append',
115 | type=hostport,
116 | default=[])
117 | parser.add_argument('--basestation-listen',
118 | help="listen on a [host:]port and send Basestation-format results to clients that connect.",
119 | action='append',
120 | type=port_or_hostport,
121 | default=[])
122 |
123 | parser.add_argument('--filtered-basestation-connect',
124 | help="connect to a host:port and send Basestation-format results.",
125 | action='append',
126 | type=hostport,
127 | default=[])
128 | parser.add_argument('--filtered-basestation-listen',
129 | help="listen on a [host:]port and send Basestation-format results to clients that connect.",
130 | action='append',
131 | type=port_or_hostport,
132 | default=[])
133 |
134 | def add_util_args(self, parser):
135 | parser.add_argument('--work-dir',
136 | help="directory for debug/stats output and blacklist",
137 | required=True)
138 |
139 | parser.add_argument('--check-leaks',
140 | help="run periodic memory leak checks (requires objgraph package).",
141 | action='store_true',
142 | default=False)
143 |
144 | parser.add_argument('--dump-pseudorange',
145 | help="dump pseudorange data in json format to a file")
146 |
147 | parser.add_argument('--partition',
148 | help="enable partitioning (n/count)",
149 | type=partition_id_and_count,
150 | default=(1, 1))
151 |
152 | parser.add_argument('--tag',
153 | help="set process name prefix (requires setproctitle module)",
154 | default='mlat-server')
155 |
156 | def make_arg_parser(self):
157 | parser = argparse.ArgumentParser(description="Multilateration server.")
158 |
159 | self.add_client_args(parser.add_argument_group('Client connections'))
160 | self.add_output_args(parser.add_argument_group('Output methods'))
161 | self.add_util_args(parser.add_argument_group('Utility options'))
162 |
163 | return parser
164 |
165 | def make_subtasks(self, args):
166 | return ([self.coordinator] +
167 | self.make_util_subtasks(args) +
168 | self.make_output_subtasks(args) +
169 | self.make_client_subtasks(args))
170 |
171 | def make_client_subtasks(self, args):
172 | subtasks = []
173 |
174 | for host, tcp_port, udp_port in args.client_listen:
175 | subtasks.append(jsonclient.JsonClientListener(host=host,
176 | tcp_port=tcp_port,
177 | udp_port=udp_port,
178 | coordinator=self.coordinator,
179 | motd=args.motd))
180 |
181 | return subtasks
182 |
183 | def make_output_subtasks(self, args):
184 | subtasks = []
185 |
186 | for host, port in args.basestation_connect:
187 | subtasks.append(output.make_basestation_connector(host=host,
188 | port=port,
189 | coordinator=self.coordinator,
190 | use_kalman_data=False))
191 |
192 | for host, port in args.basestation_listen:
193 | subtasks.append(output.make_basestation_listener(host=host,
194 | port=port,
195 | coordinator=self.coordinator,
196 | use_kalman_data=False))
197 |
198 | for host, port in args.filtered_basestation_connect:
199 | subtasks.append(output.make_basestation_connector(host=host,
200 | port=port,
201 | coordinator=self.coordinator,
202 | use_kalman_data=True))
203 |
204 | for host, port in args.filtered_basestation_listen:
205 | subtasks.append(output.make_basestation_listener(host=host,
206 | port=port,
207 | coordinator=self.coordinator,
208 | use_kalman_data=True))
209 |
210 | for filename in args.write_csv:
211 | subtasks.append(output.LocalCSVWriter(coordinator=self.coordinator,
212 | filename=filename))
213 |
214 | return subtasks
215 |
216 | def make_util_subtasks(self, args):
217 | subtasks = []
218 |
219 | if args.check_leaks:
220 | subtasks.append(leakcheck.LeakChecker())
221 |
222 | return subtasks
223 |
    def stop(self, msg):
        """Signal handler target: log the shutdown reason and halt the event loop."""
        logging.info(msg)
        self.loop.stop()
227 |
    def run(self):
        """Main entry point: parse arguments, start all subtasks, run the
        event loop until stop() is called, then shut everything down.

        Subtasks are started in list order and closed in reverse order so
        that dependents shut down before the things they depend on.
        """
        args = self.make_arg_parser().parse_args()

        self.coordinator = coordinator.Coordinator(work_dir=args.work_dir,
                                                   pseudorange_filename=args.dump_pseudorange,
                                                   partition=args.partition,
                                                   tag=args.tag)

        subtasks = self.make_subtasks(args)

        # Start everything
        startup = asyncio.gather(*[x.start() for x in subtasks])
        self.loop.run_until_complete(startup)
        startup.result()  # provoke exceptions if something failed

        # SIGINT/SIGTERM both trigger a clean shutdown via stop().
        self.loop.add_signal_handler(signal.SIGINT, self.stop, "Halting on SIGINT")
        self.loop.add_signal_handler(signal.SIGTERM, self.stop, "Halting on SIGTERM")

        self.loop.run_forever()  # Well, until stop() is called anyway!

        logging.info("Server shutting down.")

        # Stop everything, in reverse startup order.
        for t in reversed(subtasks):
            t.close()

        # Wait for completion; return_exceptions=True so one failing task
        # doesn't prevent us from waiting on the rest.
        shutdown = asyncio.gather(*[t.wait_closed() for t in subtasks], return_exceptions=True)
        self.loop.run_until_complete(shutdown)
        for e in shutdown.result():
            if isinstance(e, Exception) and not isinstance(e, asyncio.CancelledError):
                logging.error("Exception thrown during shutdown", exc_info=(type(e), e, e.__traceback__))

        self.loop.close()
        logging.info("Server shutdown done.")
263 |
--------------------------------------------------------------------------------
/mlat/server/output.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program.  If not, see <https://www.gnu.org/licenses/>.
18 |
19 | import asyncio
20 | import logging
21 | import time
22 | import math
23 | import functools
24 | import socket
25 | import numpy
26 |
27 | from mlat import constants, geodesy
28 | from mlat.server import util, net
29 |
30 | """
31 | Various output methods for multilateration results.
32 | """
33 |
34 |
def format_time(timestamp):
    """Format a UNIX timestamp as HH:MM:SS.mmm (UTC).

    The millisecond field is truncated, not rounded: the previous
    '{0:03.0f}' formatting rounded the fraction, so a timestamp with a
    fractional part >= 0.9995 rendered as a four-digit '.1000' and
    disagreed with the seconds value (time.gmtime() truncates).
    """
    millis = int(math.modf(timestamp)[0] * 1000)
    return time.strftime("%H:%M:%S", time.gmtime(timestamp)) + ".{0:03d}".format(millis)
37 |
38 |
def format_date(timestamp):
    """Format a UNIX timestamp as YYYY/MM/DD (UTC)."""
    utc_struct = time.gmtime(timestamp)
    return time.strftime("%Y/%m/%d", utc_struct)
41 |
42 |
def csv_quote(s):
    """CSV-quote a single field.

    None becomes the empty string; plain text passes through unchanged;
    anything containing a newline, double quote, or comma is wrapped in
    double quotes with embedded quotes doubled.
    """
    if s is None:
        return ''
    needs_quoting = any(ch in s for ch in ('\n', '"', ','))
    if not needs_quoting:
        return s
    return '"{0}"'.format(s.replace('"', '""'))
50 |
51 |
class LocalCSVWriter(object):
    """Writes multilateration results to a local CSV file.

    One row is appended per result.  Rows with fresh Kalman-filter state
    use KTEMPLATE, which appends the extra k* columns; otherwise the plain
    TEMPLATE is used.  The file is reopened on SIGHUP to support log
    rotation.
    """

    TEMPLATE = '{t:.3f},{address:06X},{callsign},{squawk},{lat:.4f},{lon:.4f},{alt:.0f},{err:.0f},{n},{d},{receivers},{dof}\n'  # noqa
    KTEMPLATE = '{t:.3f},{address:06X},{callsign},{squawk},{lat:.4f},{lon:.4f},{alt:.0f},{err:.0f},{n},{d},{receivers},{dof},{klat:.4f},{klon:.4f},{kalt:.0f},{kheading:.0f},{kspeed:.0f},{kvrate:.0f},{kerr:.0f}\n'  # noqa

    def __init__(self, coordinator, filename):
        """Open *filename* for append and register with *coordinator* for
        result output and SIGHUP notifications."""
        self.logger = logging.getLogger("csv")
        self.coordinator = coordinator
        self.filename = filename
        self.f = open(filename, 'a')
        self.coordinator.add_output_handler(self.write_result)
        self.coordinator.add_sighup_handler(self.reopen)

    def start(self):
        # Nothing asynchronous to start; satisfy the subtask interface.
        return util.completed_future

    def close(self):
        """Deregister from the coordinator and close the output file."""
        self.coordinator.remove_output_handler(self.write_result)
        self.coordinator.remove_sighup_handler(self.reopen)
        self.f.close()

    def wait_closed(self):
        # close() is synchronous, so there is nothing to wait for.
        return util.completed_future

    def reopen(self):
        """SIGHUP handler: close and reopen the output file (log rotation)."""
        try:
            self.f.close()
            self.f = open(self.filename, 'a')
            # Bug fix: these messages previously had no {filename}
            # placeholder, so the .format() call was a no-op and the
            # filename never appeared in the log.
            self.logger.info("Reopened {filename}".format(filename=self.filename))
        except Exception:
            self.logger.exception("Failed to reopen {filename}".format(filename=self.filename))

    def write_result(self, receive_timestamp, address, ecef, ecef_cov, receivers, distinct, dof, kalman_state):
        """Output-handler callback: append one CSV row for a result.

        Any exception is logged and swallowed so that a write failure
        cannot propagate back into the multilateration pipeline.
        """
        try:
            lat, lon, alt = geodesy.ecef2llh(ecef)

            ac = self.coordinator.tracker.aircraft[address]
            callsign = ac.callsign
            squawk = ac.squawk

            # Error estimate: sqrt of the ECEF covariance trace, or -1
            # when no covariance (or a degenerate one) is available.
            if ecef_cov is None:
                err_est = -1
            else:
                var_est = numpy.sum(numpy.diagonal(ecef_cov))
                if var_est >= 0:
                    err_est = math.sqrt(var_est)
                else:
                    err_est = -1

            # Use the extended row only when the Kalman state is valid and
            # at least as fresh as this result.
            if kalman_state.valid and kalman_state.last_update >= receive_timestamp:
                line = self.KTEMPLATE.format(
                    t=receive_timestamp,
                    address=address,
                    callsign=csv_quote(callsign),
                    squawk=csv_quote(squawk),
                    lat=lat,
                    lon=lon,
                    alt=alt * constants.MTOF,
                    err=err_est,
                    n=len(receivers),
                    d=distinct,
                    dof=dof,
                    receivers=csv_quote(','.join([receiver.uuid for receiver in receivers])),
                    klat=kalman_state.position_llh[0],
                    klon=kalman_state.position_llh[1],
                    kalt=kalman_state.position_llh[2] * constants.MTOF,
                    kheading=kalman_state.heading,
                    kspeed=kalman_state.ground_speed * constants.MS_TO_KTS,
                    kvrate=kalman_state.vertical_speed * constants.MS_TO_FPM,
                    kerr=kalman_state.position_error)
            else:
                line = self.TEMPLATE.format(
                    t=receive_timestamp,
                    address=address,
                    callsign=csv_quote(callsign),
                    squawk=csv_quote(squawk),
                    lat=lat,
                    lon=lon,
                    alt=alt * constants.MTOF,
                    err=err_est,
                    n=len(receivers),
                    d=distinct,
                    dof=dof,
                    receivers=csv_quote(','.join([receiver.uuid for receiver in receivers])))

            self.f.write(line)

        except Exception:
            self.logger.exception("Failed to write result")
            # swallow the exception so we don't affect our caller
143 |
144 |
class BasestationClient(object):
    """Writes results in Basestation port-30003 format to network clients.

    One instance exists per connected client.  Each result becomes one
    MSG,3 line; a bare newline heartbeat is sent whenever no output has
    been produced for heartbeat_interval seconds.
    """

    # MSG,3 row; fields the server cannot fill (fs/emerg/ident/aog, and
    # speed/heading/vrate in non-Kalman mode) are left empty.
    TEMPLATE = 'MSG,{mtype},1,1,{addr:06X},1,{rcv_date},{rcv_time},{now_date},{now_time},{callsign},{altitude},{speed},{heading},{lat},{lon},{vrate},{squawk},{fs},{emerg},{ident},{aog}\n'  # noqa

    def __init__(self, reader, writer, *, coordinator, use_kalman_data, heartbeat_interval=30.0):
        """Wrap an accepted (reader, writer) stream pair and register with
        the coordinator to receive results.

        use_kalman_data: if True, only emit results that have fresh
        Kalman-filter state (and include its speed/heading/vrate).
        """
        peer = writer.get_extra_info('peername')
        self.host = peer[0]
        self.port = peer[1]
        self.logger = util.TaggingLogger(logging.getLogger("basestation"),
                                         {'tag': '{host}:{port}'.format(host=self.host,
                                                                        port=self.port)})
        self.reader = reader
        self.writer = writer
        self.coordinator = coordinator
        self.use_kalman_data = use_kalman_data
        self.heartbeat_interval = heartbeat_interval
        self.last_output = time.monotonic()
        # NOTE(review): asyncio.async() is the pre-3.4.4 spelling of
        # asyncio.ensure_future(); it is a syntax error on Python 3.7+
        # where 'async' became a keyword.  Fine for the 3.4 target, but
        # blocks any interpreter upgrade.
        self.heartbeat_task = asyncio.async(self.send_heartbeats())
        self.reader_task = asyncio.async(self.read_until_eof())

        self.logger.info("Connection established")
        self.coordinator.add_output_handler(self.write_result)

    def close(self):
        """Tear down the connection; safe to call more than once."""
        if not self.writer:
            return  # already closed

        self.logger.info("Connection lost")
        self.coordinator.remove_output_handler(self.write_result)
        self.heartbeat_task.cancel()
        self.writer.close()
        self.writer = None  # marks this client as closed

    @asyncio.coroutine
    def wait_closed(self):
        # safe_wait tolerates tasks that were cancelled or raised.
        yield from util.safe_wait([self.heartbeat_task, self.reader_task])

    @asyncio.coroutine
    def read_until_eof(self):
        """Consume (and discard) any client input; close on EOF or error."""
        try:
            while True:
                r = yield from self.reader.read(1024)
                if len(r) == 0:
                    self.logger.info("Client EOF")
                    # EOF
                    self.close()
                    return
        except socket.error:
            self.close()
            return

    @asyncio.coroutine
    def send_heartbeats(self):
        """Send a newline heartbeat whenever output has been idle for
        heartbeat_interval seconds; runs until cancelled or the socket fails."""
        try:
            while True:
                now = time.monotonic()
                # Sleep until the next heartbeat would be due; any
                # write_result() in the meantime pushes last_output forward.
                delay = self.last_output + self.heartbeat_interval - now
                if delay > 0.1:
                    yield from asyncio.sleep(delay)
                    continue

                self.writer.write(b'\n')
                self.last_output = now

        except socket.error:
            self.close()
            return

    def write_result(self, receive_timestamp, address, ecef, ecef_cov, receivers, distinct, dof, kalman_data):
        """Output-handler callback: format one result as a MSG,3 line.

        Exceptions are logged and swallowed so a client failure cannot
        propagate into the multilateration pipeline.
        """
        try:
            if self.use_kalman_data:
                # Kalman mode: skip results without fresh filter state.
                if not kalman_data.valid or kalman_data.last_update < receive_timestamp:
                    return

                lat, lon, alt = kalman_data.position_llh
                speed = int(round(kalman_data.ground_speed * constants.MS_TO_KTS))
                heading = int(round(kalman_data.heading))
                vrate = int(round(kalman_data.vertical_speed * constants.MS_TO_FPM))
            else:
                # Raw mode: position only, no velocity information.
                lat, lon, alt = geodesy.ecef2llh(ecef)
                speed = ''
                heading = ''
                vrate = ''

            ac = self.coordinator.tracker.aircraft[address]
            callsign = ac.callsign
            squawk = ac.squawk
            altitude = int(round(alt * constants.MTOF))
            send_timestamp = time.time()

            line = self.TEMPLATE.format(mtype=3,
                                        addr=address,
                                        rcv_date=format_date(receive_timestamp),
                                        rcv_time=format_time(receive_timestamp),
                                        now_date=format_date(send_timestamp),
                                        now_time=format_time(send_timestamp),
                                        callsign=csv_quote(callsign),
                                        squawk=csv_quote(squawk),
                                        lat=round(lat, 4),
                                        lon=round(lon, 4),
                                        altitude=altitude,
                                        speed=speed,
                                        heading=heading,
                                        vrate=vrate,
                                        fs='',
                                        emerg='',
                                        ident='',
                                        aog='')
            self.writer.write(line.encode('ascii'))
            self.last_output = time.monotonic()

        except Exception:
            self.logger.exception("Failed to write result")
            # swallow the exception so we don't affect our caller
260 |
261 |
def make_basestation_listener(host, port, coordinator, use_kalman_data):
    """Return a listener that serves Basestation-format output to clients
    connecting to host:port."""
    client_factory = functools.partial(BasestationClient,
                                       coordinator=coordinator,
                                       use_kalman_data=use_kalman_data)
    listener_logger = logging.getLogger('basestation')
    return net.MonitoringListener(host, port, client_factory,
                                  logger=listener_logger,
                                  description='Basestation output listener')
269 |
270 |
def make_basestation_connector(host, port, coordinator, use_kalman_data):
    """Return a connector that delivers Basestation-format output to a
    remote host:port, reconnecting every 30 seconds when down."""
    client_factory = functools.partial(BasestationClient,
                                       coordinator=coordinator,
                                       use_kalman_data=use_kalman_data)
    return net.MonitoringConnector(host, port, 30.0, client_factory)
276 |
--------------------------------------------------------------------------------
/mlat/server/clocksync.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program.  If not, see <https://www.gnu.org/licenses/>.
18 |
19 | """
20 | Maintains clock synchronization between individual pairs of receivers.
21 | """
22 |
23 | import math
24 | import time
25 | import bisect
26 | import logging
27 |
__all__ = ('Clock', 'ClockPairing', 'make_clock')

# Module-level logger used for clock-sync diagnostics (resets, outlier steps).
glogger = logging.getLogger("clocksync")
31 |
32 |
class Clock(object):
    """A particular clock. Stores characteristics of a clock,
    and acts as part of the key in the clock pairing map.
    """

    def __init__(self, epoch, freq, max_freq_error, jitter):
        """Create a new clock representation.

        epoch: a string naming a fixed epoch, or None for a freerunning clock
        freq: nominal clock frequency in Hz (float)
        max_freq_error: maximum expected relative frequency error,
            e.g. 1e-6 means 1 PPM (float)
        jitter: expected standard deviation of a single reading,
            in seconds (float)
        """
        self.epoch = epoch
        self.jitter = jitter
        self.freq = freq
        self.max_freq_error = max_freq_error
50 |
51 |
def make_clock(clock_type):
    """Return a new Clock instance for the given clock type.

    Raises NotImplementedError for unrecognized clock types.
    """
    known_clocks = {
        'radarcape_gps': dict(epoch='gps_midnight', freq=1e9, max_freq_error=1e-6, jitter=15e-9),
        'beast': dict(epoch=None, freq=12e6, max_freq_error=5e-6, jitter=83e-9),
        'sbs': dict(epoch=None, freq=20e6, max_freq_error=100e-6, jitter=500e-9),
        'dump1090': dict(epoch=None, freq=12e6, max_freq_error=100e-6, jitter=500e-9),
    }
    try:
        params = known_clocks[clock_type]
    except KeyError:
        raise NotImplementedError
    return Clock(**params)
64 |
65 |
class ClockPairing(object):
    """Describes the current relative characteristics of a pair of clocks.

    Maintains a list of recent (base, peer) timestamp pairs plus a drift
    estimate, and can predict one clock's reading from the other's by
    interpolation (within the data) or drift-corrected extrapolation
    (outside it).
    """

    # Gains of the drift feedback: KP scales each new drift observation,
    # KI scales the accumulated prediction error folded back into drift.
    KP = 0.05
    KI = 0.01

    def __init__(self, base, peer):
        # The two receivers being paired and their clock characteristics.
        self.base = base
        self.peer = peer
        self.base_clock = base.clock
        self.peer_clock = peer.clock
        # Drift state: raw_drift is the filtered observation, drift adds
        # the integral correction, i_drift is the inverse-direction drift.
        self.raw_drift = None
        self.drift = None
        self.i_drift = None
        # Sync-point history: n entries in ts_base/ts_peer (ascending),
        # with per-point squared prediction errors in var.
        self.n = 0
        self.ts_base = []
        self.ts_peer = []
        self.var = []
        self.var_sum = 0.0
        self.outliers = 0
        self.cumulative_error = 0.0

        self.relative_freq = peer.clock.freq / base.clock.freq
        self.i_relative_freq = base.clock.freq / peer.clock.freq
        self.drift_max = base.clock.max_freq_error + peer.clock.max_freq_error
        self.drift_max_delta = self.drift_max / 10.0
        self.outlier_threshold = 5 * math.sqrt(peer.clock.jitter ** 2 + base.clock.jitter ** 2)  # 5 sigma

        now = time.monotonic()
        self.expiry = now + 120.0
        self.validity = now + 30.0

    def is_new(self, base_ts):
        """Returns True if the given base timestamp is in the extrapolation region."""
        return bool(self.n == 0 or self.ts_base[-1] < base_ts)

    @property
    def variance(self):
        """Variance of recent predictions of the sync point versus the actual sync point."""
        if self.n == 0:
            return None
        return self.var_sum / self.n

    @property
    def error(self):
        """Standard error of recent predictions."""
        if self.n == 0:
            return None
        return math.sqrt(self.var_sum / self.n)

    @property
    def valid(self):
        """True if this pairing is usable for clock syncronization."""
        return bool(self.n >= 2 and (self.var_sum / self.n) < 16e-12 and
                    self.outliers == 0 and self.validity > time.monotonic())

    def update(self, address, base_ts, peer_ts, base_interval, peer_interval):
        """Update the relative drift and offset of this pairing given:

        address: the ICAO address of the sync aircraft, for logging purposes
        base_ts: the timestamp of a recent point in time measured by the base clock
        peer_ts: the timestamp of the same point in time measured by the peer clock
        base_interval: the duration of a recent interval measured by the base clock
        peer_interval: the duration of the same interval measured by the peer clock

        Returns True if the update was used, False if it was an outlier.
        """

        # clean old data
        self._prune_old_data(base_ts)

        # predict from existing data, compare to actual value
        if self.n > 0:
            prediction = self.predict_peer(base_ts)
            prediction_error = (prediction - peer_ts) / self.peer_clock.freq

            # Reject up to 4 consecutive outliers; the 5th is accepted
            # anyway so a genuine step eventually gets through.
            if abs(prediction_error) > self.outlier_threshold and abs(prediction_error) > self.error * 5:
                self.outliers += 1
                if self.outliers < 5:
                    # don't accept this one
                    return False
        else:
            prediction_error = 0  # first sync point, no error

        # update clock drift based on interval ratio
        # this might reject the update
        if not self._update_drift(address, base_interval, peer_interval):
            return False

        # update clock offset based on the actual clock values
        self._update_offset(address, base_ts, peer_ts, prediction_error)

        now = time.monotonic()
        self.expiry = now + 120.0
        self.validity = now + 30.0
        return True

    def _prune_old_data(self, latest_base_ts):
        """Drop sync points older than 30 seconds of base-clock time and
        recompute the cached variance sum."""
        i = 0
        while i < self.n and (latest_base_ts - self.ts_base[i]) > 30*self.base_clock.freq:
            i += 1

        if i > 0:
            del self.ts_base[0:i]
            del self.ts_peer[0:i]
            del self.var[0:i]
            self.n -= i
            self.var_sum = sum(self.var)

    def _update_drift(self, address, base_interval, peer_interval):
        """Fold a new interval-ratio drift observation into the estimate.

        Returns False (update rejected) when the observation is outside
        the physically plausible range or too far from the current value.
        """
        # try to reduce the effects of catastropic cancellation here:
        #new_drift = (peer_interval / base_interval) / self.relative_freq - 1.0
        adjusted_base_interval = base_interval * self.relative_freq
        new_drift = (peer_interval - adjusted_base_interval) / adjusted_base_interval

        if abs(new_drift) > self.drift_max:
            # Bad data, ignore entirely
            return False

        if self.drift is None:
            # First sample, just trust it outright
            self.raw_drift = self.drift = new_drift
            self.i_drift = -self.drift / (1.0 + self.drift)
            return True

        drift_error = new_drift - self.raw_drift
        if abs(drift_error) > self.drift_max_delta:
            # Too far away from the value we expect, discard
            return False

        # move towards the new value
        self.raw_drift += drift_error * self.KP
        self.drift = self.raw_drift - self.KI * self.cumulative_error
        self.i_drift = -self.drift / (1.0 + self.drift)
        return True

    def _update_offset(self, address, base_ts, peer_ts, prediction_error):
        """Append a new sync point, resetting all history first if the
        peer timestamps would become non-monotonic."""
        # insert this into self.ts_base / self.ts_peer / self.var in the right place
        if self.n != 0:
            assert base_ts > self.ts_base[-1]

            # ts_base and ts_peer define a function constructed by linearly
            # interpolating between each pair of values.
            #
            # This function must be monotonically increasing or one of our clocks
            # has effectively gone backwards. If this happens, give up and start
            # again.

            if peer_ts < self.ts_peer[-1]:
                glogger.info("{0}: monotonicity broken, reset".format(self))
                self.ts_base = []
                self.ts_peer = []
                self.var = []
                self.var_sum = 0
                self.cumulative_error = 0
                self.n = 0

        self.n += 1
        self.ts_base.append(base_ts)
        self.ts_peer.append(peer_ts)

        p_var = prediction_error ** 2
        self.var.append(p_var)
        self.var_sum += p_var

        # if we are accepting an outlier, do not include it in our integral term
        if not self.outliers:
            self.cumulative_error = max(-50e-6, min(50e-6, self.cumulative_error + prediction_error))  # limit to 50us

        self.outliers = max(0, self.outliers - 2)

        if self.outliers and abs(prediction_error) > self.outlier_threshold:
            glogger.info("{r}: {a:06X}: step by {e:.1f}us".format(r=self, a=address, e=prediction_error*1e6))

    def predict_peer(self, base_ts):
        """
        Given a time from the base clock, predict the time of the peer clock.
        """

        if self.n == 0:
            return None

        i = bisect.bisect_left(self.ts_base, base_ts)
        if i == 0:
            # extrapolate before first point
            elapsed = base_ts - self.ts_base[0]
            return (self.ts_peer[0] +
                    elapsed * self.relative_freq +
                    elapsed * self.relative_freq * self.drift)
        elif i == self.n:
            # extrapolate after last point
            elapsed = base_ts - self.ts_base[-1]
            return (self.ts_peer[-1] +
                    elapsed * self.relative_freq +
                    elapsed * self.relative_freq * self.drift)
        else:
            # interpolate between two points
            return (self.ts_peer[i-1] +
                    (self.ts_peer[i] - self.ts_peer[i-1]) *
                    (base_ts - self.ts_base[i-1]) /
                    (self.ts_base[i] - self.ts_base[i-1]))

    def predict_base(self, peer_ts):
        """
        Given a time from the peer clock, predict the time of the base
        clock.
        """

        if self.n == 0:
            return None

        i = bisect.bisect_left(self.ts_peer, peer_ts)
        if i == 0:
            # extrapolate before first point
            elapsed = peer_ts - self.ts_peer[0]
            return (self.ts_base[0] +
                    elapsed * self.i_relative_freq +
                    elapsed * self.i_relative_freq * self.i_drift)
        elif i == self.n:
            # extrapolate after last point
            elapsed = peer_ts - self.ts_peer[-1]
            return (self.ts_base[-1] +
                    elapsed * self.i_relative_freq +
                    elapsed * self.i_relative_freq * self.i_drift)
        else:
            # interpolate between two points
            return (self.ts_base[i-1] +
                    (self.ts_base[i] - self.ts_base[i-1]) *
                    (peer_ts - self.ts_peer[i-1]) /
                    (self.ts_peer[i] - self.ts_peer[i-1]))

    def __str__(self):
        return self.base.uuid + ':' + self.peer.uuid
299 |
--------------------------------------------------------------------------------
/modes/message.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program.  If not, see <https://www.gnu.org/licenses/>.
18 |
19 | """
20 | Top-level decoder for Mode S responses and ADS-B extended squitter messages.
21 | """
22 |
23 | __all__ = ('ESType', 'decode', 'DF0', 'DF4', 'DF5', 'DF11', 'DF16',
24 | 'DF17', 'DF18', 'DF20', 'DF21', 'ExtendedSquitter', 'CommB')
25 |
26 | from enum import Enum
27 |
28 | from . import altitude
29 | from . import squawk
30 | from . import crc
31 |
# 64-entry table for decoding 6-bit callsign characters; '?' marks code
# points that never appear in a valid callsign (decoders reject any
# callsign containing one).
ais_charset = " ABCDEFGHIJKLMNOPQRSTUVWXYZ????? ???????????????0123456789??????"
33 |
34 |
class ModeSMessage:
    """
    A decoded Mode S message.

    All subclasses have the following fields present, though some may be
    set to None:

    DF: downlink format
    address: ICAO address of transmitting aircraft. For some message types
      this is derived from the CRC field and may be unreliable.
    altitude: decoded altitude in feet, or None if not present / not available
    callsign: decoded callsign, or None if not present
    squawk: decoded squawk, or None if not present
    crc_ok: True if the CRC is OK. False if it is bad. None if the correctness
      of the CRC cannot be checked (e.g. the message uses AP or PI)
    """
51 |
52 |
class DF0(ModeSMessage):
    """
    DF0 (Short air-air surveillance / ACAS) message.

    Fields: DF, VS, CC, SL, RI, AC, altitude, address
    """

    def __init__(self, frombuf):
        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.VS = (b0 & 0x04) >> 2                         # 1 bit
        self.CC = (b0 & 0x02) >> 1                         # 1 bit
        # 1 bit pad
        self.SL = (b1 & 0xe0) >> 5                         # 3 bits
        # 2 bits pad
        self.RI = ((b1 & 0x03) << 1) | ((b2 & 0x80) >> 7)  # 4 bits
        # 2 bits pad
        self.AC = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        # 24 bits A/P

        self.squawk = None
        self.callsign = None
        self.altitude = altitude.decode_ac13(self.AC)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
76 |
77 |
class DF4(ModeSMessage):
    """
    DF4 (Surveillance, altitude reply) message.

    Fields: DF, FS, DR, UM, AC, altitude, address
    """

    def __init__(self, frombuf):
        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.FS = (b0 & 0x07)                              # 3 bits
        self.DR = (b1 & 0xf8) >> 3                         # 5 bits
        self.UM = ((b1 & 0x07) << 3) | ((b2 & 0xe0) >> 5)  # 6 bits
        self.AC = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        # 24 bits A/P

        self.squawk = None
        self.callsign = None
        self.altitude = altitude.decode_ac13(self.AC)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
97 |
98 |
class DF5(ModeSMessage):
    """
    DF5 (Surveillance, identity reply) message.

    Fields: DF, FS, DR, UM, ID, squawk, address
    """

    def __init__(self, frombuf):
        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.FS = (b0 & 0x07)                              # 3 bits
        self.DR = (b1 & 0xf8) >> 3                         # 5 bits
        self.UM = ((b1 & 0x07) << 3) | ((b2 & 0xe0) >> 5)  # 6 bits
        self.ID = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        # 24 bits A/P

        self.altitude = None
        self.callsign = None
        self.squawk = squawk.decode_id13(self.ID)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
118 |
119 |
class DF11(ModeSMessage):
    """
    DF11 (All-call reply) message.

    Fields: DF, CA, AA, address, crc_ok
    """

    def __init__(self, frombuf):
        self.DF = (frombuf[0] & 0xf8) >> 3  # 5 bits
        self.CA = (frombuf[0] & 0x07)       # 3 bits
        self.AA = (frombuf[1] << 16) | (frombuf[2] << 8) | frombuf[3]  # 24 bits
        # 24 bits P/I

        self.squawk = None
        self.callsign = None
        self.altitude = None

        residual = crc.residual(frombuf)
        if residual == 0:
            # Zero residual: CRC checks out.
            self.crc_ok = True
        elif (residual & ~0x7f) == 0:
            # Only the low 7 bits are set — presumably a nonzero
            # interrogator code xored into the PI field, so correctness
            # cannot be determined.
            self.crc_ok = None
        else:
            self.crc_ok = False
        self.address = self.AA
143 |
144 |
class DF16(ModeSMessage):
    """
    DF16 (Long air-air surveillance / ACAS) message.

    Fields: DF, VS, SL, RI, AC, altitude, address
    """

    def __init__(self, frombuf):
        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.VS = (b0 & 0x04) >> 2                         # 1 bit
        # 2 bits pad
        self.SL = (b1 & 0xe0) >> 5                         # 3 bits
        # 2 bits pad
        self.RI = ((b1 & 0x03) << 1) | ((b2 & 0x80) >> 7)  # 4 bits
        # 2 bits pad
        self.AC = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        self.MV = frombuf[4:11]                            # 56 bits
        # 24 bits A/P

        self.squawk = None
        self.callsign = None
        self.altitude = altitude.decode_ac13(self.AC)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
168 |
169 |
class CommB(ModeSMessage):
    """A message containing a Comm-B reply.

    Fields: MB, callsign

    Extracts a callsign only from identification replies (first MB byte
    0x20); everything else leaves callsign as None.
    """

    def __init__(self, frombuf):
        self.MB = frombuf[4:11]  # 56 bits

        if frombuf[4] != 0x20:
            # Not an identification reply; no callsign carried.
            self.callsign = None
        else:
            # Eight 6-bit characters packed into bytes 5..10.
            callsign = (
                ais_charset[(frombuf[5] & 0xfc) >> 2] +
                ais_charset[((frombuf[5] & 0x03) << 4) | ((frombuf[6] & 0xf0) >> 4)] +
                ais_charset[((frombuf[6] & 0x0f) << 2) | ((frombuf[7] & 0xc0) >> 6)] +
                ais_charset[frombuf[7] & 0x3f] +
                ais_charset[(frombuf[8] & 0xfc) >> 2] +
                ais_charset[((frombuf[8] & 0x03) << 4) | ((frombuf[9] & 0xf0) >> 4)] +
                ais_charset[((frombuf[9] & 0x0f) << 2) | ((frombuf[10] & 0xc0) >> 6)] +
                ais_charset[frombuf[10] & 0x3f]
            )

            # Accept only non-blank callsigns with no invalid ('?') characters.
            # NOTE(review): callsign is always 8 characters, so the blank
            # comparison looks like it should be against eight spaces —
            # confirm this literal wasn't whitespace-mangled.
            if callsign != ' ' and callsign.find('?') == -1:
                self.callsign = callsign
            else:
                self.callsign = None
197 |
198 |
class DF20(CommB):
    """
    DF20 (Comm-B, altitude reply) message.

    Fields: DF, FS, DR, UM, AC, altitude, address, MB, callsign
    """

    def __init__(self, frombuf):
        # Decode the Comm-B payload (MB / callsign) first.
        CommB.__init__(self, frombuf)

        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.FS = (b0 & 0x07)                              # 3 bits
        self.DR = (b1 & 0xf8) >> 3                         # 5 bits
        self.UM = ((b1 & 0x07) << 3) | ((b2 & 0xe0) >> 5)  # 6 bits
        self.AC = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        # 56 bits MB
        # 24 bits A/P

        self.squawk = None
        self.altitude = altitude.decode_ac13(self.AC)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
221 |
222 |
class DF21(CommB):
    """
    DF21 (Comm-B, identity reply) message.

    Fields: DF, FS, DR, UM, ID, squawk, address, MB, callsign
    """

    def __init__(self, frombuf):
        # Decode the Comm-B payload (MB / callsign) first.
        CommB.__init__(self, frombuf)

        b0, b1, b2, b3 = frombuf[0], frombuf[1], frombuf[2], frombuf[3]

        self.DF = (b0 & 0xf8) >> 3                         # 5 bits
        self.FS = (b0 & 0x07)                              # 3 bits
        self.DR = (b1 & 0xf8) >> 3                         # 5 bits
        self.UM = ((b1 & 0x07) << 3) | ((b2 & 0xe0) >> 5)  # 6 bits
        self.ID = ((b2 & 0x1f) << 8) | b3                  # 13 bits
        # 56 bits MB
        # 24 bits A/P

        self.altitude = None
        self.squawk = squawk.decode_id13(self.ID)
        self.crc_ok = None  # CRC is overlaid with the address (A/P), not checkable
        self.address = crc.residual(frombuf)
245 |
246 |
class ESType(Enum):
    """Identifies the type of an Extended Squitter message."""
    # Values are arbitrary; decoders compare members by identity
    # (e.g. `estype is ESType.airborne_position`).
    id_and_category = 1
    airborne_position = 2
    surface_position = 3
    airborne_velocity = 4
    other = 5
254 |
# Maps the 5-bit type code (top 5 bits of the ME field) to a tuple of
# (ESType, nuc value); the second element is stored as
# ExtendedSquitter.nuc and is None where the type carries no accuracy
# indication. Codes not listed here decode as (ESType.other, None).
es_types = {
    0: (ESType.airborne_position, 0),
    1: (ESType.id_and_category, None),
    2: (ESType.id_and_category, None),
    3: (ESType.id_and_category, None),
    4: (ESType.id_and_category, None),
    5: (ESType.surface_position, 9),
    6: (ESType.surface_position, 8),
    7: (ESType.surface_position, 7),
    8: (ESType.surface_position, 6),
    9: (ESType.airborne_position, 9),
    10: (ESType.airborne_position, 8),
    11: (ESType.airborne_position, 7),
    12: (ESType.airborne_position, 6),
    13: (ESType.airborne_position, 5),
    14: (ESType.airborne_position, 4),
    15: (ESType.airborne_position, 3),
    16: (ESType.airborne_position, 2),
    17: (ESType.airborne_position, 1),
    18: (ESType.airborne_position, 0),
    19: (ESType.airborne_velocity, None),
    20: (ESType.airborne_position, 9),
    21: (ESType.airborne_position, 8),
    22: (ESType.airborne_position, 0)
}
280 |
281 |
class ExtendedSquitter(ModeSMessage):
    """A message that carries an Extended Squitter message.

    Fields: estype, nuc

    For airborne positions: SS, SAF, AC12, T, F, LAT, LON, altitude
    For id and category: CATEGORY, callsign
    """

    def __init__(self, frombuf):
        # Type code is the top 5 bits of the ME field; unknown codes
        # decode as ESType.other with no nuc.
        metype = (frombuf[4] & 0xf8) >> 3
        self.estype, self.nuc = es_types.get(metype, (ESType.other, None))

        if self.estype is ESType.airborne_position:
            self.SS = (frombuf[4] & 0x06) >> 1
            self.SAF = frombuf[4] & 0x01
            self.AC12 = (frombuf[5] << 4) | ((frombuf[6] & 0xf0) >> 4)  # 12-bit altitude code
            self.T = (frombuf[6] & 0x08) >> 3
            self.F = (frombuf[6] & 0x04) >> 2
            # 17-bit encoded latitude/longitude (raw CPR values, not degrees).
            self.LAT = (((frombuf[6] & 0x03) << 15) |
                        (frombuf[7] << 7) |
                        ((frombuf[8] & 0xfe) >> 1))
            self.LON = (((frombuf[8] & 0x01) << 16) |
                        (frombuf[9] << 8) |
                        frombuf[10])
            self.altitude = altitude.decode_ac12(self.AC12)
            self.callsign = None

        elif self.estype is ESType.id_and_category:
            self.CATEGORY = frombuf[4] & 0x07
            self.altitude = None
            # Eight 6-bit characters packed into bytes 5..10.
            self.callsign = (
                ais_charset[(frombuf[5] & 0xfc) >> 2] +
                ais_charset[((frombuf[5] & 0x03) << 4) | ((frombuf[6] & 0xf0) >> 4)] +
                ais_charset[((frombuf[6] & 0x0f) << 2) | ((frombuf[7] & 0xc0) >> 6)] +
                ais_charset[frombuf[7] & 0x3f] +
                ais_charset[(frombuf[8] & 0xfc) >> 2] +
                ais_charset[((frombuf[8] & 0x03) << 4) | ((frombuf[9] & 0xf0) >> 4)] +
                ais_charset[((frombuf[9] & 0x0f) << 2) | ((frombuf[10] & 0xc0) >> 6)] +
                ais_charset[frombuf[10] & 0x3f]
            )

        else:
            self.altitude = None
            self.callsign = None
327 |
328 |
class DF17(ExtendedSquitter):
    """DF17 (Extended Squitter) message.

    Fields: DF, CA, AA, address, crc_ok; plus those of ExtendedSquitter.
    """

    def __init__(self, frombuf):
        super().__init__(frombuf)

        # Header byte: 5 bits downlink format, 3 bits capability.
        self.DF = (frombuf[0] & 0xf8) >> 3
        self.CA = (frombuf[0] & 0x07)
        # Announced Address: 24 bits over bytes 1..3.
        self.AA = (frombuf[1] << 16) | (frombuf[2] << 8) | frombuf[3]
        # the remainder of the message is 56 bits of ME plus 24 bits of CRC

        # DF17 carries the address in the clear; an undamaged message
        # leaves a zero CRC residual.
        self.address = self.AA
        self.crc_ok = (crc.residual(frombuf) == 0)
        self.squawk = None
347 |
348 |
class DF18(ExtendedSquitter):
    """DF18 (Extended Squitter / Non-Transponder) message.

    Fields: DF, CF, AA, address, crc_ok; plus those of ExtendedSquitter.
    """

    def __init__(self, frombuf):
        ExtendedSquitter.__init__(self, frombuf)

        first = frombuf[0]
        self.DF = (first & 0xf8) >> 3   # downlink format, 5 bits
        self.CF = (first & 0x07)        # control field, 3 bits

        # Announced Address: 24 bits over bytes 1..3.
        aa = frombuf[1] << 16
        aa |= frombuf[2] << 8
        aa |= frombuf[3]
        self.AA = aa
        # the remainder of the message is 56 bits of ME plus 24 bits of CRC

        self.squawk = None
        # an undamaged message leaves a zero CRC residual
        self.crc_ok = (crc.residual(frombuf) == 0)
        self.address = self.AA
367 |
368 |
# Maps the 5-bit downlink format (DF) number to the class that decodes
# messages of that format.  DFs absent from this table are unhandled;
# decode() returns None for them.
message_types = {
    0: DF0,
    4: DF4,
    5: DF5,
    11: DF11,
    16: DF16,
    17: DF17,
    18: DF18,
    20: DF20,
    21: DF21
}
380 |
381 |
def decode(frombuf):
    """
    Decode a Mode S message.

    frombuf: a 7-byte or 14-byte message containing the encoded Mode S message

    Returns a suitable message object, or None if the message type is not
    handled.
    """

    df = (frombuf[0] & 0xf8) >> 3
    try:
        # Keep the try body minimal: only the table lookup should be able
        # to raise KeyError.  The original wrapped the constructor call as
        # well, silently turning any KeyError raised inside a decoder
        # (a genuine bug) into a None return.
        decoder = message_types[df]
    except KeyError:
        return None
    return decoder(frombuf)
397 |
--------------------------------------------------------------------------------
/mlat/server/clocktrack.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Manages the clock synchronization pairs between all receivers based on
21 | DF17 position messages received by more than one receiver.
22 | """
23 |
24 | __all__ = ('SyncPoint', 'ClockTracker')
25 |
26 | import asyncio
27 | import functools
28 | import time
29 | import logging
30 |
31 | import modes.message
32 |
33 | from mlat import geodesy, constants, profile
34 | from mlat.server import clocksync, config
35 |
36 |
class SyncPoint(object):
    """A potential clock synchronization point.

    A sync point is a pair of DF17 airborne position messages together with
    the arrival-time measurements from every receiver that saw that pair.
    """

    def __init__(self, address, posA, posB, interval):
        """Build a sync point.

        address: ICAO address of the aircraft transmitting the pair
        posA: ECEF position of the earlier message
        posB: ECEF position of the later message
        interval: nominal spacing, in seconds, between the two messages
            as measured by the first receiver reporting the pair; used to
            tell apart cases where the same message contents are
            transmitted more than once.
        """
        self.address = address
        self.posA = posA
        self.posB = posB
        self.interval = interval
        # accumulates [receiver, timestampA, timestampB, synced] entries
        self.receivers = []
62 |
63 |
class ClockTracker(object):
    """Maintains clock pairings between receivers, and matches up incoming sync messages
    from receivers to update the parameters of the pairings."""

    def __init__(self):
        # map of (sync key) -> list of sync points
        #
        # sync key is a pair of bytearrays: (msgA, msgB)
        # where msgA and msgB are the contents of the
        # earlier and later message of the pair respectively.
        self.sync_points = {}

        # map of (pair key) -> pairing
        #
        # pair key is (receiver 0, receiver 1) where receiver 0
        # is always less than receiver 1.
        self.clock_pairs = {}

        # schedule periodic cleanup
        asyncio.get_event_loop().call_later(1.0, self._cleanup)

    def _cleanup(self):
        """Called periodically to clean up clock pairings that have expired."""

        # reschedule ourselves first, so cleanup keeps running even if
        # something below fails
        asyncio.get_event_loop().call_later(30.0, self._cleanup)

        now = time.monotonic()
        prune = set()
        for k, pairing in self.clock_pairs.items():
            if pairing.expiry <= now:
                prune.add(k)

        # delete after the scan: a dict can't be mutated mid-iteration
        for k in prune:
            del self.clock_pairs[k]

    @profile.trackcpu
    def receiver_clock_reset(self, receiver):
        """
        Called by the coordinator when we should drop our clock sync
        state for a given receiver. This happens on input disconnect/
        reconnect.

        (This is actually the same work as receiver_disconnect for the moment)
        """
        # iterate over a snapshot of the keys since we delete as we go
        for k in list(self.clock_pairs.keys()):
            if k[0] is receiver or k[1] is receiver:
                del self.clock_pairs[k]

    @profile.trackcpu
    def receiver_disconnect(self, receiver):
        """
        Called by the coordinator when a receiver disconnects.

        Clears up any clock pairing involving the receiver immediately,
        as it's very likely that any existing sync data will be invalid
        if/when the receiver later reconnects.

        Sync points involving the receiver are not cleaned up immediately.
        It's assumed that the disconnected receiver has the "dead" flag
        set; this flag is tested before sync happens.
        """

        # Clean up clock_pairs immediately.
        # Any membership in a pending sync point is noticed when we try to sync more receivers with it.
        for k in list(self.clock_pairs.keys()):
            if k[0] is receiver or k[1] is receiver:
                del self.clock_pairs[k]

    @profile.trackcpu
    def receiver_sync(self, receiver,
                      even_time, odd_time,
                      even_message, odd_message):
        """
        Called by the coordinator to handle a sync message from a receiver.

        Looks for a suitable existing sync point and, if there is one, does
        synchronization between this receiver and the existing receivers
        associated with the sync point.

        Otherwise, validates the message pair and, if it is suitable, creates a
        new sync point for it.

        receiver: the receiver reporting the sync message
        even_message: a DF17 airborne position message with F=0
        odd_message: a DF17 airborne position message with F=1
        even_time: the time of arrival of even_message, as seen by receiver.clock
        odd_time: the time of arrival of odd_message, as seen by receiver.clock
        """

        # Do sanity checks.

        # Messages must be within 5 seconds of each other.
        if abs(even_time - odd_time) / receiver.clock.freq > 5.0:
            return

        # compute key and interval; the key is always ordered
        # (earlier message, later message)
        if even_time < odd_time:
            tA = even_time
            tB = odd_time
            key = (even_message, odd_message)
        else:
            tA = odd_time
            tB = even_time
            key = (odd_message, even_message)

        # nominal message spacing in seconds, per this receiver's clock
        interval = (tB - tA) / receiver.clock.freq

        # do we have a suitable existing match?
        syncpointlist = self.sync_points.get(key)
        if syncpointlist:
            for candidate in syncpointlist:
                if abs(candidate.interval - interval) < 1e-3:
                    # interval matches within 1ms, close enough.
                    self._add_to_existing_syncpoint(candidate, receiver, tA, tB)
                    return

        # No existing match. Validate the messages and maybe create a new sync point

        # basic validity: both must decode as CRC-clean DF17 airborne
        # position messages with the expected CPR format flags
        even_message = modes.message.decode(even_message)
        if ((not even_message or
             even_message.DF != 17 or
             not even_message.crc_ok or
             even_message.estype != modes.message.ESType.airborne_position or
             even_message.F)):
            return

        odd_message = modes.message.decode(odd_message)
        if ((not odd_message or
             odd_message.DF != 17 or
             not odd_message.crc_ok or
             odd_message.estype != modes.message.ESType.airborne_position or
             not odd_message.F)):
            return

        # both halves must come from the same aircraft
        if even_message.address != odd_message.address:
            return

        # quality checks
        if even_message.nuc < 6 or even_message.altitude is None:
            return

        if odd_message.nuc < 6 or odd_message.altitude is None:
            return

        # reject implausible altitude changes between the two messages
        if abs(even_message.altitude - odd_message.altitude) > 5000:
            return

        # find global positions
        try:
            even_lat, even_lon, odd_lat, odd_lon = modes.cpr.decode(even_message.LAT,
                                                                    even_message.LON,
                                                                    odd_message.LAT,
                                                                    odd_message.LON)
        except ValueError:
            # CPR failed
            return

        # convert to ECEF, do range checks
        even_ecef = geodesy.llh2ecef((even_lat,
                                      even_lon,
                                      even_message.altitude * constants.FTOM))
        if geodesy.ecef_distance(even_ecef, receiver.position) > config.MAX_RANGE:
            logging.info("{a:06X}: receiver range check (even) failed".format(a=even_message.address))
            return

        odd_ecef = geodesy.llh2ecef((odd_lat,
                                     odd_lon,
                                     odd_message.altitude * constants.FTOM))
        if geodesy.ecef_distance(odd_ecef, receiver.position) > config.MAX_RANGE:
            logging.info("{a:06X}: receiver range check (odd) failed".format(a=odd_message.address))
            return

        if geodesy.ecef_distance(even_ecef, odd_ecef) > config.MAX_INTERMESSAGE_RANGE:
            logging.info("{a:06X}: intermessage range check failed".format(a=even_message.address))
            return

        # valid. Create a new sync point.
        # posA/posB must be ordered (earlier, later), like the key.
        if even_time < odd_time:
            syncpoint = SyncPoint(even_message.address, even_ecef, odd_ecef, interval)
        else:
            syncpoint = SyncPoint(even_message.address, odd_ecef, even_ecef, interval)

        # receiver entry: [receiver, timestampA, timestampB, synced-flag]
        syncpoint.receivers.append([receiver, tA, tB, False])
        if not syncpointlist:
            syncpointlist = self.sync_points[key] = []
        syncpointlist.append(syncpoint)

        # schedule cleanup of the syncpoint after 2 seconds -
        # we should have seen all copies of those messages by
        # then.
        asyncio.get_event_loop().call_later(
            2.0,
            functools.partial(self._cleanup_syncpoint,
                              key=key,
                              syncpoint=syncpoint))

    def _add_to_existing_syncpoint(self, syncpoint, r0, t0A, t0B):
        # add a new receiver and timestamps to an existing syncpoint

        # new state for the syncpoint: receiver, timestamp A, timestamp B,
        # and a flag indicating if this receiver actually managed to sync
        # with another receiver using this syncpoint (used for stats)
        r0l = [r0, t0A, t0B, False]

        # try to sync the new receiver with all receivers that previously
        # saw the same pair
        for r1l in syncpoint.receivers:
            r1, t1A, t1B, r1sync = r1l

            if r1.dead:
                # receiver went away before we started resolving this
                continue

            if r0 is r1:
                # odd, but could happen
                continue

            # order the clockpair so that the receiver that sorts lower is the base clock
            if r0 < r1:
                if self._do_sync(syncpoint.address, syncpoint.posA, syncpoint.posB, r0, t0A, t0B, r1, t1A, t1B):
                    # sync worked, note it for stats
                    r0l[3] = r1l[3] = True
            else:
                if self._do_sync(syncpoint.address, syncpoint.posA, syncpoint.posB, r1, t1A, t1B, r0, t0A, t0B):
                    # sync worked, note it for stats
                    r0l[3] = r1l[3] = True

        # update syncpoint with the new receiver and we're done
        syncpoint.receivers.append(r0l)

    @profile.trackcpu
    def _cleanup_syncpoint(self, key, syncpoint):
        """Expire a syncpoint. This happens ~2 seconds after the first copy
        of a message pair is received.

        key: the key of the syncpoint
        syncpoint: the syncpoint itself
        """

        # remove syncpoint from self.sync_points, clean up empty entries
        l = self.sync_points[key]
        l.remove(syncpoint)
        if not l:
            del self.sync_points[key]

        # stats update: count receivers that synced at least once via this point
        for r, _, _, synced in syncpoint.receivers:
            if synced:
                r.sync_count += 1

    def _do_sync(self, address, posA, posB, r0, t0A, t0B, r1, t1A, t1B):
        """Feed one message pair into the clock pairing for (r0, r1),
        creating the pairing if needed.

        address: ICAO address of the sync aircraft
        posA/posB: ECEF positions of the earlier/later message
        r0, t0A, t0B: base receiver plus its two arrival timestamps
        r1, t1A, t1B: peer receiver plus its two arrival timestamps

        Returns a truthy value when the observation was used (or was
        deliberately skipped as stale/duplicate), per pairing.update.
        """
        # find or create clock pair
        k = (r0, r1)
        pairing = self.clock_pairs.get(k)
        if pairing is None:
            self.clock_pairs[k] = pairing = clocksync.ClockPairing(r0, r1)

        # propagation delays, in clock units
        delay0A = geodesy.ecef_distance(posA, r0.position) * r0.clock.freq / constants.Cair
        delay0B = geodesy.ecef_distance(posB, r0.position) * r0.clock.freq / constants.Cair
        delay1A = geodesy.ecef_distance(posA, r1.position) * r1.clock.freq / constants.Cair
        delay1B = geodesy.ecef_distance(posB, r1.position) * r1.clock.freq / constants.Cair

        # compute intervals, adjusted for transmitter motion
        i0 = (t0B - delay0B) - (t0A - delay0A)
        i1 = (t1B - delay1B) - (t1A - delay1A)

        if not pairing.is_new(t0B - delay0B):
            return True  # timestamp is in the past or duplicated, don't use this

        # do the update
        return pairing.update(address, t0B - delay0B, t1B - delay1B, i0, i1)

    def dump_receiver_state(self, receiver):
        """Return a summary of this receiver's clock pairings, keyed by the
        peer receiver's uuid, for the sync.json state file.

        Each value is [sample count, error, drift, last timestamp offset];
        error and drift are scaled by 1e6 (presumably to µs / ppm — confirm
        against ClockPairing's units).  Pairings with fewer than 2 samples
        are omitted.
        """
        state = {}
        for (r0, r1), pairing in self.clock_pairs.items():
            if pairing.n < 2:
                continue
            if r0 is receiver:
                state[r1.uuid] = [pairing.n,
                                  round(pairing.error * 1e6, 1),
                                  round(pairing.drift * 1e6, 2),
                                  pairing.ts_peer[-1] - pairing.ts_base[-1]]
            elif r1 is receiver:
                # this receiver is the peer: report the inverse drift and
                # the offset in the opposite direction
                state[r0.uuid] = [pairing.n,
                                  round(pairing.error * 1e6, 1),
                                  round(pairing.i_drift * 1e6, 2),
                                  pairing.ts_base[-1] - pairing.ts_peer[-1]]
        return state
354 |
--------------------------------------------------------------------------------
/mlat/server/coordinator.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Top level glue that knows about all receivers and moves data between
21 | the various sub-objects that make up the server.
22 | """
23 |
24 | import signal
25 | import asyncio
26 | import json
27 | import logging
28 | import time
29 | from contextlib import closing
30 |
31 | from mlat import geodesy, profile, constants
32 | from mlat.server import tracker, clocksync, clocktrack, mlattrack, util
33 |
34 | glogger = logging.getLogger("coordinator")
35 |
36 |
class Receiver(object):
    """Represents a particular connected receiver and the associated
    connection that manages it."""

    def __init__(self, uuid, user, connection, clock, position_llh, privacy, connection_info):
        """uuid: unique id for this receiver; user: user/display name;
        connection: the protocol connection managing this receiver;
        clock: the receiver's clock model; position_llh: (lat, lon, alt);
        privacy: privacy flag (exported with the receiver's state);
        connection_info: free-form description of the connection."""
        self.uuid = uuid
        self.user = user
        self.connection = connection
        self.clock = clock
        self.position_llh = position_llh
        self.position = geodesy.llh2ecef(position_llh)  # ECEF position
        self.privacy = privacy
        self.connection_info = connection_info
        self.dead = False

        self.sync_count = 0           # number of successful clock syncs (stats)
        self.last_rate_report = None  # most recent ADS-B rate report, if any
        self.tracking = set()         # aircraft this receiver reports seeing
        self.sync_interest = set()    # aircraft wanted for clock sync
        self.mlat_interest = set()    # aircraft wanted for multilateration
        self.requested = set()        # aircraft the client was asked to forward

        self.distance = {}            # other receiver -> ECEF distance

    def update_interest_sets(self, new_sync, new_mlat):
        """Replace this receiver's sync/mlat interest sets, keeping the
        per-aircraft back-references consistent with the change."""
        for added in new_sync.difference(self.sync_interest):
            added.sync_interest.add(self)

        for removed in self.sync_interest.difference(new_sync):
            removed.sync_interest.discard(self)

        for added in new_mlat.difference(self.mlat_interest):
            added.mlat_interest.add(self)

        for removed in self.mlat_interest.difference(new_mlat):
            removed.mlat_interest.discard(self)

        self.sync_interest = new_sync
        self.mlat_interest = new_mlat

    @profile.trackcpu
    def refresh_traffic_requests(self):
        """Recompute the interesting subset of tracked aircraft and ask the
        client connection to forward traffic for them."""
        self.requested = {x for x in self.tracking if x.interesting}
        self.connection.request_traffic(self, {x.icao for x in self.requested})

    def __lt__(self, other):
        # receivers order by uuid; used e.g. to order clock pairings
        return self.uuid < other.uuid

    def __str__(self):
        return self.uuid

    def __repr__(self):
        # Bug fix: the format string previously used index {0} twice and
        # never referenced {3}, so the uuid was printed in place of the
        # user and id(self) was silently dropped.
        return 'Receiver({0!r},{1!r},{2!r})@{3}'.format(self.uuid,
                                                        self.user,
                                                        self.connection,
                                                        id(self))
93 |
94 |
class Coordinator(object):
    """Master coordinator. Receives all messages from receivers and dispatches
    them to clock sync / multilateration / tracking as needed."""

    def __init__(self, work_dir, partition=(1, 1), tag="mlat", authenticator=None, pseudorange_filename=None):
        """If authenticator is not None, it should be a callable that takes two arguments:
        the newly created Receiver, plus the 'auth' argument provided by the connection.
        The authenticator may modify the receiver if needed. The authenticator should either
        return silently on success, or raise an exception (propagated to the caller) on
        failure.
        """

        self.work_dir = work_dir
        self.receivers = {}  # keyed by uuid
        self.sighup_handlers = []
        self.authenticator = authenticator
        self.partition = partition
        self.tag = tag
        self.tracker = tracker.Tracker(partition)
        self.clock_tracker = clocktrack.ClockTracker()
        self.mlat_tracker = mlattrack.MlatTracker(self,
                                                  blacklist_filename=work_dir + '/blacklist.txt',
                                                  pseudorange_filename=pseudorange_filename)
        self.output_handlers = [self.forward_results]

        # expose the sub-trackers' message handlers directly
        self.receiver_mlat = self.mlat_tracker.receiver_mlat
        self.receiver_sync = self.clock_tracker.receiver_sync

    def start(self):
        """Start the periodic state/profile writer tasks.

        Returns a completed future so callers can treat all services
        uniformly."""
        # Bug fix: asyncio.async() was deprecated and is a syntax error on
        # Python 3.7+ (where `async` became a keyword); ensure_future is
        # the drop-in replacement, available since Python 3.4.4.
        self._write_state_task = asyncio.ensure_future(self.write_state())
        if profile.enabled:
            self._write_profile_task = asyncio.ensure_future(self.write_profile())
        else:
            self._write_profile_task = None
        return util.completed_future

    def add_output_handler(self, handler):
        """Register a callable to receive multilateration results."""
        self.output_handlers.append(handler)

    def remove_output_handler(self, handler):
        """Unregister a previously added output handler."""
        self.output_handlers.remove(handler)

    # it's a pity that asyncio's add_signal_handler doesn't let you have
    # multiple handlers per signal. so wire up a multiple-handler here.
    def add_sighup_handler(self, handler):
        """Register a callable to run on SIGHUP; installs the underlying
        signal handler on first use."""
        if not self.sighup_handlers:
            asyncio.get_event_loop().add_signal_handler(signal.SIGHUP, self.sighup)
        self.sighup_handlers.append(handler)

    def remove_sighup_handler(self, handler):
        """Unregister a SIGHUP handler; removes the underlying signal
        handler when the last one goes away."""
        self.sighup_handlers.remove(handler)
        if not self.sighup_handlers:
            asyncio.get_event_loop().remove_signal_handler(signal.SIGHUP)

    def sighup(self):
        # iterate over a copy, as handlers may add/remove handlers
        for handler in self.sighup_handlers[:]:
            handler()

    @profile.trackcpu
    def _really_write_state(self):
        """Dump aircraft/sync/location state to JSON files in work_dir and
        refresh the process title."""
        aircraft_state = {}
        mlat_count = 0
        sync_count = 0
        now = time.time()
        for ac in self.tracker.aircraft.values():
            s = aircraft_state['{0:06X}'.format(ac.icao)] = {}
            s['interesting'] = 1 if ac.interesting else 0
            s['allow_mlat'] = 1 if ac.allow_mlat else 0
            s['tracking'] = len(ac.tracking)
            s['sync_interest'] = len(ac.sync_interest)
            s['mlat_interest'] = len(ac.mlat_interest)
            s['mlat_message_count'] = ac.mlat_message_count
            s['mlat_result_count'] = ac.mlat_result_count
            s['mlat_kalman_count'] = ac.mlat_kalman_count

            # only include a position when we have a recent, valid estimate
            if ac.last_result_time is not None and ac.kalman.valid:
                s['last_result'] = round(now - ac.last_result_time, 1)
                lat, lon, alt = ac.kalman.position_llh
                s['lat'] = round(lat, 3)
                s['lon'] = round(lon, 3)
                s['alt'] = round(alt * constants.MTOF, 0)
                s['heading'] = round(ac.kalman.heading, 0)
                s['speed'] = round(ac.kalman.ground_speed, 0)

            if ac.interesting:
                if ac.sync_interest:
                    sync_count += 1
                if ac.mlat_interest:
                    mlat_count += 1

        if self.partition[1] > 1:
            util.setproctitle('{tag} {i}/{n} ({r} clients) ({m} mlat {s} sync {t} tracked)'.format(
                tag=self.tag,
                i=self.partition[0],
                n=self.partition[1],
                r=len(self.receivers),
                m=mlat_count,
                s=sync_count,
                t=len(self.tracker.aircraft)))
        else:
            util.setproctitle('{tag} ({r} clients) ({m} mlat {s} sync {t} tracked)'.format(
                tag=self.tag,
                r=len(self.receivers),
                m=mlat_count,
                s=sync_count,
                t=len(self.tracker.aircraft)))

        sync = {}
        locations = {}

        for r in self.receivers.values():
            sync[r.uuid] = {
                'peers': self.clock_tracker.dump_receiver_state(r)
            }
            locations[r.uuid] = {
                'user': r.user,
                'lat': r.position_llh[0],
                'lon': r.position_llh[1],
                'alt': r.position_llh[2],
                'privacy': r.privacy,
                'connection': r.connection_info
            }

        with closing(open(self.work_dir + '/sync.json', 'w')) as f:
            json.dump(sync, fp=f, indent=True)

        with closing(open(self.work_dir + '/locations.json', 'w')) as f:
            json.dump(locations, fp=f, indent=True)

        with closing(open(self.work_dir + '/aircraft.json', 'w')) as f:
            json.dump(aircraft_state, fp=f, indent=True)

    @asyncio.coroutine
    def write_state(self):
        """Periodically (every 30s) write the state files."""
        while True:
            try:
                self._really_write_state()
            except Exception:
                glogger.exception("Failed to write state files")

            yield from asyncio.sleep(30.0)

    @asyncio.coroutine
    def write_profile(self):
        """Periodically (every 60s) dump CPU profiling data."""
        while True:
            yield from asyncio.sleep(60.0)

            try:
                with closing(open(self.work_dir + '/cpuprofile.txt', 'w')) as f:
                    profile.dump_cpu_profiles(f)
            except Exception:
                glogger.exception("Failed to write CPU profile")

    def close(self):
        """Cancel the periodic background tasks."""
        self._write_state_task.cancel()
        if self._write_profile_task:
            self._write_profile_task.cancel()

    @asyncio.coroutine
    def wait_closed(self):
        """Wait for the background tasks to finish cancelling."""
        # NOTE(review): no `yield from` here — if util.safe_wait is a
        # coroutine it is never actually awaited; confirm its contract.
        util.safe_wait([self._write_state_task, self._write_profile_task])

    @profile.trackcpu
    def new_receiver(self, connection, uuid, user, auth, position_llh, clock_type, privacy, connection_info):
        """Assigns a new receiver ID for a given user.
        Returns the new receiver ID.

        May raise ValueError to disallow this receiver."""

        if uuid in self.receivers:
            raise ValueError('User {uuid}/{user} is already connected'.format(uuid=uuid, user=user))

        clock = clocksync.make_clock(clock_type)
        receiver = Receiver(uuid, user, connection, clock,
                            position_llh=position_llh,
                            privacy=privacy,
                            connection_info=connection_info)

        if self.authenticator is not None:
            self.authenticator(receiver, auth)  # may raise ValueError if authentication fails

        self._compute_interstation_distances(receiver)

        self.receivers[receiver.uuid] = receiver
        return receiver

    def _compute_interstation_distances(self, receiver):
        """compute inter-station distances for a receiver"""

        for other_receiver in self.receivers.values():
            if other_receiver is receiver:
                distance = 0
            else:
                distance = geodesy.ecef_distance(receiver.position, other_receiver.position)
            # distances are stored symmetrically on both receivers
            receiver.distance[other_receiver] = distance
            other_receiver.distance[receiver] = distance

    @profile.trackcpu
    def receiver_location_update(self, receiver, position_llh):
        """Note that a given receiver has moved."""
        receiver.position_llh = position_llh
        receiver.position = geodesy.llh2ecef(position_llh)

        self._compute_interstation_distances(receiver)

    @profile.trackcpu
    def receiver_disconnect(self, receiver):
        """Notes that the given receiver has disconnected."""

        receiver.dead = True
        self.tracker.remove_all(receiver)
        self.clock_tracker.receiver_disconnect(receiver)
        self.receivers.pop(receiver.uuid)

        # clean up old distance entries
        for other_receiver in self.receivers.values():
            other_receiver.distance.pop(receiver, None)

    @profile.trackcpu
    def receiver_tracking_add(self, receiver, icao_set):
        """Update a receiver's tracking set by adding some aircraft."""
        self.tracker.add(receiver, icao_set)
        if receiver.last_rate_report is None:
            # not receiving rate reports for this receiver
            self.tracker.update_interest(receiver)

    @profile.trackcpu
    def receiver_tracking_remove(self, receiver, icao_set):
        """Update a receiver's tracking set by removing some aircraft."""
        self.tracker.remove(receiver, icao_set)
        if receiver.last_rate_report is None:
            # not receiving rate reports for this receiver
            self.tracker.update_interest(receiver)

    @profile.trackcpu
    def receiver_clock_reset(self, receiver):
        """Reset current clock synchronization for a receiver."""
        self.clock_tracker.receiver_clock_reset(receiver)

    @profile.trackcpu
    def receiver_rate_report(self, receiver, report):
        """Process an ADS-B position rate report for a receiver."""
        receiver.last_rate_report = report
        self.tracker.update_interest(receiver)

    @profile.trackcpu
    def forward_results(self, receive_timestamp, address, ecef, ecef_cov, receivers, distinct, dof, kalman_state):
        """Send a multilateration result to every receiver that has ever
        contributed a successful mlat for this aircraft (falling back to
        the contributing receivers when the aircraft is untracked)."""
        broadcast = receivers
        ac = self.tracker.aircraft.get(address)
        if ac:
            ac.successful_mlat.update(receivers)
            broadcast = ac.successful_mlat
        for receiver in broadcast:
            try:
                receiver.connection.report_mlat_position(receiver,
                                                         receive_timestamp, address,
                                                         ecef, ecef_cov, receivers, distinct,
                                                         dof, kalman_state)
            except Exception:
                glogger.exception("Failed to forward result to receiver {r}".format(r=receiver.uuid))
                # eat the exception so it doesn't break our caller
356 |
--------------------------------------------------------------------------------
/mlat/server/mlattrack.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | The multilateration tracker: pairs up copies of the same message seen by more
21 | than one receiver, clusters them by time, and passes them on to the solver to
22 | derive positions.
23 | """
24 |
25 | import json
26 | import asyncio
27 | import logging
28 | import operator
29 | import numpy
30 | from contextlib import closing
31 |
32 | import modes.message
33 | from mlat import geodesy, constants, profile
34 | from mlat.server import clocknorm, solver, config
35 |
36 | glogger = logging.getLogger("mlattrack")
37 |
38 |
class MessageGroup:
    """Collects the copies of a single Mode S message as reported by
    several receivers, so they can later be resolved together."""

    def __init__(self, message, first_seen):
        self.message = message        # the raw message bytes (grouping key)
        self.first_seen = first_seen  # earliest UTC arrival time seen so far
        self.copies = []              # accumulates (receiver, timestamp, utc)
        self.handle = None            # event-loop timer that resolves the group
45 |
46 |
47 | class MlatTracker(object):
    def __init__(self, coordinator, blacklist_filename=None, pseudorange_filename=None):
        """coordinator: the owning Coordinator;
        blacklist_filename: optional file of blacklisted users;
        pseudorange_filename: optional output log for pseudorange data."""
        # message bytes -> MessageGroup awaiting resolution
        self.pending = {}
        self.coordinator = coordinator
        self.tracker = coordinator.tracker
        self.clock_tracker = coordinator.clock_tracker
        self.blacklist_filename = blacklist_filename
        self.read_blacklist()
        # re-read the blacklist whenever the server gets a SIGHUP
        self.coordinator.add_sighup_handler(self.read_blacklist)

        self.pseudorange_file = None
        self.pseudorange_filename = pseudorange_filename
        if self.pseudorange_filename:
            self.reopen_pseudoranges()
            # reopen the pseudorange log on SIGHUP (presumably to support
            # external log rotation — confirm)
            self.coordinator.add_sighup_handler(self.reopen_pseudoranges)
62 |
63 | def read_blacklist(self):
64 | s = set()
65 | if self.blacklist_filename:
66 | try:
67 | with closing(open(self.blacklist_filename, 'r')) as f:
68 | user = f.readline().strip()
69 | if user:
70 | s.add(user)
71 | except FileNotFoundError:
72 | pass
73 |
74 | glogger.info("Read {n} blacklist entries".format(n=len(s)))
75 |
76 | self.blacklist = s
77 |
78 | def reopen_pseudoranges(self):
79 | if self.pseudorange_file:
80 | self.pseudorange_file.close()
81 | self.pseudorange_file = None
82 |
83 | self.pseudorange_file = open(self.pseudorange_filename, 'a')
84 |
    @profile.trackcpu
    def receiver_mlat(self, receiver, timestamp, message, utc):
        """Accept one copy of a message for multilateration.

        Copies with identical message bytes are grouped into a
        MessageGroup; config.MLAT_DELAY seconds after the first copy
        arrives, the group is resolved by _resolve().

        receiver: the reporting receiver
        timestamp: arrival time in the receiver's clock units
        message: the raw message bytes (used as the grouping key)
        utc: wallclock arrival time
        """
        # use message as key
        group = self.pending.get(message)
        if not group:
            group = self.pending[message] = MessageGroup(message, utc)
            # resolve once all copies have (probably) arrived
            group.handle = asyncio.get_event_loop().call_later(
                config.MLAT_DELAY,
                self._resolve,
                group)

        group.copies.append((receiver, timestamp, utc))
        # track the earliest arrival across all copies
        group.first_seen = min(group.first_seen, utc)
98 |
99 | @profile.trackcpu
100 | def _resolve(self, group):
101 | del self.pending[group.message]
102 |
103 | # less than 3 messages -> no go
104 | if len(group.copies) < 3:
105 | return
106 |
107 | decoded = modes.message.decode(group.message)
108 |
109 | ac = self.tracker.aircraft.get(decoded.address)
110 | if not ac:
111 | return
112 |
113 | ac.mlat_message_count += 1
114 |
115 | if not ac.allow_mlat:
116 | glogger.info("not doing mlat for {0:06x}, wrong partition!".format(ac.icao))
117 | return
118 |
119 | # When we've seen a few copies of the same message, it's
120 | # probably correct. Update the tracker with newly seen
121 | # altitudes, squawks, callsigns.
122 | if decoded.altitude is not None:
123 | ac.altitude = decoded.altitude
124 | ac.last_altitude_time = group.first_seen
125 |
126 | if decoded.squawk is not None:
127 | ac.squawk = decoded.squawk
128 |
129 | if decoded.callsign is not None:
130 | ac.callsign = decoded.callsign
131 |
132 | # find old result, if present
133 | if ac.last_result_position is None or (group.first_seen - ac.last_result_time) > 120:
134 | last_result_position = None
135 | last_result_var = 1e9
136 | last_result_dof = 0
137 | last_result_time = group.first_seen - 120
138 | else:
139 | last_result_position = ac.last_result_position
140 | last_result_var = ac.last_result_var
141 | last_result_dof = ac.last_result_dof
142 | last_result_time = ac.last_result_time
143 |
144 | # find altitude
145 | if ac.altitude is None:
146 | altitude = None
147 | altitude_dof = 0
148 | else:
149 | altitude = ac.altitude * constants.FTOM
150 | altitude_dof = 1
151 |
152 | # construct a map of receiver -> list of timestamps
153 | timestamp_map = {}
154 | for receiver, timestamp, utc in group.copies:
155 | if receiver.user not in self.blacklist:
156 | timestamp_map.setdefault(receiver, []).append((timestamp, utc))
157 |
158 | # check for minimum needed receivers
159 | dof = len(timestamp_map) + altitude_dof - 4
160 | if dof < 0:
161 | return
162 |
163 | # basic ratelimit before we do more work
164 | elapsed = group.first_seen - last_result_time
165 | if elapsed < 15.0 and dof < last_result_dof:
166 | return
167 |
168 | if elapsed < 2.0 and dof == last_result_dof:
169 | return
170 |
171 | # normalize timestamps. This returns a list of timestamp maps;
172 | # within each map, the timestamp values are comparable to each other.
173 | components = clocknorm.normalize(clocktracker=self.clock_tracker,
174 | timestamp_map=timestamp_map)
175 |
176 | # cluster timestamps into clusters that are probably copies of the
177 | # same transmission.
178 | clusters = []
179 | min_component_size = 4 - altitude_dof
180 | for component in components:
181 | if len(component) >= min_component_size: # don't bother with orphan components at all
182 | clusters.extend(_cluster_timestamps(component, min_component_size))
183 |
184 | if not clusters:
185 | return
186 |
187 | # start from the most recent, largest, cluster
188 | result = None
189 | clusters.sort(key=lambda x: (x[0], x[1]))
190 | while clusters and not result:
191 | distinct, cluster_utc, cluster = clusters.pop()
192 |
193 | # accept fewer receivers after 10s
194 | # accept the same number of receivers after MLAT_DELAY - 0.5s
195 | # accept more receivers immediately
196 |
197 | elapsed = cluster_utc - last_result_time
198 | dof = distinct + altitude_dof - 4
199 |
200 | if elapsed < 10.0 and dof < last_result_dof:
201 | break
202 |
203 | if elapsed < (config.MLAT_DELAY - 0.5) and dof == last_result_dof:
204 | break
205 |
206 | # assume 250ft accuracy at the time it is reported
207 | # (this bundles up both the measurement error, and
208 | # that we don't adjust for local pressure)
209 | #
210 | # Then degrade the accuracy over time at ~4000fpm
211 | if decoded.altitude is not None:
212 | altitude_error = 250 * constants.FTOM
213 | elif altitude is not None:
214 | altitude_error = (250 + (cluster_utc - ac.last_altitude_time) * 70) * constants.FTOM
215 | else:
216 | altitude_error = None
217 |
218 | cluster.sort(key=operator.itemgetter(1)) # sort by increasing timestamp (todo: just assume descending..)
219 | r = solver.solve(cluster, altitude, altitude_error,
220 | last_result_position if last_result_position else cluster[0][0].position)
221 | if r:
222 | # estimate the error
223 | ecef, ecef_cov = r
224 | if ecef_cov is not None:
225 | var_est = numpy.trace(ecef_cov)
226 | else:
227 | # this result is suspect
228 | var_est = 100e6
229 |
230 | if var_est > 100e6:
231 | # more than 10km, too inaccurate
232 | continue
233 |
234 | if elapsed < 2.0 and var_est > last_result_var * 1.1:
235 | # less accurate than a recent position
236 | continue
237 |
238 | #if elapsed < 10.0 and var_est > last_result_var * 2.25:
239 | # # much less accurate than a recent-ish position
240 | # continue
241 |
242 | # accept it
243 | result = r
244 |
245 | if not result:
246 | return
247 |
248 | ecef, ecef_cov = result
249 | ac.last_result_position = ecef
250 | ac.last_result_var = var_est
251 | ac.last_result_dof = dof
252 | ac.last_result_time = cluster_utc
253 | ac.mlat_result_count += 1
254 |
255 | if ac.kalman.update(cluster_utc, cluster, altitude, altitude_error, ecef, ecef_cov, distinct, dof):
256 | ac.mlat_kalman_count += 1
257 |
258 | if altitude is None:
259 | _, _, solved_alt = geodesy.ecef2llh(ecef)
260 | glogger.info("{addr:06x} solved altitude={solved_alt:.0f}ft with dof={dof}".format(
261 | addr=decoded.address,
262 | solved_alt=solved_alt*constants.MTOF,
263 | dof=dof))
264 |
265 | for handler in self.coordinator.output_handlers:
266 | handler(cluster_utc, decoded.address,
267 | ecef, ecef_cov,
268 | [receiver for receiver, timestamp, error in cluster], distinct, dof,
269 | ac.kalman)
270 |
271 | if self.pseudorange_file:
272 | cluster_state = []
273 | t0 = cluster[0][1]
274 | for receiver, timestamp, variance in cluster:
275 | cluster_state.append([round(receiver.position[0], 0),
276 | round(receiver.position[1], 0),
277 | round(receiver.position[2], 0),
278 | round((timestamp-t0)*1e6, 1),
279 | round(variance*1e12, 2)])
280 |
281 | state = {'icao': '{a:06x}'.format(a=decoded.address),
282 | 'time': round(cluster_utc, 3),
283 | 'ecef': [round(ecef[0], 0),
284 | round(ecef[1], 0),
285 | round(ecef[2], 0)],
286 | 'distinct': distinct,
287 | 'dof': dof,
288 | 'cluster': cluster_state}
289 |
290 | if ecef_cov is not None:
291 | state['ecef_cov'] = [round(ecef_cov[0, 0], 0),
292 | round(ecef_cov[0, 1], 0),
293 | round(ecef_cov[0, 2], 0),
294 | round(ecef_cov[1, 0], 0),
295 | round(ecef_cov[1, 1], 0),
296 | round(ecef_cov[1, 2], 0),
297 | round(ecef_cov[2, 0], 0),
298 | round(ecef_cov[2, 1], 0),
299 | round(ecef_cov[2, 2], 0)]
300 |
301 | if altitude is not None:
302 | state['altitude'] = round(altitude, 0)
303 | state['altitude_error'] = round(altitude_error, 0)
304 |
305 | json.dump(state, self.pseudorange_file)
306 | self.pseudorange_file.write('\n')
307 |
308 |
@profile.trackcpu
def _cluster_timestamps(component, min_receivers):
    """Given a component that has normalized timestamps:

    {
      receiver: (variance, [(timestamp, utc), ...]), ...
      receiver: (variance, [(timestamp, utc), ...]), ...
    }, ...

    return a list of clusters, where each cluster is a tuple:

    (distinct, first_seen, [(receiver, timestamp, variance, utc), ...])

    with distinct as the number of distinct receivers;
    first_seen as the first UTC time seen in the cluster

    min_receivers is the minimum number of distinct receivers a cluster
    needs before it is emitted.
    """

    #glogger.info("cluster these:")

    # flatten the component into a list of (receiver, timestamp, variance, utc) tuples
    flat_component = []
    for receiver, (variance, timestamps) in component.items():
        for timestamp, utc in timestamps:
            #glogger.info("  {r} {t:.1f}us {e:.1f}us".format(r=receiver.user, t=timestamp*1e6, e=error*1e6))
            flat_component.append((receiver, timestamp, variance, utc))

    # sort by timestamp
    flat_component.sort(key=operator.itemgetter(1))

    # do a rough clustering: groups of items with inter-item spacing of less than 2ms
    # (note: callers only pass non-empty components, so flat_component[0] exists)
    group = [flat_component[0]]
    groups = [group]
    for t in flat_component[1:]:
        if (t[1] - group[-1][1]) > 2e-3:
            group = [t]
            groups.append(group)
        else:
            group.append(t)

    # inspect each group and produce clusters
    # this is about O(n^2)-ish with group size, which
    # is why we try to break up the component into
    # smaller groups first.

    #glogger.info("{n} groups".format(n=len(groups)))

    clusters = []
    for group in groups:
        #glogger.info(" group:")
        #for r, t, e in group:
        #    glogger.info("  {r} {t:.1f}us {e:.1f}us".format(r=r.user, t=t*1e6, e=e*1e6))

        # greedily grow a cluster around the latest remaining item;
        # accepted items are removed from the group, so each item ends
        # up in at most one cluster
        while len(group) >= min_receivers:
            receiver, timestamp, variance, utc = group.pop()
            cluster = [(receiver, timestamp, variance)]
            last_timestamp = timestamp
            distinct_receivers = 1
            first_seen = utc

            #glogger.info("forming cluster from group:")
            #glogger.info("  0 = {r} {t:.1f}us".format(r=head[0].user, t=head[1]*1e6))

            # walk backwards (descending timestamp) through the rest of the group
            for i in range(len(group) - 1, -1, -1):
                receiver, timestamp, variance, utc = group[i]
                #glogger.info("  consider {i} = {r} {t:.1f}us".format(i=i, r=receiver.user, t=timestamp*1e6))
                if (last_timestamp - timestamp) > 2e-3:
                    # Can't possibly be part of the same cluster.
                    #
                    # Note that this is a different test to the rough grouping above:
                    # that looks at the interval betwen _consecutive_ items, so a
                    # group might span a lot more than 2ms!
                    #glogger.info("   discard: >2ms out")
                    break

                # strict test for range, now.
                is_distinct = can_cluster = True
                for other_receiver, other_timestamp, other_variance in cluster:
                    if other_receiver is receiver:
                        #glogger.info("   discard: duplicate receiver")
                        can_cluster = False
                        break

                    # the timestamp difference between two receivers cannot
                    # exceed the propagation time over their separation d
                    # (with a 5% margin plus 1km of slack)
                    d = receiver.distance[other_receiver]
                    if abs(other_timestamp - timestamp) > (d * 1.05 + 1e3) / constants.Cair:
                        #glogger.info("   discard: delta {dt:.1f}us > max {m:.1f}us for range {d:.1f}m".format(
                        #    dt=abs(other_timestamp - timestamp)*1e6,
                        #    m=(d * 1.05 + 1e3) / constants.Cair*1e6,
                        #    d=d))
                        can_cluster = False
                        break

                    if d < 1e3:
                        # if receivers are closer than 1km, then
                        # only count them as one receiver for the 3-receiver
                        # requirement
                        #glogger.info("   not distinct vs receiver {r}".format(r=other_receiver.user))
                        is_distinct = False

                if can_cluster:
                    #glogger.info("   accept")
                    cluster.append((receiver, timestamp, variance))
                    first_seen = min(first_seen, utc)
                    del group[i]
                    if is_distinct:
                        distinct_receivers += 1

            if distinct_receivers >= min_receivers:
                cluster.reverse()  # make it ascending timestamps again
                clusters.append((distinct_receivers, first_seen, cluster))

    return clusters
420 |
--------------------------------------------------------------------------------
/mlat/server/kalman.py:
--------------------------------------------------------------------------------
1 | # -*- mode: python; indent-tabs-mode: nil -*-
2 |
3 | # Part of mlat-server: a Mode S multilateration server
4 | # Copyright (C) 2015 Oliver Jowett
5 |
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation, either version 3 of the
9 | # License, or (at your option) any later version.
10 |
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 |
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | """
20 | Kalman tracking filter to smooth the results of multilateration and derive
21 | speed/heading information.
22 | """
23 |
24 | import math
25 | import numpy
26 | import pykalman.unscented
27 | import functools
28 | import logging
29 |
30 | from mlat import geodesy, constants, profile
31 |
32 | glogger = logging.getLogger("kalman")
33 |
34 |
class KalmanState(object):
    """Kalman filter state for a single aircraft.

    Should be subclassed to provide implementations of
    set_initial_state(), transition_function(),
    transition_covariance().

    The state matrix is assumed to have position/velocity
    as the first 6 components.
    """

    # defaults:
    # minimum DOF to update a filter while acquiring
    min_acquiring_dof = 1
    # minimum DOF to update a filter while tracking
    min_tracking_dof = 0
    # Mahalanobis distance threshold for outliers
    outlier_mahalanobis_distance = 15.0
    # position error threshold for switching from acquiring to tracking, meters
    min_acquiring_position_error = 3e3
    # velocity error threshold for switching from acquiring to tracking, m/s
    min_acquiring_velocity_error = 50
    # position error threshold for switching from tracking to acquiring, meters
    max_tracking_position_error = 5e3
    # velocity error threshold for switching from tracking to acquiring, m/s
    max_tracking_velocity_error = 75
    # process noise, m/s^2 or m/s^3
    process_noise = 0.10

    def __init__(self, icao):
        # ICAO address of the aircraft this filter tracks (used in log messages)
        self.icao = icao
        self._reset()

    def _reset(self):
        """Reset to the initial empty state: no filter estimate, acquiring."""

        # the filter itself:
        self._mean = None
        self._cov = None
        self._acquiring = True
        self._outliers = 0
        self.last_update = None

        # does the filter have useful data?
        self.valid = False

        # most recent values derived from filter state
        self.position = None          # ECEF
        self.velocity = None          # ECEF
        self.position_error = None    # meters
        self.velocity_error = None    # m/s

        # .. some derived values in more useful reference frames
        self.position_llh = None      # LLH
        self.velocity_enu = None      # ENU
        self.heading = None           # degrees
        self.ground_speed = None      # m/s
        self.vertical_speed = None    # m/s

    def observation_function_without_altitude(self, state, *, positions):
        """Kalman filter observation function.

        Given state (position,...) and a list of N receiver positions,
        return N-1 pseudorange observations; the pseudoranges are
        relative to the first receiver's pseudorange."""

        x, y, z = state[0:3]

        n = len(positions)
        obs = numpy.zeros(n-1)

        # range from the state position to the first (reference) receiver
        rx, ry, rz = positions[0]
        zero_range = ((rx - x)**2 + (ry - y)**2 + (rz - z)**2)**0.5

        for i in range(1, n):
            rx, ry, rz = positions[i]
            obs[i-1] = ((rx - x)**2 + (ry - y)**2 + (rz - z)**2)**0.5 - zero_range

        return obs

    def observation_function_with_altitude(self, state, *, positions):
        """Kalman filter observation function.

        Given state (position,...) and a list of N receiver positions,
        return an altitude observation and N-1 pseudorange observations; the
        pseudoranges are relative to the first receiver's pseudorange."""

        x, y, z = state[0:3]

        n = len(positions)
        obs = numpy.zeros(n)

        # obs[0] is the geometric altitude of the state position
        _, _, obs[0] = geodesy.ecef2llh((x, y, z))

        # range from the state position to the first (reference) receiver
        rx, ry, rz = positions[0]
        zero_range = ((rx - x)**2 + (ry - y)**2 + (rz - z)**2)**0.5

        for i in range(1, n):
            rx, ry, rz = positions[i]
            obs[i] = ((rx - x)**2 + (ry - y)**2 + (rz - z)**2)**0.5 - zero_range

        return obs

    def _update_derived(self):
        """Update derived values from self._mean and self._cov"""

        self.position = self._mean[0:3]
        self.velocity = self._mean[3:6]

        # position/velocity error estimates from the covariance traces;
        # a negative trace indicates a numerically bad covariance, so
        # report a huge error instead of taking sqrt of a negative
        pe = numpy.trace(self._cov[0:3, 0:3])
        self.position_error = 1e6 if pe < 0 else math.sqrt(pe)
        ve = numpy.trace(self._cov[3:6, 3:6])
        self.velocity_error = 1e6 if ve < 0 else math.sqrt(ve)

        lat, lon, alt = self.position_llh = geodesy.ecef2llh(self.position)

        # rotate velocity into the local tangent plane
        lat_r = lat * constants.DTOR
        lon_r = lon * constants.DTOR
        C = numpy.array([[-math.sin(lon_r), math.cos(lon_r), 0],
                         [math.sin(-lat_r) * math.cos(lon_r), math.sin(-lat_r) * math.sin(lon_r), math.cos(-lat_r)],
                         [math.cos(-lat_r) * math.cos(lon_r), math.cos(-lat_r) * math.sin(lon_r), -math.sin(-lat_r)]])
        east, north, up = self.velocity_enu = numpy.dot(C, self.velocity.T).T

        # extract speeds, headings
        self.heading = math.atan2(east, north) * 180.0 / math.pi
        if self.heading < 0:
            self.heading += 360
        self.ground_speed = math.sqrt(north**2 + east**2)
        self.vertical_speed = up

        self.valid = True

    @profile.trackcpu
    def update(self, position_time, measurements, altitude, altitude_error,
               leastsquares_position, leastsquares_cov, distinct, dof):
        """Update the filter given a new set of observations.

        position_time: the time of these measurements, UTC seconds
        measurements: a list of (receiver, timestamp, variance) tuples
        altitude: reported altitude in meters, or None
        altitude_error: reported altitude error in meters, or None
        leastsquares_position: the ECEF position computed by the least-squares
        solver
        leastsquares_cov: the covariance of leastsquares_position
        distinct: the number of distinct receivers
        dof: the number of degrees of freedom in the solution

        Returns True if the filter now holds a valid (tracking) state,
        False if the observation was rejected or the filter is still
        acquiring.
        """

        if self._acquiring and dof < self.min_acquiring_dof:
            # don't trust this result until we have converged
            return False

        if self._mean is None:
            # acquire an initial position
            glogger.info("{icao:06X} acquiring.".format(icao=self.icao))
            self.last_update = position_time
            self.set_initial_state(leastsquares_position, leastsquares_cov)
            return False

        if dof < self.min_tracking_dof:
            # don't use this one
            return False

        # update filter
        # pseudorange of the first (reference) measurement; all other
        # pseudoranges are expressed relative to this one
        zero_pr = measurements[0][1] * constants.Cair
        positions = [measurements[0][0].position]

        n = len(measurements)

        if altitude is None:
            obs_fn = self.observation_function_without_altitude
            obs = numpy.zeros(n-1)
            obs_var = numpy.zeros(n-1)

            for i in range(1, n):
                receiver, timestamp, variance = measurements[i]
                positions.append(receiver.position)
                obs[i-1] = timestamp * constants.Cair - zero_pr
                obs_var[i-1] = (variance + measurements[0][2]) * constants.Cair**2
        else:
            obs_fn = self.observation_function_with_altitude
            obs = numpy.zeros(n)
            obs_var = numpy.zeros(n)

            # first observation is the reported altitude
            obs[0] = altitude
            obs_var[0] = altitude_error**2

            for i in range(1, n):
                receiver, timestamp, variance = measurements[i]
                positions.append(receiver.position)
                obs[i] = timestamp * constants.Cair - zero_pr
                obs_var[i] = (variance + measurements[0][2]) * constants.Cair**2

        obs_covar = numpy.diag(obs_var)

        dt = position_time - self.last_update
        if dt < 0:
            # ignore observations that go backwards in time
            return False

        try:
            trans_covar = self.transition_covariance(dt)
            transition_function = functools.partial(self.transition_function,
                                                    dt=dt)
            observation_function = functools.partial(obs_fn,
                                                     positions=positions)

            #
            # This is extracted from pykalman's
            # AdditiveUnscentedFilter.filter_update() because we want to access
            # the intermediate (prediction) result to decide whether to accept
            # this observation or not.
            #

            # make sigma points
            moments_state = pykalman.unscented.Moments(self._mean, self._cov)
            points_state = pykalman.unscented.moments2points(moments_state)

            # Predict.
            (_, moments_pred) = (
                pykalman.unscented.unscented_filter_predict(
                    transition_function=transition_function,
                    points_state=points_state,
                    sigma_transition=trans_covar
                )
            )
            points_pred = pykalman.unscented.moments2points(moments_pred)

            # Decide whether this is an outlier:
            # Get the predicted filter state mean and covariance
            # as an observation:
            (obs_points_pred, obs_moments_pred) = (
                pykalman.unscented.unscented_transform(
                    points_pred, observation_function,
                    sigma_noise=obs_covar
                )
            )

            # Find the Mahalanobis distance between the predicted observation
            # and our new observation, using the predicted observation's
            # covariance as our expected distribution.
            innovation = obs - obs_moments_pred.mean
            vi = numpy.linalg.inv(obs_moments_pred.covariance)
            md = math.sqrt(numpy.dot(numpy.dot(innovation.T, vi), innovation))

            # If the Mahalanobis distance is very large this observation is an
            # outlier
            if md > self.outlier_mahalanobis_distance:
                glogger.info("{icao:06X} outlier: md={md:.1f}".format(
                    icao=self.icao,
                    md=md))

                # Reset only after 3+ consecutive outliers AND at least 15s
                # since the last accepted update; otherwise just drop this
                # observation.
                self._outliers += 1
                if self._outliers < 3 or (position_time - self.last_update) < 15.0:
                    # don't use this one
                    return False
                glogger.info("{icao:06X} reset due to outliers.".format(icao=self.icao))
                self._reset()
                return False

            self._outliers = 0

            # correct filter state using the current observation
            (self._mean, self._cov) = (
                pykalman.unscented.unscented_filter_correct(
                    observation_function=observation_function,
                    moments_pred=moments_pred,
                    points_pred=points_pred,
                    observation=obs,
                    sigma_observation=obs_covar
                )
            )

            self.last_update = position_time
            self._update_derived()

            # converged enough to start reporting?
            # (hysteresis: acquiring->tracking and tracking->acquiring use
            # different thresholds)
            if ((self._acquiring and
                 self.position_error < self.min_acquiring_position_error and
                 self.velocity_error < self.min_acquiring_velocity_error)):
                glogger.info("{icao:06X} acquired.".format(icao=self.icao))
                self._acquiring = False
            elif (not self._acquiring and
                  (self.position_error > self.max_tracking_position_error or
                   self.velocity_error > self.max_tracking_velocity_error)):
                glogger.info("{icao:06X} tracking lost".format(icao=self.icao))
                self._acquiring = True

            self.valid = not self._acquiring
            return self.valid

        except Exception:
            # numerical failures (e.g. singular covariance) land here;
            # log the full state and start over from scratch
            glogger.exception("Kalman filter update failed. " +
                              "dt={dt} obs={obs} obs_covar={obs_covar} mean={mean} covar={covar}".format(
                                  dt=dt,
                                  obs=obs,
                                  obs_covar=obs_covar,
                                  mean=self._mean,
                                  covar=self._cov))
            self._reset()
            return False

    def set_initial_state(self, leastsquares_position, leastsquares_cov):
        """Set the initial state of the filter from a least-squares result.

        Should set self._mean and self._cov.
        """

        raise NotImplementedError()

    def transition_function(self, state, *, dt):
        """Kalman filter transition function.

        Given the current state and a timestep, return the
        next predicted state."""

        raise NotImplementedError()

    def transition_covariance(self, dt):
        """Kalman filter transition covariance.

        Given a timestep, return the covariance of the
        process noise."""

        raise NotImplementedError()
358 |
359 |
class KalmanStateCV(KalmanState):
    """Kalman filter with a constant-velocity model.

    State vector: (x, y, z, vx, vy, vz) — ECEF position in meters and
    velocity in m/s.
    """

    accel_noise = 0.5  # m/s^2

    def set_initial_state(self, leastsquares_position, leastsquares_cov):
        """State is: (position, velocity)"""

        # position from the least-squares fix; velocity unknown
        # (zero mean, large variance)
        self._mean = numpy.concatenate([leastsquares_position,
                                        numpy.zeros(3)])

        initial_cov = numpy.zeros((6, 6))
        initial_cov[0:3, 0:3] = leastsquares_cov * 4
        initial_cov[3:6, 3:6] = numpy.eye(3) * 200**2
        self._cov = initial_cov

    def transition_function(self, state, *, dt):
        # advance position by velocity; velocity is constant
        pos = state[0:3]
        vel = state[3:6]
        return numpy.concatenate([pos + vel * dt, vel])

    def transition_covariance(self, dt):
        eye3 = numpy.eye(3)
        q = numpy.block([[0.25 * dt**4 * eye3, 0.5 * dt**3 * eye3],
                         [0.5 * dt**3 * eye3, dt**2 * eye3]])

        # we assume that process_noise is white noise (uncorrelated) and so
        # scale by dt not dt**2 here
        return q * self.process_noise**2 * dt
388 |
389 |
class KalmanStateCA(KalmanState):
    """Kalman filter with a constant-acceleration model.

    State vector: (position, velocity, acceleration) — ECEF, nine
    components.
    """

    def set_initial_state(self, leastsquares_position, leastsquares_cov):
        """State is: (position, velocity, acceleration)"""

        # position from the least-squares fix; velocity and acceleration
        # unknown (zero mean, large / unit variance respectively)
        self._mean = numpy.concatenate([leastsquares_position,
                                        numpy.zeros(6)])

        initial_cov = numpy.zeros((9, 9))
        initial_cov[0:3, 0:3] = leastsquares_cov * 4
        initial_cov[3:6, 3:6] = numpy.eye(3) * 200**2
        initial_cov[6:9, 6:9] = numpy.eye(3)
        self._cov = initial_cov

    def transition_function(self, state, *, dt):
        # standard constant-acceleration kinematics
        pos = state[0:3]
        vel = state[3:6]
        acc = state[6:9]
        return numpy.concatenate([pos + vel * dt + 0.5 * acc * dt**2,
                                  vel + acc * dt,
                                  acc])

    def transition_covariance(self, dt):
        eye3 = numpy.eye(3)
        q = numpy.block([[0.25 * dt**4 * eye3, 0.5 * dt**3 * eye3, 0.5 * dt**2 * eye3],
                         [0.5 * dt**3 * eye3, dt**2 * eye3, dt * eye3],
                         [0.5 * dt**2 * eye3, dt * eye3, eye3]])

        # we assume that process_noise is white noise (uncorrelated) and so
        # scale by dt not dt**2 here
        return q * self.process_noise**2 * dt
435 |
--------------------------------------------------------------------------------