├── .gitignore
├── Area
├── Grid.py
├── Vessel.py
└── __init__.py
├── BinarySearch.py
├── CPA
├── Cpa.py
├── DCPA_TCPA.py
├── Distance.py
└── __init__.py
├── DataProcess
├── FinalData.py
├── OptimizeMetaData.py
├── SourceData.py
├── channel_geojson
│ └── map.geojson
└── data_process.py
├── README.md
├── RiskOfCollisionPicture
└── 2016-10-01
│ └── Distribution_of_collision_risk_in_Yangtze_river_estuary(2016-10-01).png
├── ShipDataInsert
├── AsyncMotorDataSet
│ └── AsyncShipData.py
└── SyncPymongoDataSet
│ ├── ShipDataInsert.py
│ └── mongo_example.py
├── ShipDataQuery
├── .DS_Store
├── ChinaCoastalData.py
└── ComplexEncoder.py
├── _Time
├── ParseTime.py
├── TimeStamp.py
└── __init__.py
├── main.py
├── test
├── Readtest.txt
├── test_async_query.py
├── test_grid_index.py
├── test_sync_query.py
└── test_time.md
└── test_grid.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
--------------------------------------------------------------------------------
/Area/Grid.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 |
5 |
class Grid:
    """One square cell in a regular lon/lat grid.

    Cells are numbered row-major: area_id = lat_row * n_lon + lon_col, so the
    lower-left corner of a cell can be located directly from its id without a
    nested search over the longitude/latitude axes.
    """

    def __init__(self, area_id, *, gridlon_, gridlat_, grid_delta):
        # Grid attributes.
        self.area_id = area_id
        self.gridlon_ = gridlon_        # 1-D array of cell lower-left longitudes
        self.gridlat_ = gridlat_        # 1-D array of cell lower-left latitudes
        self.grid_delta = grid_delta    # cell size in degrees
        self.gridlon__length = self.gridlon_.size

        # Lower-left and upper-right corners of this cell.
        self.Leftlon_ = self.gridlon_[area_id % self.gridlon__length]
        self.Leftlat_ = self.gridlat_[area_id // self.gridlon__length]
        self.Rightlon_ = self.Leftlon_ + self.grid_delta
        self.Rightlat_ = self.Leftlat_ + self.grid_delta

        # Classify the cell: 3 = interior, 2 = edge (exactly one axis on the
        # grid boundary), 1 = corner (both axes on the boundary).
        # FIX: the original applied four successive ``if`` statements whose
        # later assignments clobbered the earlier ones, so a cell lying on a
        # latitude edge only ended up with flag 1 (corner); none of the
        # corner conditions in need_id() then matched and it returned None.
        on_lon_edge = self.Leftlon_ == min(self.gridlon_) or self.Leftlon_ == max(self.gridlon_)
        on_lat_edge = self.Leftlat_ == min(self.gridlat_) or self.Leftlat_ == max(self.gridlat_)
        self.flag = 3
        if on_lon_edge or on_lat_edge:
            self.flag = 2
        if on_lon_edge and on_lat_edge:
            self.flag = 1
        self.need_ID = None

    def need_id(self):
        """Return the ids of the cells to scan for ship pairs.

        The set is the cell itself, its right neighbour, and the three cells
        in the row above (clipped at edges/corners), so each unordered pair
        of cells is examined exactly once by the caller.
        """
        if self.flag == 3:  # interior cell, flag=3
            id_1 = self.area_id
            id_2 = self.area_id + 1
            id_3 = self.area_id + self.gridlon__length - 1
            id_4 = self.area_id + self.gridlon__length
            id_5 = self.area_id + self.gridlon__length + 1
            self.need_ID = [id_1, id_2, id_3, id_4, id_5]
        elif self.flag == 2:  # the 4 kinds of edge cell, flag=2
            if self.Leftlat_ == min(self.gridlat_):
                # bottom edge: full upper row exists
                id_1 = self.area_id
                id_2 = self.area_id + 1
                id_3 = self.area_id + self.gridlon__length - 1
                id_4 = self.area_id + self.gridlon__length
                id_5 = self.area_id + self.gridlon__length + 1
                self.need_ID = [id_1, id_2, id_3, id_4, id_5]
            elif self.Leftlat_ == max(self.gridlat_):
                # top edge: no row above
                id_1 = self.area_id
                id_2 = self.area_id + 1
                self.need_ID = [id_1, id_2]
            elif self.Leftlon_ == min(self.gridlon_):
                # left edge: no upper-left neighbour
                id_1 = self.area_id
                id_2 = self.area_id + 1
                id_3 = self.area_id + self.gridlon__length
                id_4 = self.area_id + self.gridlon__length + 1
                self.need_ID = [id_1, id_2, id_3, id_4]
            elif self.Leftlon_ == max(self.gridlon_):
                # right edge: no right / upper-right neighbour
                id_1 = self.area_id
                id_2 = self.area_id + self.gridlon__length - 1
                id_3 = self.area_id + self.gridlon__length
                self.need_ID = [id_1, id_2, id_3]
        elif self.flag == 1:  # the 4 corner cells, flag=1
            if self.Leftlon_ == min(self.gridlon_) and self.Leftlat_ == min(self.gridlat_):
                id_1 = self.area_id
                id_2 = self.area_id + 1
                id_3 = self.area_id + self.gridlon__length
                id_4 = self.area_id + self.gridlon__length + 1
                self.need_ID = [id_1, id_2, id_3, id_4]
            elif self.Leftlon_ == max(self.gridlon_) and self.Leftlat_ == min(self.gridlat_):
                id_1 = self.area_id
                id_2 = self.area_id + self.gridlon__length - 1
                id_3 = self.area_id + self.gridlon__length
                self.need_ID = [id_1, id_2, id_3]
            elif self.Leftlon_ == min(self.gridlon_) and self.Leftlat_ == max(self.gridlat_):
                id_1 = self.area_id
                id_2 = self.area_id + 1
                self.need_ID = [id_1, id_2]
            elif self.Leftlon_ == max(self.gridlon_) and self.Leftlat_ == max(self.gridlat_):
                id_1 = self.area_id
                self.need_ID = [id_1]
        return self.need_ID
--------------------------------------------------------------------------------
/Area/Vessel.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 | from Area.Grid import Grid
5 |
6 |
class Vessel(Grid):
    """One AIS position report, bound to the grid cell it falls in."""

    def __init__(self, area_id, args, *, gridlon_, gridlat_, grid_delta):
        # args row layout: ['MMSI', 'TIME', 'LON', 'LAT', 'COG', 'SOG']
        self.MMSI, self.TIME, self.LON, self.LAT, self.COG, self.SOG = args[:6]
        super().__init__(
            area_id,
            gridlon_=gridlon_,
            gridlat_=gridlat_,
            grid_delta=grid_delta,
        )
--------------------------------------------------------------------------------
/Area/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/8
--------------------------------------------------------------------------------
/BinarySearch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 | from _Time.ParseTime import parse_time
5 |
6 |
def BinarySearch(sequence, value):
    """Binary search over records sorted ascending by their "TIME" key.

    sequence: sequence of dicts ordered by "TIME" (contiguous storage)
    value: the TIME value to locate
    return: index of the matching record, or None when it is absent
    """
    lo, hi = 0, len(sequence) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        # interpolation-search variant (kept from the original, unused):
        # mid = int(lo + (value - sequence[lo])/(sequence[hi] - sequence[lo])*(hi-lo))
        current = sequence[mid]["TIME"]
        if current < value:
            lo = mid + 1
        elif current > value:
            hi = mid - 1
        else:
            return mid
    return None
--------------------------------------------------------------------------------
/CPA/Cpa.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 | from geohelper import bearing
5 | from CPA.Distance import get_distance_hav
6 | from math import sqrt, sin, cos, pi, acos
7 |
8 |
def cpa(Tar_Ship, Ref_Ship):
    """Compute DCPA and TCPA between a target ship and a reference ship.

    Tar_Ship: target vessel (provides LAT/LON/COG/SOG attributes)
    Ref_Ship: reference vessel
    return: (DCPA, TCPA)
    """
    tar_lat, tar_lon = float(Tar_Ship.LAT), float(Tar_Ship.LON)
    tar_cog, tar_sog = float(Tar_Ship.COG), float(Tar_Ship.SOG)
    ref_lat, ref_lon = float(Ref_Ship.LAT), float(Ref_Ship.LON)
    ref_cog, ref_sog = float(Ref_Ship.COG), float(Ref_Ship.SOG)

    # Great-circle distance between the two ships.
    dist = get_distance_hav(tar_lat, tar_lon, ref_lat, ref_lon)

    # Course difference, normalised into (-180, 180].
    alpha = tar_cog - ref_cog
    if alpha > 180:
        alpha -= 360
    elif alpha < -180:
        alpha += 360

    # Relative speed between the two ships (law of cosines on the speed triangle).
    rel_speed = sqrt(tar_sog ** 2 + ref_sog ** 2 - 2 * tar_sog * ref_sog * cos(alpha / 180.0 * pi))
    # Angle Q between own course and the relative-motion line.
    q = acos((rel_speed ** 2 + tar_sog ** 2 - ref_sog ** 2) / (2 * rel_speed * tar_sog)) * 180.0 / pi

    # Relative course.
    rel_course = tar_cog + q if alpha > 0 else tar_cog - q

    # Relative bearing from the target ship to the reference ship.
    rel_bearing = bearing.initial_compass_bearing(tar_lat, tar_lon, ref_lat, ref_lon) - rel_course
    dcpa = dist * sin(rel_bearing * pi / 180.0)
    tcpa = dist * cos(rel_bearing * pi / 180.0) / rel_speed
    return dcpa, tcpa
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/CPA/DCPA_TCPA.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/18
4 | import numpy as np
5 |
6 |
class CPA:
    """
    DCPA/TCPA geometry between a Target Ship and a Reference Ship.

    1. distance():
        Haversine great-circle distance between the two ships (n miles).

    2. mid_point():
        Half-way point along the great circle between the two ships,
        as (mid_lat, mid_lon) in degrees.

    3. bearing():
        Initial compass bearing from the target ship to the reference
        ship, degrees in [0, 360).

    4. cpa():
        (DCPA, TCPA) between the two ships.

    Both constructor arguments only need LAT/LON/COG/SOG attributes
    (e.g. Area.Vessel objects).
    """
    EARTH_RADIUS = 6378.1  # equatorial Earth radius, km
    n_mile = 1.852         # km per nautical mile

    def __init__(self, Tar_Ship, Ref_Ship):
        self.Tar_Lat = float(Tar_Ship.LAT)
        self.Tar_Lon = float(Tar_Ship.LON)
        self.Ref_Lat = float(Ref_Ship.LAT)
        self.Ref_Lon = float(Ref_Ship.LON)
        self.Tar_Cog = float(Tar_Ship.COG)
        self.Tar_Sog = float(Tar_Ship.SOG)
        self.Ref_Cog = float(Ref_Ship.COG)
        self.Ref_Sog = float(Ref_Ship.SOG)

    def haversine(self, theta):
        """hav(theta) = sin^2(theta / 2), theta in radians."""
        return pow(np.sin(theta / 2), 2)

    def distance(self):
        """
        Haversine great-circle distance between Tar_Ship and Ref_Ship.
        return: distance in nautical miles
        """
        Tar_Lat, Tar_Lon = np.radians((self.Tar_Lat, self.Tar_Lon))
        Ref_Lat, Ref_Lon = np.radians((self.Ref_Lat, self.Ref_Lon))
        diff_lon = np.fabs(Tar_Lon - Ref_Lon)
        diff_lat = np.fabs(Tar_Lat - Ref_Lat)
        h = self.haversine(diff_lat) + np.cos(Tar_Lat) * np.cos(Ref_Lat) * self.haversine(diff_lon)
        distance = 2 * CPA.EARTH_RADIUS * np.arcsin(np.sqrt(h))  # km
        return distance / CPA.n_mile

    def mid_point(self):
        """
        Midpoint of the great-circle path between the two ships.
        return: (mid_lat, mid_lon) in degrees
        """
        Tar_Lat, Tar_Lon = np.radians((self.Tar_Lat, self.Tar_Lon))
        Ref_Lat, Ref_Lon = np.radians((self.Ref_Lat, self.Ref_Lon))
        diff_lon = Ref_Lon - Tar_Lon
        Bx = np.cos(Ref_Lat) * np.cos(diff_lon)
        By = np.cos(Ref_Lat) * np.sin(diff_lon)
        x = np.sqrt((np.cos(Tar_Lat) + Bx) * (np.cos(Tar_Lat) + Bx) + pow(By, 2))
        y = np.sin(Tar_Lat) + np.sin(Ref_Lat)
        mid_lat = 180 / np.pi * np.arctan2(y, x)
        mid_lon = 180 / np.pi * (Tar_Lon + np.arctan2(By, np.cos(Tar_Lat) + Bx))
        return mid_lat, mid_lon

    def bearing(self):
        """Initial compass bearing from Tar_Ship to Ref_Ship, degrees [0, 360)."""
        tar_lat, ref_lat = np.radians((self.Tar_Lat, self.Ref_Lat))
        diff_lon = np.radians(self.Ref_Lon - self.Tar_Lon)

        x = np.sin(diff_lon) * np.cos(ref_lat)
        y = (np.cos(tar_lat) * np.sin(ref_lat)) - (np.sin(tar_lat) * np.cos(ref_lat) * np.cos(diff_lon))

        inital_bearing = np.arctan2(x, y)
        inital_bearing = np.rad2deg(inital_bearing)
        compass_bearing = (inital_bearing + 360) % 360
        return compass_bearing

    def cpa(self):
        """
        Compute DCPA and TCPA between the two ships.
        return: (DCPA, TCPA) — DCPA in n miles; TCPA in hours if SOG is in
        knots (units follow the inputs).
        Raises on degenerate geometry (zero relative speed or a stationary
        target ship) via division by zero / arccos domain error; callers
        wrap this in try/except.
        """
        # Course difference, normalised into (-180, 180].
        alpha = self.Tar_Cog - self.Ref_Cog
        if alpha > 180:
            alpha -= 360
        elif alpha < -180:
            alpha += 360

        # Relative speed between the two ships (law of cosines).
        # FIX: degrees -> radians is alpha * pi / 180; the original divided
        # by (180 * pi), disagreeing with CPA/Cpa.py and the geometry.
        temp = 2 * self.Tar_Sog * self.Ref_Sog * np.cos(alpha * np.pi / 180)
        Relative_Speed = np.sqrt(pow(self.Tar_Sog, 2) + pow(self.Ref_Sog, 2) - temp)

        # Angle Q between own course and the relative-motion line.
        x = pow(Relative_Speed, 2) + pow(self.Tar_Sog, 2) - pow(self.Ref_Sog, 2)
        y = 2 * Relative_Speed * self.Tar_Sog
        Q = np.arccos(x / y) * 180.0 / np.pi

        # Relative course.
        if alpha > 0:
            Relative_Course = self.Tar_Cog + Q
        else:
            Relative_Course = self.Tar_Cog - Q

        # Relative bearing.
        Bearing = self.bearing() - Relative_Course

        # DCPA and TCPA; the distance is computed once and reused.
        distance = self.distance()
        DCPA = distance * np.sin(Bearing * np.pi / 180.0)
        TCPA = distance * np.cos(Bearing * np.pi / 180.0) / Relative_Speed

        return DCPA, TCPA
116 |
117 |
--------------------------------------------------------------------------------
/CPA/Distance.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 | from math import sin, radians, fabs, cos, asin, sqrt, atan2, pi
5 | from geohelper import bearing
# EARTH_RADIUS = 6371  # mean Earth radius, 6371 km
EARTH_RADIUS = 6378.1  # equatorial Earth radius, km
# 1 nm = 1.852 km
n_mile = 1.852  # kilometres per nautical mile
10 |
11 |
def hav(theta):
    """Haversine of an angle: hav(theta) = sin^2(theta / 2), theta in radians."""
    return sin(theta / 2) ** 2
15 |
16 |
def get_distance_hav(lat0, lng0, lat1, lng1):
    """Great-circle distance between two points via the haversine formula.

    Inputs are degrees; the result is in nautical miles.
    """
    # Degrees -> radians.
    lat0, lat1, lng0, lng1 = (radians(v) for v in (lat0, lat1, lng0, lng1))

    d_lng = fabs(lng0 - lng1)
    d_lat = fabs(lat0 - lat1)
    h = hav(d_lat) + cos(lat0) * cos(lat1) * hav(d_lng)
    # 2R * arcsin(sqrt(h)) gives kilometres; divide by km-per-nm for n miles.
    return 2 * EARTH_RADIUS * asin(sqrt(h)) / n_mile
31 |
32 |
def mid_point(lat0, lon0, lat1, lon1):
    """Half-way point on the great circle between two positions.

    Inputs and outputs are degrees.
    return: (mid_lat, mid_lon)
    """
    # Longitude difference, in radians.
    d_lon = radians(lon1 - lon0)
    # Remaining inputs to radians (lon1 is only needed via d_lon).
    phi0 = radians(lat0)
    phi1 = radians(lat1)
    lam0 = radians(lon0)

    bx = cos(phi1) * cos(d_lon)
    by = cos(phi1) * sin(d_lon)
    x = sqrt((cos(phi0) + bx) ** 2 + by ** 2)
    y = sin(phi0) + sin(phi1)
    mid_lat = 180 / pi * atan2(y, x)
    mid_lon = 180 / pi * (lam0 + atan2(by, cos(phi0) + bx))
    return mid_lat, mid_lon
52 |
--------------------------------------------------------------------------------
/CPA/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/5
--------------------------------------------------------------------------------
/DataProcess/FinalData.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/6/13
4 | import numpy as np
5 |
6 | from CPA.DCPA_TCPA import CPA
7 |
8 |
9 | # 计算DCPA和TCPA
def calculation(_dict):
    """Compute DCPA/TCPA for every ship pair sharing adjacent grid cells.

    _dict: one AIS time slice, {"TIME": ..., "SHIP_INFO": [Vessel, ...]}
    return: list of result dicts for pairs with |DCPA| <= 1 and
            0 <= TCPA <= 0.1
    """
    _vessel = _dict["SHIP_INFO"]
    length = len(_vessel)
    results = []
    for num in range(length):
        tar = _vessel[num]
        # Hoisted: need_id() is invariant over the inner loop; the original
        # recomputed it for every candidate pair.
        neighbours = tar.need_id()
        for _next_num in range(num + 1, length):
            ref = _vessel[_next_num]
            # Same short-circuit order as before: the membership test only
            # runs when the MMSIs differ.
            if ref.MMSI == tar.MMSI or ref.area_id not in neighbours:
                continue
            try:
                _cpa = CPA(tar, ref)
                dcpa, tcpa = _cpa.cpa()
                if np.fabs(dcpa) <= 1 and 0 <= tcpa <= 0.1:
                    # Hoisted: mid_point() was previously computed twice.
                    mid_lat, mid_lon = _cpa.mid_point()
                    results.append({
                        "Tar_Ship": tar.MMSI,
                        "Ref_Ship": ref.MMSI,
                        "mid_lon": mid_lon,
                        "mid_lat": mid_lat,
                        "distance": _cpa.distance(),
                        "DCPA": dcpa,
                        "TCPA": tcpa
                    })
            except Exception as e:
                # Best-effort: degenerate geometry (e.g. zero relative speed)
                # raises inside CPA; skip the pair and keep processing.
                print("Reason: ", e)
    return results
--------------------------------------------------------------------------------
/DataProcess/OptimizeMetaData.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author: lph time:2019/6/29
# Drop AIS records that fall inside the Yangtze channel polygons: points
# contained in any GeoJSON polygon are removed, and the remainder is written
# out as the meta-data CSV.
import json
import numpy as np
import pandas as pd
import matplotlib.path as mplPath


origin_path = './Temporary_Data/2016-10-01.csv'
geojson_path = './channel_geojson/map.geojson'

origin_data = pd.read_csv(origin_path)

with open(geojson_path) as f:
    geojson_data = json.load(f)


longitude = origin_data.LON.values
latitude = origin_data.LAT.values
points = np.column_stack((longitude, latitude))  # (N, 2) lon/lat pairs
PolygonLen = len(geojson_data['features'])

for i in range(PolygonLen):
    # Outer ring of the i-th polygon feature.
    polygon = geojson_data['features'][i]['geometry']['coordinates'][0]
    path = mplPath.Path(polygon)
    selector = path.contains_points(points)
    index = origin_data[selector].index
    # Keep `points` and `origin_data` aligned: both drop the same rows, so
    # the boolean selector stays positionally valid on the next iteration.
    points = np.delete(points, np.where(selector), axis=0)
    origin_data.drop(index, inplace=True)

origin_data.to_csv('./Meta_Data/2016-10-01.csv', index=None)
33 |
34 |
--------------------------------------------------------------------------------
/DataProcess/SourceData.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/19
4 | import csv
5 | import numpy as np
6 | from datetime import timedelta
7 |
8 | from _Time.TimeStamp import date_range
9 | from _Time.ParseTime import parse_time
10 | from BinarySearch import BinarySearch
11 | from Area.Vessel import Vessel
12 |
13 | #************************************************************************************************
14 | # SourceData.py
15 | # Parameter:
16 | # path(元数据路径): "../ShipDataQuery/ChinaCoastalData/year-month-day.csv"
17 | # longitude(研究区域经度范围) = np.arange(...)
18 | # latitude(研究区域纬度范围) = np.arange(...)
19 | # delta(网格粒度) = 0.5
20 | # **kwargs(时间范围) = start、end、step
21 | # Return:
22 | # grids:
23 | # 存储网格及网格中船舶的list
24 | # 数据形式:
25 | # [
26 | # {"AIS delta_time": [Vessel_1(area_id, args, longitude, latitude, delta), ...]},
27 | # ...... ...... ...... ,
28 | # {"AIS delta_time": [Vessel_i(area_id, args, longitude, latitude, delta), ...]}
29 | # ]
30 | #************************************************************************************************
31 |
32 |
def source_data(path, *, longitude, latitude, delta, **kwargs):
    """Read one day of AIS records and bucket them by time slot and grid cell.

    path:      CSV with rows ['MMSI', 'TIME', 'LON', 'LAT', 'COG', 'SOG']
    longitude: array of grid lower-left longitudes (study area)
    latitude:  array of grid lower-left latitudes
    delta:     grid cell size in degrees
    kwargs:    'start', 'end', 'step' — time range and slot resolution
    return:    [{"TIME": t, "SHIP_INFO": [Vessel, ...]}, ...] one entry per slot
    """
    lon_length = longitude.size
    # One bucket per time step; each bucket collects the vessels whose
    # (rounded) report time falls in that slot.
    # grids = [{time_: list()} for time_ in date_range(start, end, step)]
    grids = [{"TIME": time_, "SHIP_INFO": list()}
             for time_ in date_range(
                 kwargs['start'], kwargs['end'], kwargs['step']
             )
             ]
    with open(path) as f_object:
        datas = csv.reader(f_object)
        # data: ['MMSI', 'TIME', 'LON', 'LAT', 'COG', 'SOG']
        next(datas)  # skip the header row
        for data in datas:
            lon, lat = data[2], data[3]
            lon_, lat_ = 0, 0
            # Snap the position to its 0.5-degree cell lower-left corner by
            # inspecting the first fractional digit.
            # NOTE(review): assumes lon strings look like 'DDD.d...' and lat
            # like 'DD.d...' so that lon[4]/lat[3] are the first decimal
            # digits — TODO confirm against the Meta_Data CSV format.
            if int(lon[4]) < 5 and int(lat[3]) < 5:
                lon_ = int(lon[: 3])
                lat_ = int(lat[: 2])
            elif int(lon[4]) >= 5 and int(lat[3]) >= 5:
                lon_ = int(lon[: 3]) + 0.5
                lat_ = int(lat[: 2]) + 0.5
            elif int(lon[4]) < 5 and int(lat[3]) >= 5:
                lon_ = int(lon[: 3])
                lat_ = int(lat[: 2]) + 0.5
            elif int(lon[4]) >= 5 and int(lat[3]) < 5:
                lon_ = int(lon[: 3]) + 0.5
                lat_ = int(lat[: 2])
            else:
                # Defensive: the four branches above are exhaustive for digit
                # inputs, so this should be unreachable.
                print("")
                print("Can't convert to grid index!!!")
                print("Can't find {}/{}".format(lon, lat))
                print("_lon: ", lon_, "_lat: ", lat_)
            lon_remainder = int(*np.where(longitude == lon_))
            lat_quotient = int(*np.where(latitude == lat_))
            # Row-major cell id from the (lat, lon) cell indices: the grid
            # cell this ship occupies at this report time.
            area_id = lat_quotient * lon_length + lon_remainder
            # Round the report time to the nearest 5-second slot.
            time = parse_time(data[1])
            time_remainder = time.second % 10
            if time_remainder in range(8, 10):   # 8-9 -> up to the next :x0
                s = 10 - time_remainder
                _time = time + timedelta(seconds=s)
            elif time_remainder in range(0, 3):  # 0-2 -> down to :x0
                _time = time - timedelta(seconds=time_remainder)
            elif time_remainder in range(3, 6):  # 3-4 -> up to :x5 (5 stays)
                s = 5 - time_remainder
                _time = time + timedelta(seconds=s)
            else:                                # 6-7 -> down to :x5
                s = time_remainder - 5
                _time = time - timedelta(seconds=s)
            # Locate the slot by binary search and add the vessel to it.
            grids[BinarySearch(grids, _time)]["SHIP_INFO"].append(Vessel(
                area_id, data, gridlon_=longitude, gridlat_=latitude, grid_delta=delta
            ))
    return grids
--------------------------------------------------------------------------------
/DataProcess/channel_geojson/map.geojson:
--------------------------------------------------------------------------------
1 | {"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":{"type":"Polygon","coordinates":[[[122.49405555555556,31.104694444444444],[122.32073415,31.10399454],[122.29536948,31.11041076],[122.09546326,31.23178898],[121.867487826,31.26957459],[121.85971099,31.27142255],[121.75000111,31.31171865],[121.71359522,31.32499963],[121.500694949,31.42815521],[121.470603666,31.457355634],[121.466394853,31.45518042],[121.4976318914,31.424916883],[121.6064638161,31.37385802],[121.7121863365,31.32215023],[121.8612065683,31.267397513],[122.07991182804,31.2312072515],[122.1056946538,31.2215819611],[122.3063492774,31.1000633239],[122.49405555555556,31.101055555555554],[122.49405555555556,31.104694444444444]]]}},{"type":"Feature","properties":{},"geometry":{"type":"Polygon","coordinates":[[[122.49405555555556,31.008555555555557],[122.275256822,31.05239110129],[122.274266781,31.048683715],[122.1475659811,31.05719638025],[122.1098454169,31.066099906],[122.07243727295,31.07582231979],[122.03665374,31.08254804144],[121.9977754533,31.0958469607],[121.973938725859,31.11054315044],[121.91943713374,31.1410399903],[121.899726390838,31.1534178256988],[121.87195817839,31.16757027968],[121.79192593147,31.2395524809],[121.713589715695,31.3160699814],[121.700026558,31.30979368001],[121.721664965573,31.294661476307],[121.75395519117,31.2622881925],[121.75471488132,31.261665643311],[121.78999889111,31.2248384269],[121.807800119488,31.20486726318],[121.8280116617,31.18508449489],[121.889710984249,31.13395858568],[121.964501622053,31.09592660496],[121.9894398433,31.08260777926],[122.17033350626383,30.9828123849],[122.18316812142919,30.95850913347],[122.14688122272491,30.808281227946],[122.157244696715,30.806941474918],[122.194185094118,30.95952139246],[122.17864519148695,30.98900924688],[121.9977865340759,31.0866866436],[122.0283301087171,31.0733299667819],[122.09238684767352,31.05711011959708],[122.12284785382062,31.047110802494],[122.270244000938,31.0363571323669],[122.
27092216501373,31.03618324414],[122.49405555555556,30.990944444444445],[122.49405555555556,31.008555555555557]]]}}]}
--------------------------------------------------------------------------------
/DataProcess/data_process.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/6/17
4 | import csv
5 | import numpy as np
6 | from datetime import timedelta
7 | from operator import itemgetter
8 | from itertools import chain, groupby
9 | from multiprocessing import Pool
10 |
11 | from _Time.ParseTime import parse_time
12 | from DataProcess.SourceData import source_data
13 | from DataProcess.FinalData import calculation
14 |
15 | #************************************************************
16 | # data_process.py(待添加功能)
17 | # ——Meta_Data Directory: 2016年10月至12月AIS元数据(.csv)
18 | # ——Final_Data_Result: data_process.py处理后的结果(.csv)
19 | # ——SourceData.py
20 | # ——FinalData.py
21 | #
22 | # Global Variable:
23 | # (1) longitude、latitude、delta
24 | # (2) start、end、step
25 | #
26 | # 进程池Pool:
27 | # MacBook Pro2017: 8核心
28 | # Dell T7610: 12核心
29 | #
30 | # 待添加功能:
31 | # 将长江口定线制中北槽航道、南槽航道的经纬度写成GeoJson,
32 | # 然后剔除在此GeoJson文件中船舶, 生成最终的final_data.csv,
33 | # 写入Final_Data_Result目录中
34 | #
35 | #************************************************************
36 |
# Path to the meta-data CSV for one day.
path = './Meta_Data/2016-10-01.csv'

## Grid extent

# Longitude range of the study area.
longitude = np.arange(120, 126, 0.5)
# Latitude range of the study area.
latitude = np.arange(30, 36, 0.5)
# Grid cell size (degrees).
delta = 0.5

## Time range

# Start time.
start = parse_time("2016-10-01 00:00:00")
# End time.
end = parse_time("2016-10-02 00:00:05")
# Time step.
step = timedelta(seconds=5)

if __name__ == '__main__':
    import time
    start_time = time.time()
    # Fan the per-time-slice DCPA/TCPA computation out over all cores.
    with Pool() as pool:
        datas = list(chain(*pool.map(calculation, source_data(
            path, longitude=longitude, latitude=latitude, delta=delta, start=start, end=end, step=step
        ))))
    datas.sort(key=itemgetter('Tar_Ship', 'Ref_Ship'))
    print("")
    print("Process has finished!!!")
    print("The Length of datas %s" % len(datas))
    # For each (Tar_Ship, Ref_Ship) pair keep the record with the minimum
    # distance; datas is already sorted by the same key as the groupby.
    data = [min(items, key=itemgetter('distance')) for tar, items in
            groupby(datas, key=itemgetter('Tar_Ship', 'Ref_Ship'))]
    print(len(data))
    # FIX: files written by the csv module must be opened with newline='',
    # otherwise extra blank rows appear on Windows.
    with open("./Final_Data_Result/10-01.csv", 'w', newline='') as f:
        _data = csv.DictWriter(f, ['Tar_Ship', 'Ref_Ship', 'mid_lon', 'mid_lat', 'distance', 'DCPA', 'TCPA'])
        _data.writeheader()
        _data.writerows(data)
    end_time = time.time()
    print("Time Cost: {:.3f}s".format(end_time - start_time))
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AIS_Collision
2 | ***Yangtze river estuary collision risk distribution***
3 |
4 |
5 |
6 | ---
7 |
8 | ***Geojson data for the channel of Yangtze River(Polygon)***
9 |
10 | [The channel of Yangtze River(Polygon)](https://github.com/ginlph/AIS_Collision/blob/master/DataProcess/channel_geojson/map.geojson)
11 |
12 | ### Code Structure
13 | * **_Time**
14 | * *ParseTime.py* `"%Y-%m-%d %H:%M:%S" -> datetime(year, month, day, hour, minute, second)`
  15 |   * *TimeStamp.py* `date_range(start, end, step)`
16 | * **Area**
17 | * *Grid.py* `class Grid`
18 | * area_id: grid index(网格编号)
19 | * gridlon_: `np.arange(120, 126, 0.5)` (网格经度范围)
20 | * gridlat_: `np.arange(30, 36, 0.5)` (网格纬度范围)
  21 |     * grid_delta: `0.5` (网格间隔)
22 |
23 | * *Vessel.py* `class Vessel(Grid)`
24 | * args:
25 | * |————MMSI
26 | |————TIME
27 | AIS ————|————LON
28 | |————LAT
29 | |————COG
30 | |————SOG
31 | * area_id: same as Grid.area_id
32 | * gridlon_: same as Grid.gridlon_
33 | * gridlat_: same as Grid.gridlat_
  34 |     * grid_delta: same as Grid.grid_delta
35 |
36 | * **CPA**
37 | * *DCPA_TCPA.py*
38 | * distance method: distance between tar_ship and ref_ship(目标船与参考船之间的距离)
39 | * mid_point method: the half-way point along a great circle path between the two vessels(目标船与参考船之间中点地理位置)
40 | * bearing method: initial bearing(方位)
41 |
42 |
43 | * cpa method: return DCPA, TCPA(计算两船在同一AIS时刻下的DCPA和TCPA)
44 |
45 | * **DataProcess**
46 | * *channel_geojson* Geojson data for the channel of Yangtze River(Polygon)
47 | * *Meta_Data* Meta_Data queried from the mongodb database(csv format)
48 | * *Final_Data_Result* The final result!(csv format)
49 | * *SourceData.py*
50 | * Store the grid and list of ships in the grid(创建网格,将AIS数据按TIME存入grid中)
51 | * *FinalData.py*
52 | * Call the method in **CPA.DCPA_TCPA**, return the final result
53 | (调用CPA.DCPA_TCPA中有关方法,计算最终结果)
54 | * *data_process.py*
55 | * store result in the **Final_Data_Result** folder
56 | (计算结果存入Final_Data_Result文件夹)
57 |
58 | * **ShipDataInsert**
59 | * Insert the AIS data into the mongodb database
60 |
61 | * **ShipDataQuery**
62 | * Query the AIS data from the mongodb database and store it in "../../DataProcess/Meta_Data"
63 |
64 | * **main.py**
65 | * use `cartopy module` to draw Yangtze river estuary collision risk distribution
66 |
67 | ### 未完待续...
68 | ***Author: LPH,TIME: 2019年06月28日 下午15:15***
69 |
70 | ***Address 上海海事大学(Shanghai maritime university)***
71 |
72 | ***Email 664104221@qq.com***
--------------------------------------------------------------------------------
/RiskOfCollisionPicture/2016-10-01/Distribution_of_collision_risk_in_Yangtze_river_estuary(2016-10-01).png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ginlph/AIS_Collision/a60ae1d9d7a6260b79aa2710d5eb88698981ff12/RiskOfCollisionPicture/2016-10-01/Distribution_of_collision_risk_in_Yangtze_river_estuary(2016-10-01).png
--------------------------------------------------------------------------------
/ShipDataInsert/AsyncMotorDataSet/AsyncShipData.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author: lph time:2019/5/7
# Bulk-insert AIS track files into MongoDB with the async motor driver.
import os
import logging
import time
import asyncio
import motor.motor_asyncio
from datetime import datetime
# Root directory containing the raw AIS text files (';'-separated records).
path = '/Volumes/My Passport/test_motor'

# Support URL
# client = motor.motor_asyncio.AsyncIOMotorClient('mongodb://localhost:27017')
client = motor.motor_asyncio.AsyncIOMotorClient('localhost', 27017)

# Getting a Database
db = client.ais_motor

# Getting a Collection
collection = db.test_motor

# Every file found under `path`, recursively (all files, not only .txt,
# despite the message below).
file_path = [
    os.path.join(root, file)
    for root, dirs, files in os.walk(path)
    for file in files
]

print("There are {} endswith txt files".format(len(file_path)))

# Inserting a Document


async def do_insert():
    """Parse every AIS file and insert its records into MongoDB, one
    insert_many per file; parse/insert failures are logged and skipped."""
    start = time.time()
    for file in file_path:
        single_ship = list()  # all parsed records for the current file
        if not os.path.getsize(file):
            print("This {} is None".format(file))
        else:
            with open(file, encoding='utf8') as f:
                try:
                    for line in f:
                        # line layout: MMSI;TIME;LON;LAT;COG;SOG
                        lines = line.strip().split(';')
                        ship_info = {
                            "MMSI": int(lines[0]),
                            "TIME": datetime.strptime(lines[1], "%Y-%m-%d %H:%M:%S"),
                            # GeoJSON point, enabling geospatial indexing.
                            "location": {
                                "type": "Point",
                                "coordinates": [
                                    float(lines[2]), float(lines[3])
                                ]
                            },
                            "COG": float(lines[4]),
                            "SOG": float(lines[5])
                        }
                        single_ship.append(ship_info)
                    # One bulk insert per file.
                    result = await collection.insert_many(single_ship)
                    print('inserted %d ShipInfo' % (len(result.inserted_ids),))
                except Exception as e:
                    # Log with traceback and continue with the next file.
                    logging.exception(e)
                    print("The file path is {}".format(file))
                    print("Reason: ", e)
    end = time.time()
    total = end - start
    print("Total time spent inserting the MongoDB: " + str(total) + " s.")

# NOTE(review): asyncio.get_event_loop() is deprecated for this use in modern
# Python; asyncio.run(do_insert()) is the current equivalent.
loop = asyncio.get_event_loop()
loop.run_until_complete(do_insert())
print("All txt files have already inserted in the MongoDB")
--------------------------------------------------------------------------------
/ShipDataInsert/SyncPymongoDataSet/ShipDataInsert.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/2/18
4 | import os
5 | import time
6 | import logging
7 | import pymongo
8 | from pymongo import MongoClient
9 | from datetime import datetime
10 |
path = '/Users/lipenghao/Desktop/test_motor'
# path = '/Volumes/My Passport/ais_data_/allshipsin2017_trajectory4'

"""
使用mongodb登录
"""
client = MongoClient('localhost', 27017)
# database: ais_sync
db = client.ais_sync
# collection: test_sync
collection = db.test_sync


# Collect every .txt data file under `path`, recursively.
# The extension filter keeps stray non-data files (e.g. .DS_Store)
# out of the insert loop; without it every file under `path` was parsed.
file_path = [
    os.path.join(root, file)
    for root, dirs, files in os.walk(path)
    for file in files
    if file.endswith('.txt')
]

print("There are {} endswith txt files".format(len(file_path)))
31 |
start_time = time.time()

# Parse every AIS text file and bulk-insert it, one insert_many per file.
# Each line is expected as ``MMSI;TIME;LON;LAT;COG;SOG``.
# Fix: error handling is per-line, so one malformed record is logged and
# skipped instead of discarding the whole file's insert (previously the
# try wrapped the entire loop).
for file_name in file_path:
    single_ship = list()
    if not os.path.getsize(file_name):
        print("This {} is None".format(file_name))
        continue
    with open(file_name, encoding='utf8') as f:
        for line in f:
            try:
                lines = line.strip().split(';')
                ship_info = {
                    "MMSI": int(lines[0]),
                    "TIME": datetime.strptime(lines[1], "%Y-%m-%d %H:%M:%S"),
                    "location": {
                        "type": "Point",
                        # GeoJSON order: [longitude, latitude]
                        "coordinates": [
                            float(lines[2]), float(lines[3])
                        ]
                    },
                    "COG": float(lines[4]),
                    "SOG": float(lines[5])
                }
            except Exception as e:
                logging.exception(e)
                print("The file path is {}".format(file_name))
                print("Reason: ", e)
                continue
            single_ship.append(ship_info)
    # insert_many raises InvalidOperation on an empty document list.
    if single_ship:
        result = collection.insert_many(single_ship)
        print('inserted %d ShipInfo' % (len(result.inserted_ids),))


end_time = time.time()
total_time = end_time - start_time
print("All txt files have already inserted in the MongoDB")
print("Total time spent inserting the MongoDB: " + str(total_time) + " s.")
--------------------------------------------------------------------------------
/ShipDataInsert/SyncPymongoDataSet/mongo_example.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/4/26
4 | import datetime
5 | from pprint import pprint
6 | from pymongo import MongoClient
7 |
# Minimal PyMongo examples: connect, insert, and range-query a collection.
client = MongoClient('mongodb://localhost:27017')
db = client.inventory
collection = db.test_collection

# Sample document.
post = {
    "author": "Mike",
    "text": "My first blog post!",
    "tags": ["mongodb", "python", "pymongo"],
    "date": datetime.datetime.utcnow()
}

# Insert a single document.
# collection.insert_one(post)

# Key point: bulk insert
# with insert_many()

new_posts = [
    {
        "author": "Mike",
        "text": "Another post!",
        "tags": ["bulk", "insert"],
        "date": datetime.datetime(2009, 11, 12, 11, 14)
    },
    {
        "author": "Eliot",
        "title": "MongoDB is fun",
        "text": "and pretty easy too!",
        "date": datetime.datetime(2009, 11, 10, 10, 45)
    }
]

# result = collection.insert_many(new_posts)
# for i in collection.find({"author": "Mike"}):
#     print(i)

# Range query: documents dated at or before `d`, _id suppressed, sorted by author.
d = datetime.datetime(2009, 11, 12, 12)
for i in collection.find({"date": {"$lte": d}}, {"_id": 0}).sort("author"):
    pprint(i)

# Indexing

# sort, limit
# for info in collection.find({"MMSI": 488000172}).sort('TIME', pymongo.ASCENDING).limit(3):
#     pprint(info)

# explain
"""
pprint(db.command(
    {
        "explain": {"count": "test_ais", "query": {"MMSI": 488000172} },
        "verbosity": "executionStats"
    }
)['executionStats'])

"""
--------------------------------------------------------------------------------
/ShipDataQuery/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ginlph/AIS_Collision/a60ae1d9d7a6260b79aa2710d5eb88698981ff12/ShipDataQuery/.DS_Store
--------------------------------------------------------------------------------
/ShipDataQuery/ChinaCoastalData.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/5
import csv
import json
import time
from datetime import datetime

from pymongo import MongoClient

from ShipDataQuery.ComplexEncoder import ComplexEncoder
9 | """
10 | 研究区域:
11 | (Longitude, Latitude)
12 | Left Corner: Right Corner:
13 | (120°, 30°) (125°, 35°)
14 |
15 | 网格精度:
16 | 0.5° * 0.5°
17 |
18 | Polygon:
19 | [
20 | [
21 | [120, 30],
22 | [125, 30],
23 | [125, 35],
24 | [120, 35],
25 | [120, 30]
26 | ]
27 | ]
28 |
29 | 导出形式:
30 | ChinaCoastalData.json
31 |
32 | 将位于上述网格区域内的所有船舶经、纬度从mongodb数据库中筛选出来
33 | """
34 |
# Connect to MongoDB (localhost).
client = MongoClient('localhost', 27017)
# database: ChinaCoastalData
db = client.ChinaCoastalData
# collection: chinacoastaldata
collection = db.chinacoastaldata

# Start timing.
start = time.time()

# FIX: TIME is stored as a BSON date (the insert scripts build it with
# datetime.strptime), and MongoDB never matches a string bound against a
# date field, so the previous string bounds "2016-10-01"/"2016-10-02"
# selected no documents.  Query with datetime objects, as the other query
# scripts in this repository do.
results = collection.find({
    "TIME": {
        "$gte": datetime(2016, 10, 1),
        "$lt": datetime(2016, 10, 2)
    },
    "location": {
        "$geoWithin": {
            "$geometry": {
                "type": "Polygon",
                "coordinates": [
                    [
                        [120, 30],
                        [125, 30],
                        [125, 35],
                        [120, 35],
                        [120, 30]
                    ]
                ]
            }
        }
    }
}, {"_id": 0})


"""
# 使用.json格式储存Polygon中的船舶AIS数据
将查询结果写入ChinaCoastalData.json
ship_info = [{"MMSI": result["MMSI"], "TIME": result["TIME"], "LON": result["location"]["coordinates"][0],
              "LAT": result["location"]["coordinates"][1], "COG": result["COG"], "SOG": result["SOG"]}
             for result in results]
with open('./ChinaCoastalData/2016_10_01.json', 'w') as file_object:
    json.dump(ship_info, file_object, cls=ComplexEncoder)
"""


# Export the AIS records inside the polygon as CSV.
# newline='' stops the csv module from emitting blank rows on Windows.
with open('./ChinaCoastalData/2016_10_01_test.csv', 'w', newline='') as f_write:
    datas = csv.writer(f_write)
    Header = ["MMSI", "TIME", "LON", "LAT", "COG", "SOG"]
    datas.writerow(Header)
    for result in results:
        datas.writerow([
            result["MMSI"],
            result["TIME"],
            result["location"]["coordinates"][0],
            result["location"]["coordinates"][1],
            result["COG"],
            result["SOG"]
        ])

# Stop timing and report.
end = time.time()
total = end - start

print("-------------------------------------")
print("*********The Work is done!***********")
print("The total Time is {}!".format(total))
# print("The query has {} documents!".format(len(ship_info)))
print("-------------------------------------")
--------------------------------------------------------------------------------
/ShipDataQuery/ComplexEncoder.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/5
4 | from datetime import datetime
5 | from datetime import date
6 | import json
7 | """
8 | json处理datetime时, 抛出TypeError:
9 | TypeError: Object of type 'datetime' is not JSON serializable
10 |
11 | 解决方式:
12 | 重写json.JSONEncoder子类
13 | ComplexEncoder
14 | """
15 |
16 |
class ComplexEncoder(json.JSONEncoder):
    """JSON encoder that renders datetime/date objects as formatted strings."""

    def default(self, obj):
        # Test datetime before date: datetime is a subclass of date, so the
        # order of the isinstance checks matters.
        if isinstance(obj, datetime):
            return obj.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(obj, date):
            return obj.strftime('%Y-%m-%d')
        # Defer to the base class, which raises TypeError for unknown types.
        return super().default(obj)
25 |
--------------------------------------------------------------------------------
/_Time/ParseTime.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/14
4 | from datetime import datetime
5 |
6 |
def parse_time(string):
    """
    Convert a "%Y-%m-%d %H:%M:%S" string into a datetime.

    Splits the fixed-layout date and time parts by hand instead of going
    through strptime (presumably to keep per-line parsing cheap).
    """
    date_fields = (int(part) for part in string[:10].split('-'))
    time_fields = (int(part) for part in string[11:].split(':'))
    return datetime(*date_fields, *time_fields)
14 |
15 |
--------------------------------------------------------------------------------
/_Time/TimeStamp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/8
4 |
5 |
def date_range(start, stop, step):
    """Yield start, start+step, ... while the value stays strictly below stop.

    Works for any type supporting ``<`` and ``+`` (e.g. datetime/timedelta).
    """
    current = start
    while current < stop:
        yield current
        current = current + step
10 |
11 |
--------------------------------------------------------------------------------
/_Time/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/8
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/13
4 | import numpy as np
5 | import pandas as pd
6 | import matplotlib.pyplot as plt
7 | import cartopy.crs as ccrs
8 | from cartopy.io import img_tiles
9 | from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
10 |
# -------------------------------------------
# Inputs:
#   (1) path to the FinalDataSet csv
#   (2) (unused)
# -------------------------------------------

# Per-day collision-risk result; rows carry mid-point lon/lat columns.
path = './DataProcess/Final_Data_Result/10-01.csv'

# Load finall_data-10-01.csv into a DataFrame.
df = pd.read_csv(path)

# TDT (Tianditu) tile source — NOTE(review): not part of stock cartopy;
# presumably a locally patched img_tiles class, verify it exists.
request = img_tiles.TDT()
fig = plt.figure(figsize=(10, 10), dpi=100)
ax = plt.axes(projection=request.crs)
ax.set_extent([120, 126, 30, 36], crs=ccrs.PlateCarree())

# Lat/lon gridlines, labelled on the bottom/left only.
gl = ax.gridlines(color='black', linestyle='--', draw_labels=True)
gl.xlabels_top = False
gl.ylabels_right = False
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER

ax.add_image(request, 8)

# Scatter each record's mid-point; tiny markers since there are many points.
ax.scatter(df.mid_lon, df.mid_lat, transform=ccrs.PlateCarree(), s=.03, color='red')
plt.savefig("./RiskOfCollisionPicture/2016-10-01/Distribution_of_collision_risk_in_Yangtze_river_estuary(2016-10-01).png", bbox_inches='tight')
plt.show()
--------------------------------------------------------------------------------
/test/Readtest.txt:
--------------------------------------------------------------------------------
1 | test Introduction
2 | --test
3 | --TestAisData
4 | --414186000.txt
5 | --413466840.txt
6 | --414189000.txt
7 | --414190000.txt
8 | --414191000.txt
9 | --413997996.txt
10 | --413998001.txt
11 | --413998221.txt
12 | --414142000.txt
13 | --414143000.txt
14 | --414145625.txt
15 | --416001438.txt
16 | --416001440.txt
17 | --416001447.txt
18 | --431003365.txt
19 | --431003374.txt
20 | --412900838.txt
21 | --412900882.txt
22 | --Mongodb.py
23 | --DCPA_and_TCPA_Result.py
24 | --Collision risk distribution.py
25 | --StudyChinaArea.png
26 |
27 |
28 | TestAisData
29 | 存储AIS数据文件目录
30 |
31 | StudyChinaArea.png
32 | 对中国沿海网格划分(0.5*0.5°), 表征网格粒度
33 |
34 | Mongodb.py
35 | 将TestAisData目录下的AIS数据插入到MongoDB数据库中
36 |
37 | DCPA_and_TCPA_Result.py
38 | 计算出DCPA和TCPA的频率计数, 根据日间1海里、夜晚2海里
39 |
40 | Collision risk distribution.py
41 | 根据计算出的DCPA和TCPA频率计数, 使用Cartopy和matplotlib画出中国沿海船舶碰撞风险分布图
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/test/test_async_query.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/7
4 | import time
5 | import asyncio
6 | import motor.motor_asyncio
7 | from datetime import datetime
8 | from pprint import pprint
9 |
# Motor (async MongoDB driver) connection; the URL form also works:
# client = motor.motor_asyncio.AsyncIOMotorClient('mongodb://localhost:27017')
client = motor.motor_asyncio.AsyncIOMotorClient('localhost', 27017)

# Getting a Database
db = client.ais_motor

# Getting a Collection
collection = db.test_motor
19 |
20 |
async def do_find():
    """Query one day of AIS positions inside the coastal polygon and time it.

    Prints the first matching document's coordinates (if any) and the
    total query duration.  Fix: guard the empty result set —
    ``ship_info[0]`` previously raised IndexError when nothing matched.
    """
    start = time.time()
    cursor = collection.find({
        "TIME": {
            "$gte": datetime(2016, 12, 25),
            "$lte": datetime(2016, 12, 26)
        }, "location": {
            "$geoWithin": {
                "$geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [
                            [109, 23],
                            [130, 23],
                            [130, 40],
                            [109, 40],
                            [109, 23]
                        ]
                    ]
                }
            }
        }
    }, {"_id": 0})
    # async for document in cursor:
    #     pprint(document)
    ship_info = [{'location': result['location']['coordinates']} async for result in cursor]
    if ship_info:
        print(ship_info[0])
    else:
        print("No documents matched the query.")
    end = time.time()
    total = end - start
    print("Total time spent query the MongoDB: " + str(total) + " s.")

loop = asyncio.get_event_loop()
loop.run_until_complete(do_find())
54 |
--------------------------------------------------------------------------------
/test/test_grid_index.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/7
4 | import json
5 | import numpy as np
6 | from datetime import datetime
7 | from datetime import timedelta
8 | """
9 | 研究区域:
10 | (Longitude, Latitude)
11 | Left Corner: Right Corner:
12 | (120°, 30°) (125°, 35°)
13 |
14 | 网格精度:
15 | 0.5° * 0.5°
16 |
17 | 时间范围:
18 | 2016-10-01 2016-10-31
19 | """
20 |
21 | # generate time data
22 |
23 |
def date_range(start, stop, step):
    """Yield values from start (inclusive) up to stop (exclusive) by step."""
    while start < stop:
        yield start
        start += step


longitude = np.arange(120, 125, 0.5)
latitude = np.arange(30, 35, 0.5)
Time = date_range(datetime(2016, 10, 1), datetime(2016, 11, 1), timedelta(hours=1))

# Grid extents, hoisted out of the per-cell loop: min()/max() over the
# coordinate arrays were previously recomputed for every single cell.
lon_min, lon_max = longitude.min(), longitude.max()
lat_min, lat_max = latitude.min(), latitude.max()

# Build one 10x10 grid of 0.5-degree cells for every hourly timestamp.
# Flag semantics (unchanged): 1 = corner cell, 2 = edge cell, 3 = interior.
grid_index = list()
for time in Time:
    count = 0
    _time = datetime.strftime(time, "%Y-%m-%d %H:%M:%S")
    temporary = []
    for lat in latitude:
        for lon in longitude:
            sub_grid = {
                "Left": {"Lon": lon, "Lat": lat},
                "Right": {"Lon": lon + 0.5, "Lat": lat + 0.5}
            }
            # Count how many axes sit on the grid boundary:
            # 2 -> corner (Flag 1), 1 -> edge (Flag 2), 0 -> interior (Flag 3).
            on_boundary = (lon in (lon_min, lon_max)) + (lat in (lat_min, lat_max))
            sub_grid['Flag'] = 3 - on_boundary
            sub_grid['area_id'] = count
            count += 1
            temporary.append(sub_grid)
    grid_index.append({_time: temporary})


with open('test_grid.json', 'w') as f:
    json.dump(grid_index, f)
67 |
68 |
--------------------------------------------------------------------------------
/test/test_sync_query.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/5
4 | import json
5 | import time
6 | from datetime import datetime
7 | from pymongo import MongoClient
8 | from ShipDataQuery.ComplexEncoder import ComplexEncoder
9 | from pprint import pprint
10 |
# Connect to MongoDB (localhost) and pull one day of AIS positions lying
# inside the polygon, then dump their coordinates to JSON.
client = MongoClient('localhost', 27017)
# database: ais
db = client.ais
# collection: trajectory4
collection = db.trajectory4

# Timing starts here.
start_time = time.time()

# Query filters, built up separately for readability.
time_filter = {
    "$gte": datetime(2016, 12, 1),
    "$lte": datetime(2016, 12, 2)
}
polygon = {
    "type": "Polygon",
    "coordinates": [
        [
            [110, 39],
            [130, 39],
            [130, 42],
            [110, 42],
            [110, 39]
        ]
    ]
}
results = collection.find(
    {"TIME": time_filter, "location": {"$geoWithin": {"$geometry": polygon}}},
    {"_id": 0}
)

# Write the matching coordinates to 2016_12_01to02.json.
ship_info = [{'location': doc['location']['coordinates']} for doc in results]
with open('2016_12_01to02.json', 'w') as file_object:
    json.dump(ship_info, file_object, cls=ComplexEncoder)

# Timing ends here.
end_time = time.time()
total_time = end_time - start_time
print("Total time spent query the MongoDB: " + str(total_time) + " s.")
print("------**********---------")
print("The Work is done!")
# print("The total Time is {}!".format(total_time))
print("The query has {} documents!".format(len(ship_info)))
print("------**********---------")
/test/test_time.md:
--------------------------------------------------------------------------------
# First test:
## Environment: MacOS, 16 GB Memory, 8-core
3 | ##ScriptPath: 'FinalData.py'
4 | ##method: sort and group by
5 | ##decorator: No
6 | ##Write to csv file: No
7 | ##Cost Time: 1h 34min 10s
8 |
9 |
10 | #Second test: 2019/06/17
## Environment: MacOS, 16 GB Memory, 8-core
12 | ##ScriptPath: 'AIS_Collision/DataProcess/data_process.py'
13 | ##method: SourceData.source_data + FinalData.calculation
14 | ##Write to csv file: yes
15 | ##SourceData Directory: 'AIS_Collision/DataProcess/Meta_Data'
16 | ##FinalData Directory: 'AIS_Collision/DataProcess/Final_Data_Result'
17 | ##The Length of original file —— final_data.csv: 80029
18 | ##The Length of final file —— finall_data-10-01.csv: 80089
19 | ##Cost Time: 1444.872s
20 |
21 |
# Third test: 2019/06/17
## Environment: DELL T7610, 16 GB Memory, 12-core
24 | ##ScriptPath: 'AIS_Collision/DataProcess/data_process.py'
25 | ##method: SourceData.source_data + FinalData.calculation
26 | ##Write to csv file: yes
27 | ##SourceData Directory: 'AIS_Collision/DataProcess/Meta_Data'
28 | ##FinalData Directory: 'AIS_Collision/DataProcess/Final_Data_Result'
## The Length of original file —— final_data.csv: 80029
## The Length of final file —— finall_data-10-01.csv: 80089
31 | ##Cost Time: 1845.163s
--------------------------------------------------------------------------------
/test_grid.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | # author: lph time:2019/5/11
4 | import fiona
5 | import matplotlib.pyplot as plt
6 | import cartopy.crs as ccrs
7 | import cartopy.feature as cfeature
8 | import shapely.geometry as sgeom
9 | from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
10 |
# Path to a China coastline shapefile on an external drive.
map_url = '/Volumes/My Passport/shp_file/chinamap/cnmap/cnhimap.shp'
fig = plt.figure(figsize=(15, 9))
ax = plt.axes(projection=ccrs.PlateCarree())
ax.set_extent([115, 130, 25, 40], crs=ccrs.PlateCarree())


# Base features at 10m scale.
# ax.coastlines('10m')
ax.add_feature(cfeature.BORDERS.with_scale('10m'), linestyle=':')
ax.add_feature(cfeature.COASTLINE.with_scale('10m'))
ax.add_feature(cfeature.LAKES, alpha=0.5)
ax.add_feature(cfeature.RIVERS)
# Overlay the detailed coastline geometries read from the local shapefile.
with fiona.open(map_url) as f:
    geometries = [sgeom.shape(shp['geometry']) for shp in f]
ax.add_geometries(geometries, ccrs.PlateCarree(), facecolor='none', edgecolor='black')

# Lat/lon gridlines, labelled on the bottom/left only.
gl = ax.gridlines(color='black', linestyle='--', draw_labels=True)
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabels_top = False
gl.ylabels_right = False

# Natural Earth land/ocean fills (50m resolution).
land = cfeature.NaturalEarthFeature('physical', 'land', '50m', edgecolor='face', facecolor=cfeature.COLORS['land'])
ax.add_feature(land)
ocean = cfeature.NaturalEarthFeature('physical', 'ocean', '50m', edgecolor='face', facecolor=cfeature.COLORS['water'])
ax.add_feature(ocean)

plt.show()
--------------------------------------------------------------------------------