├── .classpath
├── .gitignore
├── .project
├── .settings
├── org.eclipse.core.resources.prefs
├── org.eclipse.jdt.core.prefs
└── org.eclipse.m2e.core.prefs
├── GettingStarted
└── GettingStarted.java
├── README.md
├── SampleData
├── sample.udb
└── sample.udd
├── image
├── company.png
├── result.png
└── shiyi.png
├── jar
└── 2.0
│ ├── geokg-2.0-jar-with-dependencies.jar
│ ├── geokg-2.0-sources.jar
│ └── geokg-2.0.jar
├── pom.xml
└── src
└── main
└── java
└── www
└── supermap
├── geoknowledge
└── KnowledgeGraph.java
├── model
└── iobjects
│ ├── GeoObjectEntity.java
│ ├── LineObjectEntity.java
│ ├── ObjectGrid.java
│ ├── PointObjectEntity.java
│ ├── RecordSetEntity.java
│ └── RegionObjectEntity.java
└── utils
├── Common.java
├── Iobjects.java
├── Rdf4j.java
└── S2.java
/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled class file
2 | *.class
3 |
4 | # Log file
5 | *.log
6 |
7 | # BlueJ files
8 | *.ctxt
9 |
10 | # Mobile Tools for Java (J2ME)
11 | .mtj.tmp/
12 |
13 | # Package Files #
14 | *.jar
15 | *.war
16 | *.nar
17 | *.ear
18 | *.zip
19 | *.tar.gz
20 | *.rar
21 |
22 | # 知识图谱文件
23 | *.ntriples
24 |
25 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
26 | hs_err_pid*
27 | /target/
28 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | ATLab-KnowledgeGraph
4 |
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 | org.eclipse.m2e.core.maven2Builder
15 |
16 |
17 |
18 |
19 |
20 | org.eclipse.jdt.core.javanature
21 | org.eclipse.m2e.core.maven2Nature
22 |
23 |
24 |
--------------------------------------------------------------------------------
/.settings/org.eclipse.core.resources.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | encoding//src/main/java=UTF-8
3 | encoding//src/test/java=UTF-8
4 | encoding/=UTF-8
5 |
--------------------------------------------------------------------------------
/.settings/org.eclipse.jdt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
3 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
4 | org.eclipse.jdt.core.compiler.compliance=1.7
5 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
6 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
7 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
8 | org.eclipse.jdt.core.compiler.source=1.7
9 |
--------------------------------------------------------------------------------
/.settings/org.eclipse.m2e.core.prefs:
--------------------------------------------------------------------------------
1 | activeProfiles=
2 | eclipse.preferences.version=1
3 | resolveWorkspaceProjects=true
4 | version=1
5 |
--------------------------------------------------------------------------------
/GettingStarted/GettingStarted.java:
--------------------------------------------------------------------------------
1 | import java.util.ArrayList;
2 | import java.util.HashMap;
3 | import java.util.Map.Entry;
4 |
5 | import www.supermap.geoknowledge.KnowledgeGraph;
6 | import www.supermap.model.iobjects.RecordSetEntity;
7 |
8 | public class GettingStarted {
9 |
10 | public static void main(String[] args) {
11 | // 1.以网格等级和图谱存储路径为参数,创建知识图谱
12 | String knowledgeGraphStoreDir = "SampleStore";
13 | KnowledgeGraph.createKnowledgeGraph(13, knowledgeGraphStoreDir);
14 | // 以网格长度为参数构建知识图谱
15 | //KnowledgeGraph.createKnowledgeGraph(1000.0,knowledgeGraphStoreDir);
16 |
17 | // 2.第一次创建之后,再次使用只需直接加载一个存在的知识图谱
18 | KnowledgeGraph knowledgeGraph = KnowledgeGraph.loadKnowledgeGraph(knowledgeGraphStoreDir);
19 |
20 | // 3.增量更新知识图谱
21 | String dataSource = "SampleData\\sample.udb";
22 | // 将udb中的所有数据集增加到知识图谱中的快捷方式
23 | // String[] arType = {};
24 | String[] arType = { "集体宿舍", "停车场", "行政办公用地" };
25 | knowledgeGraph.addKnowledgeGraph(dataSource, arType);
26 |
27 | // 4.查询知识图谱
28 | // 要查询的点的纬度及纬度举例(WGS84)
29 | double dLatitude = 29.6965845497988;
30 | double dLongitude = 106.625768872153;
31 | // 查询半径,单位:米
32 | double iRadius = 977.45;
33 | // 查询指定经纬度范围内图谱中的所有数据集类型
34 | String[] queryType = { "集体宿舍", "停车场", "行政办公用地" };
35 | // 4.1 不带时间参数的查询
36 | HashMap> result = knowledgeGraph.queryKnowledgeGraph(dLatitude, dLongitude,iRadius, queryType);
37 | // 4.2 带时间参数的查询
38 | // HashMap> timeResult = knowledgeGraph.queryKnowledgeGraph(dLatitude, dLongitude,iRadius, queryType,"2016");
39 |
40 | // 5.打印搜索结果
41 | // RecordSetEntity类目前有两个属性,分别为point和mingCheng,分别为实体的经纬度与名称,可以通过get()获得
42 | for (Entry> entry : result.entrySet()) {
43 | System.out.println(entry.getKey() + "个数:" + entry.getValue().size());
44 | for (RecordSetEntity recordSet : entry.getValue()) {
45 | System.out.println("\t" + recordSet.getMingCheng() + recordSet.getPoint());
46 | }
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | # 基于地理格网的时空知识图谱
3 | ## 简介
4 | ATLab-KnowledgeGraph 是北京超图软件股份有限公司未来GIS实验室发布的开源项目,在iobjects产品的基础上,将地理信息实体按照时间和位置划分到多个网格,使用网格、时间及各实体之间的位置关系来构建地理知识图谱。
5 | 使用本项目API,用户可以使用若干数据集来构建自己的地理格网知识图谱,从而快速查询出指定地点缓冲区内的兴趣点。
6 | 本项目在知识图谱的表示上使用了RDF,存储使用RDF4J数据库
7 | 图谱示意图:
8 | 
9 |
10 | 最终效果展示:
11 | 
12 |
13 | ---
14 | ## 如何运行及使用
15 | - 运行
16 | - 用eclipse直接clone本项目,GettingStarted目录下的GettingStarted类可以直接运行,查看结果
17 | - 同时jar目录下有生成的jar包,下载后也可以直接调用
18 | - 使用
19 | - 初次使用流程:新建知识图谱-->加载知识图谱-->添加数据-->添加或查询
20 | - 非初次:加载知识图谱-->添加或查询
21 |
22 | ---
23 | ## Geokg包中主要类与方法介绍
24 | - KnowledgeGraph类
25 | - 创建知识图谱方法
26 | - 调用创建图谱方法,则会在指定目录创建数据库,一个目录下只能创建一个知识图谱,否则程序报错并强制退出
27 | - 创建知识图谱的方法有两个,都为静态方法,可以通过类名KnowledgeGraph直接调用,分别为:
28 | ```java
29 | //@param iGridLevel 要构建的知识图谱网格的等级,取值范围为0-20,小于0取自动取0,大于20自动取20
30 | //@param strDataStore 自定义的存储知识图谱的本地目录
31 | public static boolean createKnowledgeGraph(int iGridLevel,String strDataStore){}
32 |
33 | //@param iGridLength 构建知识图谱的网格宽度(单位:米),根据传入的参数自动映射到网格等级,取值范围为9.8-9220000,分别对应等级20和0,小于9.8默认取9.8,大于9220000默认取9220000
34 | //@param 自定义的存储知识图谱的本地目录
35 | public static boolean createKnowledgeGraph(double iGridLength,String strDataStore){}
36 | ```
37 | - 加载知识图谱方法
38 | - 以固定的存储路径为参数来加载一个已经存在的知识图谱,方法将返回一个知识图谱对象。
39 | - 加载知识图谱的方法也为静态方法
40 | ```java
41 | //@param strDataStore 自定义的存储知识图谱的本地目录
42 | public static KnowledgeGraph loadKnowledgeGraph(String strDataStore){}
43 | ```
44 | - 增量更新方法
45 | - 通过加载知识图谱方法返回的对象来增量添加数据
46 | ```java
47 | //@param dataSource udb文件的路径
48 | //@param arType 要增加的类型,类型为udb中数据集的名称
49 | public boolean addKnowledgeGraph(String dataSource, String[] arType){}
50 | ```
51 | - 查询图谱方法
52 | - 通过加载知识图谱方法返回的对象来查询图谱,查询经纬度必须为WGS84,半径单位为米
53 | ```java
54 | //@param dLatitude 搜索点的纬度
55 | //@param dLongitude 搜索点的经度
56 | //@param iRadius 搜索半径,单位:米
57 | //@param arType 感兴趣的类型,具体名称也为udb数据源显示的数据集名称
58 | public HashMap> queryKnowledgeGraph(double dLatitude, double dLongitude, double iRadius,String[] arType){}
59 | //@param time 地理实体的时间
60 | public HashMap> queryKnowledgeGraph(double dLatitude, double dLongitude, double iRadius,String[] arType,String time){}
61 | ```
62 | - 查询返回对象 HashMap> 介绍
63 | - HashMap的key为您输入的类型,value为该类型的实体
64 | - RecordSetEntity类目前有两个属性,分别为point和mingCheng,分别为实体的经纬度与名称,可以通过get()获得
65 | - 注意:使用recordSet.getMingCheng()获取的可能为null,因为有些数据集可能没有名称字段,目前的处理方式为:没有名称字段便寻找位置字段,然后寻找区县字段,都没有则置为null
66 | ```java
67 | //查看搜索返回的各个类型的实体个数
68 | for (Entry> entry : result.entrySet()) {
69 | System.out.println(entry.getKey()+":个数"+entry.getValue().size());
70 | }
71 |
72 | //查询各个类型的具体实体信息
73 | for (Entry> entry : result.entrySet()) {
74 | System.out.println(entry.getKey()+":个数"+entry.getValue().size());
75 | for (RecordSetEntity recordSet : entry.getValue()) {
76 | System.out.println("\t"+recordSet.getMingCheng()+recordSet.getPoint());
77 | }
78 | }
79 | ```
80 |
81 |
82 | ---
83 | ## 用前须知
84 | - 运行本项目需要有iobjects的运行权限,首先需要确保可以正常使用iobjects。
85 | - 目前只支持udb文件
86 |
87 | ---
88 | ## 总结
89 | 项目从无到有,从知识图谱的基础知识、构建方式、数据库选型,到目前Demo阶段性的完成,耗费了不少心神,由于本项目定位为Demo,难免有很多问题,欢迎各位对知识图谱和地理信息有兴趣的同学加入,共同维护。
90 |
--------------------------------------------------------------------------------
/SampleData/sample.udb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/SampleData/sample.udb
--------------------------------------------------------------------------------
/SampleData/sample.udd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/SampleData/sample.udd
--------------------------------------------------------------------------------
/image/company.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/image/company.png
--------------------------------------------------------------------------------
/image/result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/image/result.png
--------------------------------------------------------------------------------
/image/shiyi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/image/shiyi.png
--------------------------------------------------------------------------------
/jar/2.0/geokg-2.0-jar-with-dependencies.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/jar/2.0/geokg-2.0-jar-with-dependencies.jar
--------------------------------------------------------------------------------
/jar/2.0/geokg-2.0-sources.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/jar/2.0/geokg-2.0-sources.jar
--------------------------------------------------------------------------------
/jar/2.0/geokg-2.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperMap/ATLab-KnowledgeGraph/b9aaa8d44c578301c29c8977db6db0886749bd4e/jar/2.0/geokg-2.0.jar
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 | com.supermap
5 | geokg
6 | 2.0
7 | jar
8 |
9 |
10 | UTF-8
11 | 7
12 | 7
13 |
14 |
15 |
16 |
17 | junit
18 | junit
19 | 3.8.1
20 | test
21 |
22 |
25 |
26 |
27 | com.vividsolutions
28 | jts
29 | 1.13
30 |
31 |
32 | io.sgr
33 | s2-geometry-library-java
34 | 1.0.0
35 |
36 |
37 | com.supermap
38 | data
39 | 9.1.2
40 |
41 |
44 |
45 |
46 | org.eclipse.rdf4j
47 | rdf4j-model
48 | 2.5.2
49 |
50 |
51 |
52 | org.eclipse.rdf4j
53 | rdf4j-rio-api
54 | 2.5.2
55 |
56 |
57 |
58 | org.eclipse.rdf4j
59 | rdf4j-repository-api
60 | 2.5.2
61 |
62 |
63 |
64 | org.eclipse.rdf4j
65 | rdf4j-repository-sail
66 | 2.5.2
67 |
68 |
69 |
70 | org.eclipse.rdf4j
71 | rdf4j-sail-memory
72 | 2.5.2
73 |
74 |
75 |
76 | org.eclipse.rdf4j
77 | rdf4j-sail-nativerdf
78 | 2.5.2
79 |
80 |
81 |
82 |
83 |
84 |
85 | org.apache.maven.plugins
86 | maven-assembly-plugin
87 | 2.5.5
88 |
89 |
90 | jar-with-dependencies
91 |
92 |
93 |
94 |
95 | make-assembly
96 | package
97 |
98 | single
99 |
100 |
101 |
102 |
103 |
104 | org.apache.maven.plugins
105 | maven-surefire-plugin
106 | 2.18.1
107 |
108 | true
109 |
110 |
111 |
112 |
113 | org.apache.maven.plugins
114 | maven-source-plugin
115 | 3.0.0
116 |
117 |
118 |
119 |
120 | compile
121 |
122 |
123 | jar-no-fork
124 |
125 |
126 |
127 |
128 |
129 |
130 |
135 |
140 |
143 |
144 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/geoknowledge/KnowledgeGraph.java:
--------------------------------------------------------------------------------
1 | package www.supermap.geoknowledge;
2 |
3 | import java.io.BufferedReader;
4 | import java.io.File;
5 | import java.io.FileNotFoundException;
6 | import java.io.FileOutputStream;
7 | import java.io.FileReader;
8 | import java.io.IOException;
9 | import java.util.ArrayList;
10 | import java.util.HashMap;
11 |
12 | import com.google.common.geometry.S2CellId;
13 | import com.google.common.geometry.S2LatLng;
14 | import com.supermap.data.CoordSysTransMethod;
15 | import com.supermap.data.CoordSysTransParameter;
16 | import com.supermap.data.CoordSysTranslator;
17 | import com.supermap.data.Dataset;
18 | import com.supermap.data.Datasource;
19 | import com.supermap.data.DatasourceConnectionInfo;
20 | import com.supermap.data.EngineType;
21 | import com.supermap.data.PrjCoordSys;
22 | import com.supermap.data.Workspace;
23 |
24 | import www.supermap.model.iobjects.ObjectGrid;
25 | import www.supermap.model.iobjects.RecordSetEntity;
26 | import www.supermap.utils.Common;
27 | import www.supermap.utils.Iobjects;
28 | import www.supermap.utils.Rdf4j;
29 | import www.supermap.utils.S2;
30 |
31 | /**
32 | * 知识图谱类,包括图谱的构建、数据的添加、搜索功能
33 | * @author SunYasong
34 | *
35 | */
36 | public class KnowledgeGraph {
37 | // 知识图谱的配置文件
38 | private static final String CONFIGFILE = "KnowledgeGraph.conf";
39 | // 图谱存储目录
40 | private static final String KNOWLEDGE_STORE_DIR = "KnowledgeStore";
41 | // 原始数据存储目录
42 | private static final String ORIGIN_DATA_DIR = "OriginData";
43 | // 数据存储的根目录
44 | private String storeDir;
45 | // 图谱构建使用的网格级别
46 | private int gridLevel = 13;
47 |
48 | /**
49 | * 加载默认配置下的图谱,如果不存在则构建一个空的知识图谱。默认参数:网格级别为13,图谱存储目录为当前项目根目录的GeoKnowledgeStore\\
50 | */
51 | public KnowledgeGraph() {
52 |
53 | }
54 |
55 | /**
56 | * 有参的构造函数
57 | * @param gridLevel 网格层级
58 | * @param storeDir 存储路径
59 | */
60 | private KnowledgeGraph(int gridLevel, String storeDir) {
61 | // TODO Auto-generated constructor stub
62 | this.gridLevel = gridLevel;
63 | this.storeDir = storeDir;
64 | }
65 |
66 | public String getStoreDir() {
67 | File file = new File(storeDir);
68 | return file.getAbsolutePath();
69 | }
70 |
71 | public int getGridLevel() {
72 | return gridLevel;
73 | }
74 |
75 | private String getKnowledgeGraphStorePath() {
76 | return this.storeDir + File.separator + KNOWLEDGE_STORE_DIR;
77 | }
78 |
79 | private String getOriginDataStorePath() {
80 | return this.storeDir + File.separator + ORIGIN_DATA_DIR;
81 | }
82 |
83 | private String getConfigFilePath() {
84 | return this.storeDir + File.separator + CONFIGFILE;
85 | }
86 |
87 | /**
88 | * 根据网格等级和图谱存储目录的路径来构建一个知识图谱
89 | *
90 | * @param iGridLevel 网格等级
91 | * @param strDataStore 图谱存储路径
92 | * @return 创建成功返回True,否则返回fasle
93 | */
94 | public static boolean createKnowledgeGraph(int iGridLevel, String strDataStore) {
95 | // KnowledgeGraph know = new KnowledgeGraph(iGridLevel,strDataStore);
96 | graphInit(strDataStore, iGridLevel);
97 | return true;
98 | }
99 |
100 | /**
101 | * 根据网格边长和图谱存储目录的路径来构建一个新的知识图谱
102 | *
103 | * @param iGridLength 网格边长
104 | * @param strDataStore 图谱存储路径
105 | * @return 构建成功则返回True,否则返回False
106 | */
107 | public static boolean createKnowledgeGraph(double iGridLength, String strDataStore) {
108 | int gridLength = S2.getCellLevelFromLength(iGridLength);
109 | return createKnowledgeGraph(gridLength, strDataStore);
110 | }
111 |
112 |
113 |
114 | /**
115 | * 按照图谱存储路径加载一个已经存在的知识图谱
116 | * @param strDataStore 图谱存储路径
117 | * @return KnowledgeGraph类
118 | */
119 | public static KnowledgeGraph loadKnowledgeGraph(String strDataStore) {
120 | // 检查目录合法性
121 | KnowledgeGraph know = null;
122 | boolean checkedDir = Common.checkDir(strDataStore);
123 | if (!checkedDir) {
124 | System.out.println("载入知识图谱路径错误,请指定正确的路径名");
125 | System.exit(1);
126 | }
127 | // 将对象路径改为完整路径
128 | String fullStrDataStore = new File(strDataStore).getAbsolutePath();
129 | HashMap confInfo = getConfInfo(fullStrDataStore);
130 | // System.out.println(fullStrDataStore);
131 | if (confInfo.isEmpty()) {
132 | System.out.println("图谱配置信息载入失败,请重新指定图谱路径或删除当前图谱");
133 | System.exit(1);
134 | }
135 | // 一.目录下有配置文件,
136 | else {
137 | int gridLevel = Integer.valueOf(confInfo.get("gridLevel"));
138 | String storeDir = confInfo.get("storeDir");
139 | know = new KnowledgeGraph(gridLevel, storeDir);
140 | }
141 | System.out.println("成功加载知识图谱");
142 | return know;
143 | }
144 |
145 | /**
146 | * 从指定数据源读取指定地理实体,存入构建好的知识图谱中
147 | *
148 | * @param dataSource 数据源路径,目前支持UDB所在目录
149 | * @param arType 数据源中想要添加到知识图谱中的地理实体类型
150 | * @return 添加成功返回True,否则返回False
151 | */
152 | public boolean addKnowledgeGraph(String dataSource, String[] arType) {
153 | // 将指定数据集存入知识图谱数据源
154 | ArrayList storeDataSetsIds = this.storeDataSource(dataSource, arType);
155 | // 得到数据源中符合指定类型的所有数据集
156 | // ArrayList gisData =
157 | // ProcessData.getGisDataFromDataSource(dataSource,geoTypes);
158 | // ArrayList gridModels =
159 | // ProcessData.getKnowledgeGraphModel(gisData,this.gridLevel);
160 | // Boolean bo = Rdf4j.writeToKnowledgeGraph(gridModels, this.storeDir);
161 | for (String dataSetId : storeDataSetsIds) {
162 | ArrayList gisData = Iobjects
163 | .getGisDataFromDataSet(this.getOriginDataStorePath(), dataSetId);
164 | // 生成可以存入知识图谱的数据模型-Grid
165 | ArrayList gridModels = Iobjects.getKnowledgeGraphModelFromObjects(gisData, this.gridLevel);
166 | // 将数据增量存入知识图谱
167 | Boolean bo = Rdf4j.writeToKnowledgeGraphFromObject(gridModels, this.getKnowledgeGraphStorePath());
168 | System.out.println(dataSetId.split("_")[2] + " 已存储到知识图谱");
169 | }
170 | System.out.println("增量更新完毕");
171 | return true;
172 | }
173 |
174 | /**
175 | * 通过指定经纬度和半径构建缓冲区,从当前图谱中查询出符合候选类型的信息
176 | *
177 | * @param dLatitude
178 | * 纬度
179 | * @param dLongitude
180 | * 经度
181 | * @param iRadius
182 | * 缓冲区半径
183 | * @param arType
184 | * 地理实体类型
185 | * @return RecordSet
186 | */
187 | public HashMap> queryKnowledgeGraph(double dLatitude, double dLongitude,
188 | double iRadius, String[] arType) {
189 | // 判断经纬度位于哪个网格
190 | S2LatLng laln = S2LatLng.fromDegrees(dLatitude, dLongitude);
191 | S2CellId cell = S2CellId.fromLatLng(laln).parent(this.gridLevel);
192 | // 使用S2缓冲分析,得到缓冲区内的所有网格
193 | ArrayList coverCells = S2.getCoveringCellIdsFromCell(cell, iRadius, this.gridLevel);
194 | // 从知识图谱中获得指定类型的id,key为类型,vaule为符合key类型的cellid
195 | HashMap> idResults = Rdf4j
196 | .queryGeoFromMultiCellsAndGeoTypes(this.getKnowledgeGraphStorePath(), coverCells, arType);
197 | // 通过id从源文件中取RecordSet
198 | HashMap> recordSetResults = Iobjects.getRecordSetFromIds(idResults,
199 | this.getOriginDataStorePath());
200 | // HashMap> searchResults =
201 | // Rdf4j.queryGeoInfoFromMultiCellsAndGeoTypes(this.getKnowledgeGraphStorePath(),this.getOriginDataStorePath(),coverCells,geoTypes);
202 | return recordSetResults;
203 | }
204 |
205 |
206 | /**
207 | * 通过指定经纬度和半径构建缓冲区,从当前图谱中查询出符合候选类型的信息
208 | * @param dLatitude 纬度
209 | * @param dLongitude 经度
210 | * @param iRadius 缓冲区半径
211 | * @param arType 实体类型
212 | * @param time 实体的时间
213 | * @return
214 | */
215 | public HashMap> queryKnowledgeGraph(double dLatitude, double dLongitude,
216 | double iRadius, String[] arType , String time) {
217 | // 判断经纬度位于哪个网格
218 | S2LatLng laln = S2LatLng.fromDegrees(dLatitude, dLongitude);
219 | S2CellId cell = S2CellId.fromLatLng(laln).parent(this.gridLevel);
220 | // 使用S2缓冲分析,得到缓冲区内的所有网格
221 | ArrayList coverCells = S2.getCoveringCellIdsFromCell(cell, iRadius, this.gridLevel);
222 | // 从知识图谱中获得指定类型的id,key为类型,vaule为符合key类型的cellid
223 | HashMap> idResults = Rdf4j.queryGeoFromMultiCellsAndGeoTypes(this.getKnowledgeGraphStorePath(), coverCells, arType, time);
224 | // 通过id从源文件中取RecordSet
225 | HashMap> recordSetResults = Iobjects.getRecordSetFromIds(idResults,
226 | this.getOriginDataStorePath());
227 | // HashMap> searchResults =
228 | // Rdf4j.queryGeoInfoFromMultiCellsAndGeoTypes(this.getKnowledgeGraphStorePath(),this.getOriginDataStorePath(),coverCells,geoTypes);
229 | return recordSetResults;
230 | }
231 |
232 | /**
233 | * 图谱初始化,加载目录下的配置文件,没有的话直接构建
234 | *
235 | * @param storeDir 存储路径
236 | * @param gridLevel 网格级别
237 | */
238 | private static void graphInit(String storeDir, int gridLevel) {
239 | // 检查目录合法性
240 | boolean checkedDir = Common.checkDir(storeDir);
241 | // 将对象路径改为完整路径
242 | String fullStoreDir = new File(storeDir).getAbsolutePath();
243 | HashMap confInfo = getConfInfo(fullStoreDir);
244 | // 目录下没有配置文件,按照输入网格级别直接创建并加载
245 | if (checkedDir && confInfo.isEmpty()) {
246 | if (initDataStore(storeDir, gridLevel)) {
247 | System.out.println("初始化成功,成功构建空图谱");
248 | }
249 | }
250 | // 一.目录下有配置文件,
251 | else {
252 | System.out.println("指定路径下已有知识图谱,请重新指定存储路径或删除当前图谱");
253 | System.exit(1);
254 | }
255 |
256 | }
257 |
258 | /**
259 | * 获取图谱存储路径下的配置文件,
260 | * @param storeDir 图谱存储路径
261 | * @return 没有则返回空HashMap
262 | */
263 | private static HashMap getConfInfo(String storeDir) {
264 | String filePath = storeDir + File.separator + CONFIGFILE;
265 | // System.out.println(filePath);
266 | File file = new File(filePath);
267 | // 一行一行读取内容放到集合中
268 | ArrayList allInfos = new ArrayList();
269 | try {
270 | BufferedReader br = new BufferedReader(new FileReader(file));
271 | String s = null;
272 | try {
273 | while ((s = br.readLine()) != null) {
274 | allInfos.add(s);
275 | }
276 | } catch (IOException e) {
277 | // TODO Auto-generated catch block
278 | e.printStackTrace();
279 | }
280 | } catch (FileNotFoundException e) {
281 | // TODO Auto-generated catch block
282 | // e.printStackTrace();
283 | // System.out.println("没有找到配置文件:"+file.getAbsolutePath());
284 | return new HashMap();
285 | }
286 | // 将一行一行内容处理成map形式
287 | HashMap confInfos = new HashMap();
288 | for (String str : allInfos) {
289 | String[] info = str.split("=");
290 | try {
291 | confInfos.put(info[0], info[1]);
292 | } catch (Exception e) {
293 | // TODO Auto-generated catch block
294 | e.printStackTrace();
295 | System.out.println("配置文件错误,请删除图谱存储文件夹,重新生成图谱");
296 | System.exit(1);
297 | }
298 | }
299 | return confInfos;
300 | }
301 |
302 | /**
303 | * 初始化数据仓库,包括生成配置文件,新建知识图谱存储目录和原始数据存储目录。 将配置信息写入图谱文件夹的KnowledgeGraph.conf
304 | *
305 | * @param storeDir
306 | * @param gridLevel
307 | */
308 | private static boolean initDataStore(String storeDir, int gridLevel) {
309 | // TODO Auto-generated method stub
310 | // 固定gridLevel的范围,小于0则为0,大于20则为20
311 | if (gridLevel < 0) {
312 | gridLevel = 0;
313 | } else if (gridLevel > 20) {
314 | gridLevel = 20;
315 | }
316 | // 将配置信息写入配置文件
317 | File confFile = new File(storeDir + File.separator + CONFIGFILE);
318 | String confContent = "";
319 | String confGridLevel = "gridLevel=" + gridLevel + "\n";
320 | String absolutePath = "storeDir=" + confFile.getAbsolutePath().substring(0,
321 | confFile.getAbsolutePath().length() - CONFIGFILE.length() - 1);
322 | confContent = confGridLevel + absolutePath;
323 | try {
324 | FileOutputStream fos = new FileOutputStream(confFile);
325 | fos.write(confContent.getBytes());
326 | } catch (IOException e) {
327 | System.out.println(confFile.getAbsolutePath() + "打开失败");
328 | System.exit(1);
329 | }
330 | // 新建知识图存储目录以及源数据(udb文件)存储目录
331 | File knowledgeDir = new File(storeDir + File.separator + KNOWLEDGE_STORE_DIR);
332 | knowledgeDir.mkdirs();
333 | File originDataDir = new File(storeDir + File.separator + ORIGIN_DATA_DIR);
334 | originDataDir.mkdirs();
335 | return true;
336 | }
337 |
338 | /**
339 | * 将要添加到知识图谱中的数据集添加到知识图谱数据源,并返回添加进的数据集id。只支持udb
340 | *
341 | * @param dataSource 数据源
342 | * @param geoTypes 类型
343 | * @return ArrayList 存储完成的数据集
344 | */
345 | private ArrayList storeDataSource(String dataSource, String[] geoTypes) {
346 | ArrayList storeDataSetsIds = new ArrayList();
347 | // 获得符合条件的数据集
348 | ArrayList allDataSets = Iobjects.getAllDataSets(dataSource);
349 | ArrayList dataSets = Iobjects.filterDataSetByAssignGeoTypes(allDataSets, geoTypes);
350 | // 1.将数据集存储到图谱源文件
351 | // 1.1 获得当前数据集要存储的数据源与数据集文件名
352 | String dataSetStartId = Iobjects.getEndDataSetId(this.getOriginDataStorePath());
353 | // System.out.println(dataSetStartId);
354 | for (Dataset dataSet : dataSets) {
355 | // 判断该数据集是否已存储过,需要存储则返回true
356 | boolean needStore = Iobjects.dataSetNeedStoreOrNot(dataSet, this.getOriginDataStorePath());
357 | if (!needStore) {
358 | continue;
359 | }
360 | String currentWholeIndexId = Iobjects.getNextDataSetId(dataSetStartId, this.getOriginDataStorePath());
361 | dataSetStartId = currentWholeIndexId;
362 | String[] idSplits = currentWholeIndexId.split("_");
363 | String currentDataSourceIndexId = idSplits[0];
364 | String currentDataSetIndexId = idSplits[1];
365 | // 数据集名称
366 | String currentDataSetName = dataSet.getName();
367 | String targetDataSetName = currentDataSetIndexId + "_" + currentDataSetName;
368 | String targetDataSetWholeId = currentDataSourceIndexId + "_" + targetDataSetName;
369 | String dataSourceServer = this.getOriginDataStorePath() + File.separator + currentDataSourceIndexId
370 | + ".udb";
371 | // System.out.println(dataSourceServer);
372 | // 1.2 转换坐标系,将数据集存储到指定数据源
373 | DatasourceConnectionInfo dscio = new DatasourceConnectionInfo();
374 | dscio.setEngineType(EngineType.UDB);
375 | dscio.setServer(dataSourceServer);
376 | Datasource targetDataSource = new Workspace().getDatasources().open(dscio);
377 | PrjCoordSys targetPrjCoordSys = PrjCoordSys.fromEPSG(4326);
378 | CoordSysTransParameter coordSysTransParameter = new CoordSysTransParameter();
379 | CoordSysTranslator.convert(dataSet, targetPrjCoordSys, targetDataSource, targetDataSetName,
380 | coordSysTransParameter, CoordSysTransMethod.MTH_GEOCENTRIC_TRANSLATION);
381 | storeDataSetsIds.add(targetDataSetWholeId);
382 | dataSet.close();
383 | targetDataSource.close();
384 | // System.out.println(targetDataSource.isOpened());
385 | }
386 | // 输出存储的信息
387 | int geoTypesNmuber = 0;
388 | if (geoTypes == null || geoTypes.length == 0) {
389 | geoTypesNmuber = allDataSets.size();
390 | } else if (geoTypes != null || geoTypes.length != 0) {
391 | geoTypesNmuber = geoTypes.length;
392 | }
393 | System.out.println("选择的数据集个数:" + geoTypesNmuber + ",已加载数据集个数:" + storeDataSetsIds.size());
394 | return storeDataSetsIds;
395 | }
396 |
397 | }
398 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/GeoObjectEntity.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import java.util.ArrayList;
4 | /**
5 | * 抽象地理实体类,包含了一些点线面的共同点
6 | * @author SunYasong
7 | *
8 | */
9 | public abstract class GeoObjectEntity {
10 | protected int cellLevel;
11 | protected String entityType;
12 | protected String entityId;
13 | protected ArrayList cellIds;
14 | protected String time;
15 | protected GeoObjectEntity() {
16 | super();
17 | // TODO Auto-generated constructor stub
18 | }
19 | public int getCellLevel() {
20 | return cellLevel;
21 | }
22 | public void setCellLevel(int cellLevel) {
23 | this.cellLevel = cellLevel;
24 | }
25 | public String getEntityType() {
26 | return entityType;
27 | }
28 | public void setEntityType(String entityType) {
29 | this.entityType = entityType;
30 | }
31 | public String getEntityId() {
32 | return entityId;
33 | }
34 | public void setEntityId(String entityId) {
35 | this.entityId = entityId;
36 | }
37 | public ArrayList getCellIds() {
38 | return cellIds;
39 | }
40 | public void setCellIds(ArrayList cellIds) {
41 | this.cellIds = cellIds;
42 | }
43 |
44 | public String getTime() {
45 | return time;
46 | }
47 | public void setTime(String time) {
48 | this.time = time;
49 | }
50 | @Override
51 | public int hashCode() {
52 | final int prime = 31;
53 | int result = 1;
54 | result = prime * result + ((cellIds == null) ? 0 : cellIds.hashCode());
55 | result = prime * result + cellLevel;
56 | result = prime * result + ((entityId == null) ? 0 : entityId.hashCode());
57 | result = prime * result + ((entityType == null) ? 0 : entityType.hashCode());
58 | return result;
59 | }
60 | @Override
61 | public boolean equals(Object obj) {
62 | if (this == obj)
63 | return true;
64 | if (obj == null)
65 | return false;
66 | if (getClass() != obj.getClass())
67 | return false;
68 | GeoObjectEntity other = (GeoObjectEntity) obj;
69 | if (cellIds == null) {
70 | if (other.cellIds != null)
71 | return false;
72 | } else if (!cellIds.equals(other.cellIds))
73 | return false;
74 | if (cellLevel != other.cellLevel)
75 | return false;
76 | if (entityId == null) {
77 | if (other.entityId != null)
78 | return false;
79 | } else if (!entityId.equals(other.entityId))
80 | return false;
81 | if (entityType == null) {
82 | if (other.entityType != null)
83 | return false;
84 | } else if (!entityType.equals(other.entityType))
85 | return false;
86 | return true;
87 | }
88 | @Override
89 | public String toString() {
90 | return "GeoEntity [cellLevel=" + cellLevel + ", entityType=" + entityType + ", entityId=" + entityId
91 | + ", cellIds=" + cellIds + "]";
92 | }
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/LineObjectEntity.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import com.supermap.data.GeoLine;
4 | /**
5 | * 线实体
6 | * @author SunYasong
7 | *
8 | */
9 | public class LineObjectEntity extends GeoObjectEntity{
10 |
11 | private GeoLine line;
12 | public LineObjectEntity(GeoLine line, String entityType,String entityId) {
13 | // TODO Auto-generated constructor stub
14 | this.entityId = entityId;
15 | this.line =line;
16 | this.entityType = entityType;
17 | }
18 | public GeoLine getLine() {
19 | return line;
20 | }
21 | public void setLine(GeoLine line) {
22 | this.line = line;
23 | }
24 | @Override
25 | public int hashCode() {
26 | final int prime = 31;
27 | int result = super.hashCode();
28 | result = prime * result + ((line == null) ? 0 : line.hashCode());
29 | return result;
30 | }
31 | @Override
32 | public boolean equals(Object obj) {
33 | if (this == obj)
34 | return true;
35 | if (!super.equals(obj))
36 | return false;
37 | if (getClass() != obj.getClass())
38 | return false;
39 | LineObjectEntity other = (LineObjectEntity) obj;
40 | if (line == null) {
41 | if (other.line != null)
42 | return false;
43 | } else if (!line.equals(other.line))
44 | return false;
45 | return true;
46 | }
47 | @Override
48 | public String toString() {
49 | return "MultiLine [multiLine=" + line + "]";
50 | }
51 |
52 |
53 | }
54 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/ObjectGrid.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import java.util.ArrayList;
4 | /**
5 | * 网格实体,组合了地理实体
6 | * @author SunYasong
7 | *
8 | */
public class ObjectGrid {

	// Id of the grid cell.
	private Long id;
	// Geographic entities that fall into this grid cell.
	private ArrayList geoEntitys;

	/**
	 * Creates a grid cell holding the given entities.
	 *
	 * @param id         cell id
	 * @param geoEntitys entities covered by this cell
	 */
	public ObjectGrid(Long id, ArrayList geoEntitys) {
		super();
		this.id = id;
		this.geoEntitys = geoEntitys;
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public ArrayList getGeoEntitys() {
		return geoEntitys;
	}

	public void setGeoEntitys(ArrayList geoEntitys) {
		this.geoEntitys = geoEntitys;
	}

	@Override
	public int hashCode() {
		// Equivalent to the classic 31-based accumulator over (geoEntitys, id).
		int h = 31 + (geoEntitys == null ? 0 : geoEntitys.hashCode());
		return 31 * h + (id == null ? 0 : id.hashCode());
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		ObjectGrid that = (ObjectGrid) obj;
		boolean sameEntities = geoEntitys == null ? that.geoEntitys == null : geoEntitys.equals(that.geoEntitys);
		boolean sameId = id == null ? that.id == null : id.equals(that.id);
		return sameEntities && sameId;
	}

	@Override
	public String toString() {
		return "Grid [id=" + id + ", geoEntitys=" + geoEntitys + "]";
	}

}
72 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/PointObjectEntity.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import com.supermap.data.GeoPoint;
4 | /**
5 | * 点实体
6 | * @author SunYasong
7 | *
8 | */
9 | public class PointObjectEntity extends GeoObjectEntity{
10 | private GeoPoint point;
11 | public PointObjectEntity(GeoPoint point,String entityType,String entityId) {
12 | super();
13 | this.entityType = entityType;
14 | this.point = point;
15 | this.entityId = entityId;
16 | }
17 | public GeoPoint getPoint() {
18 | return point;
19 | }
20 | public void setPoint(GeoPoint point) {
21 | this.point = point;
22 | }
23 | @Override
24 | public int hashCode() {
25 | final int prime = 31;
26 | int result = super.hashCode();
27 | result = prime * result + ((point == null) ? 0 : point.hashCode());
28 | return result;
29 | }
30 | @Override
31 | public boolean equals(Object obj) {
32 | if (this == obj)
33 | return true;
34 | if (!super.equals(obj))
35 | return false;
36 | if (getClass() != obj.getClass())
37 | return false;
38 | PointObjectEntity other = (PointObjectEntity) obj;
39 | if (point == null) {
40 | if (other.point != null)
41 | return false;
42 | } else if (!point.equals(other.point))
43 | return false;
44 | return true;
45 | }
46 | @Override
47 | public String toString() {
48 | return "PointEntity [point=" + point + "]";
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/RecordSetEntity.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import java.io.File;
4 |
5 | import com.supermap.data.CursorType;
6 | import com.supermap.data.Dataset;
7 | import com.supermap.data.DatasetVector;
8 | import com.supermap.data.Datasource;
9 | import com.supermap.data.DatasourceConnectionInfo;
10 | import com.supermap.data.EngineType;
11 | import com.supermap.data.Geometry;
12 | import com.supermap.data.Point2D;
13 | import com.supermap.data.Recordset;
14 | import com.supermap.data.Workspace;
15 | /**
16 | * 记录集实体
17 | * @author SunYasong
18 | *
19 | */
public class RecordSetEntity {
	// Id of the record within the graph; encodes "<dataSourceId>_<dataSetId>_<recordIndex>"
	// so the backing datasource/dataset/record can be located again.
	private String recordId;
//	private Recordset recordSet;
	// Directory holding the datasource (.udb) files. NOTE(review): field name
	// looks like a typo for "dataStoreDir".
	private String dataDtoreDir;
	// Record type, i.e. the dataset name.
	private String entityType;
	// An interior point of the geometry this record represents.
	private Point2D point;
	// A name-like field of the record ("mingCheng" = name).
	private String mingCheng;
	// Time field of the record ("shiJian" = time).
	private String shiJian;

	/**
	 * Loads a record's display fields by resolving {@code recordId} against the
	 * .udb files under {@code dataDtoreDir}.
	 *
	 * @param recordId    composite id "<dataSourceId>_<dataSetId>_<recordIndex>"
	 * @param dataDtoreDir directory containing the .udb datasource files
	 * @param entityType  dataset name suffix used to locate the dataset
	 */
	public RecordSetEntity(String recordId, String dataDtoreDir, String entityType) {
		this.recordId = recordId;
		this.dataDtoreDir = dataDtoreDir;
		this.entityType = entityType;
		getInfoByRecordId(recordId, dataDtoreDir, entityType);
	}

	/**
	 * Extracts display fields from an already-positioned recordset; the
	 * id/path/type fields are left null in this case.
	 */
	public RecordSetEntity(Recordset recordSet) {
		getRequiredInfo(recordSet);
	}

	public Point2D getPoint() {
		return point;
	}

	public String getMingCheng() {
		return mingCheng;
	}

	/**
	 * Opens the datasource referenced by {@code recordId}, positions a recordset
	 * on the target record and extracts the required fields.
	 *
	 * @param recordId    composite id "<dataSourceId>_<dataSetId>_<recordIndex>"
	 * @param dataDtoreDir directory containing the .udb files
	 * @param entityType  dataset name suffix
	 */
	private void getInfoByRecordId(String recordId, String dataDtoreDir, String entityType) {
		String[] idSplits = recordId.split("_");
		String dataSourceId = idSplits[0];
		String dataSetId = idSplits[1];
		int recordIndex = Integer.valueOf(idSplits[2]);
		// Obtain the recordset from the .udb datasource.
		Workspace workSpace = new Workspace();
		DatasourceConnectionInfo dataSourceConnectionInfo = new DatasourceConnectionInfo();
		dataSourceConnectionInfo.setServer(dataDtoreDir + File.separator + dataSourceId + ".udb");
		dataSourceConnectionInfo.setEngineType(EngineType.UDB);

		Datasource dataSource = null;
		try {
			dataSource = workSpace.getDatasources().open(dataSourceConnectionInfo);
		} catch (Exception e) {
			// NOTE(review): open failures are swallowed; the entity is then left
			// with all-null fields. Confirm this best-effort behavior is intended.
		}
		if (dataSource != null) {
			Dataset dataSet = dataSource.getDatasets().get(dataSetId + "_" + entityType);
			DatasetVector dataSetVector = (DatasetVector) dataSet;
			Recordset recordSet = dataSetVector.getRecordset(false, CursorType.STATIC);
			recordSet.moveTo(recordIndex);
			// Pull the required field values out of the recordset.
			getRequiredInfo(recordSet);
			dataSource.close();
		}
	}

	/**
	 * Extracts the name-like field from the recordset. Not every dataset has a
	 * name ("mc") field, so it falls back to location ("wz"), then district
	 * ("qx"), then null.
	 *
	 * @param recordSet recordset positioned on the record of interest
	 */
	private void getRequiredInfo(Recordset recordSet) {
		// Interior point of the record's geometry.
		Geometry geometry = recordSet.getGeometry();
		this.point = geometry.getInnerPoint();
		// Time field — NOTE(review): throws NPE if "sj" is absent/null; verify
		// callers only pass datasets that carry this field.
		this.shiJian = recordSet.getFieldValue("sj").toString();
		// Fallback chain mc -> wz -> qx -> null, driven by thrown exceptions.
		try {
			this.mingCheng = (String) recordSet.getFieldValue("mc");
		} catch (Exception e) {
			try {
				this.mingCheng = (String) recordSet.getFieldValue("wz");
			} catch (Exception e2) {
				try {
					this.mingCheng = (String) recordSet.getFieldValue("qx");
				} catch (Exception e3) {
					// No usable name-like field at all.
					this.mingCheng = null;
				}
			}
		}
	}

	/**
	 * Remember to regenerate toString when fields are added or removed.
	 */
	@Override
	public String toString() {
		return "[point=" + point + ", mingCheng=" + mingCheng + "]";
	}

	// NOTE(review): currently unused. It also closes the datasource before
	// returning the recordset — the returned handle is presumably invalid after
	// close; confirm against the SuperMap iObjects lifetime rules before use.
	private static Recordset getRecordSetById(String recordId, String dataDtoreDir, String entityType) {
		String[] idSplits = recordId.split("_");
		String dataSourceId = idSplits[0];
		String dataSetId = idSplits[1];
		int recordIndex = Integer.valueOf(idSplits[2]);
		// Open the datasource and position on the record.
		Workspace workSpace = new Workspace();
		DatasourceConnectionInfo dataSourceConnectionInfo = new DatasourceConnectionInfo();
		dataSourceConnectionInfo.setServer(dataDtoreDir + File.separator + dataSourceId + ".udb");
		dataSourceConnectionInfo.setEngineType(EngineType.UDB);
		Datasource dataSource = workSpace.getDatasources().open(dataSourceConnectionInfo);
		Dataset dataSet = dataSource.getDatasets().get(dataSetId + "_" + entityType);
		DatasetVector dataSetVector = (DatasetVector) dataSet;
		Recordset recordSet = dataSetVector.getRecordset(false, CursorType.STATIC);
		recordSet.moveTo(recordIndex);
		dataSource.close();
		return recordSet;
	}

}
147 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/model/iobjects/RegionObjectEntity.java:
--------------------------------------------------------------------------------
1 | package www.supermap.model.iobjects;
2 |
3 | import com.supermap.data.GeoRegion;
4 | /**
5 | * 面实体
6 | * @author SunYasong
7 | *
8 | */
9 | public class RegionObjectEntity extends GeoObjectEntity{
10 |
11 | private GeoRegion region;
12 |
13 | public RegionObjectEntity(GeoRegion region,String entityType,String entityId) {
14 | // TODO Auto-generated constructor stub
15 | this.entityId = entityId;
16 | this.region = region;
17 | this.entityType = entityType;
18 | }
19 |
20 | public GeoRegion getMultiPolygon() {
21 | return region;
22 | }
23 |
24 | public void setMultiPolygon(GeoRegion region) {
25 | this.region = region;
26 | }
27 |
28 | @Override
29 | public int hashCode() {
30 | final int prime = 31;
31 | int result = super.hashCode();
32 | result = prime * result + ((region == null) ? 0 : region.hashCode());
33 | return result;
34 | }
35 |
36 | @Override
37 | public boolean equals(Object obj) {
38 | if (this == obj)
39 | return true;
40 | if (!super.equals(obj))
41 | return false;
42 | if (getClass() != obj.getClass())
43 | return false;
44 | RegionObjectEntity other = (RegionObjectEntity) obj;
45 | if (region == null) {
46 | if (other.region != null)
47 | return false;
48 | } else if (!region.equals(other.region))
49 | return false;
50 | return true;
51 | }
52 |
53 | @Override
54 | public String toString() {
55 | return "RegionEntity [multiPolygon=" + region + "]";
56 | }
57 |
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/utils/Common.java:
--------------------------------------------------------------------------------
1 | package www.supermap.utils;
2 |
3 | import java.io.File;
4 | import java.nio.file.FileSystemException;
5 |
6 | /**
7 | * 一般性的工具类
8 | *
9 | * @author SunYasong
10 | *
11 | */
public class Common {
	/**
	 * Validates that {@code dir} denotes a usable directory: an existing
	 * directory passes; a non-existing path without a '.' is created via
	 * mkdirs(); anything else is reported and the JVM exits with status 1.
	 *
	 * @param dir directory path to check
	 * @return true when the directory exists or was created; on invalid input
	 *         the method calls System.exit(1) instead of returning false
	 */
	public static boolean checkDir(String dir) {
		File originFile = new File(dir);
		if (originFile.exists()) {
			// Existing path: only accept it if it really is a directory.
			if (originFile.isDirectory()) {
				return true;
			}
			System.out.println("java.nio.file.FileSystemException:" + originFile.getAbsolutePath());
			System.out.println("\t" + "设置参数应该为目录");
			System.exit(1);
			return false;
		}
		// Heuristic: a path containing '.' is assumed to name a file, not a
		// directory (may reject dotted directory names).
		if (dir.contains(".")) {
			new FileSystemException(dir).printStackTrace();
			System.out.println("\t" + "非法的路径名,请使用不含有.的合法目录路径");
			System.exit(1);
			return false;
		}
		// Fix: mkdirs() reports failure via its return value, not an exception.
		// The original ignored it and could claim success for an uncreated dir.
		// (isDirectory() re-check guards against a concurrent creator.)
		if (!originFile.mkdirs() && !originFile.isDirectory()) {
			System.out.println("请输入正确的路径");
			System.exit(1);
			return false;
		}
		return true;
	}

}
72 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/utils/Iobjects.java:
--------------------------------------------------------------------------------
1 | package www.supermap.utils;
2 |
3 | import java.io.File;
4 | import java.util.ArrayList;
5 | import java.util.Collections;
6 | import java.util.HashMap;
7 | import java.util.HashSet;
8 |
9 | import com.supermap.data.CursorType;
10 | import com.supermap.data.Dataset;
11 | import com.supermap.data.DatasetType;
12 | import com.supermap.data.DatasetVector;
13 | import com.supermap.data.Datasets;
14 | import com.supermap.data.Datasource;
15 | import com.supermap.data.DatasourceConnectionInfo;
16 | import com.supermap.data.EngineType;
17 | import com.supermap.data.GeoLine;
18 | import com.supermap.data.GeoPoint;
19 | import com.supermap.data.GeoRegion;
20 | import com.supermap.data.Recordset;
21 | import com.supermap.data.Workspace;
22 |
23 | import www.supermap.model.iobjects.GeoObjectEntity;
24 | import www.supermap.model.iobjects.LineObjectEntity;
25 | import www.supermap.model.iobjects.ObjectGrid;
26 | import www.supermap.model.iobjects.PointObjectEntity;
27 | import www.supermap.model.iobjects.RecordSetEntity;
28 | import www.supermap.model.iobjects.RegionObjectEntity;
29 |
30 | /**
31 | * iobjects相关
32 | *
33 | * @author SunYasong
34 | *
35 | */
public class Iobjects {

	// Maximum number of datasets a single local .udb datasource file may hold;
	// datasets beyond this count go into a newly created datasource.
	private static final int MAX_DATASET_NUM = 100;

	/**
	 * Scans the datasets stored under the graph's data directory and returns
	 * the id of the last stored dataset as "<datasourceId>_D<datasetIndex>".
	 *
	 * @param storeDir directory holding the .udb datasource files
	 * @return composite id of the last dataset in the newest datasource
	 */
	public static String getEndDataSetId(String storeDir) {
		// Currently a single datasource (id 0) is used initially; each
		// datasource holds up to 100 datasets, overflow goes to a new one.
		// 1. Collect the numeric ids of all .udb files under the store dir.
		File oriFile = new File(storeDir);
		File[] files = oriFile.listFiles();
		HashSet idSet = new HashSet();
		for (File file : files) {
			String fileWholeName = file.getName();
			if (!fileWholeName.endsWith("udb")) {
				continue;
			}
			int fileId = Integer.valueOf(fileWholeName.split("\\.")[0]);
			idSet.add(fileId);
		}
		// 2. Pick the largest datasource id (0 when none exist yet).
		int maxId = 0;
		if (!idSet.isEmpty()) {
			maxId = Collections.max(idSet);
		}
		String currentStore = maxId + ".udb";
		Workspace workSpace = new Workspace();
		DatasourceConnectionInfo datasourceConnectionInfo = new DatasourceConnectionInfo();
		datasourceConnectionInfo.setServer(storeDir + File.separator + currentStore);
		datasourceConnectionInfo.setEngineType(EngineType.UDB);
		Datasource dataSource = null;
		try {
			// Open the datasource.
			dataSource = workSpace.getDatasources().open(datasourceConnectionInfo);
		} catch (javax.management.RuntimeErrorException e) {
			// Fall back to creating the datasource when it cannot be opened.
			// NOTE(review): only this exact exception type is caught — confirm
			// that open() actually throws RuntimeErrorException on a missing
			// file; otherwise dataSource stays null and the next line NPEs.
			dataSource = workSpace.getDatasources().create(datasourceConnectionInfo);
		}
		int id = dataSource.getDatasets().getCount() - 1;
		dataSource.close();
		return maxId + "_D" + id;
	}

	/**
	 * Filters the given datasets down to the requested geometry types.
	 *
	 * @param allDataSets all candidate datasets
	 * @param geoTypes    dataset names to keep; null/empty keeps all of them
	 * @return the filtered dataset list (the same list object when geoTypes is
	 *         null/empty)
	 */
	public static ArrayList filterDataSetByAssignGeoTypes(ArrayList allDataSets, String[] geoTypes) {
		ArrayList filterDataSets = new ArrayList();
		if (geoTypes == null || geoTypes.length == 0) {
			for (Dataset dataSet : allDataSets) {
				// Vector-dataset sanity check via cast.
				// NOTE(review): an invalid class cast would throw
				// ClassCastException here, but unlike the branch below this one
				// does not System.exit — confirm the asymmetry is intended.
				try {
					Dataset dataSetVector = (DatasetVector) dataSet;
				} catch (Exception e) {
					e.printStackTrace();
					System.out.println("目前只支持矢量数据集,请指定矢量数据集或删除数据源中的所有非矢量数据集,然后再次尝试");
				}
			}
			filterDataSets = allDataSets;
		} else {
			for (String type : geoTypes) {
				for (Dataset dataSet : allDataSets) {
					// The dataset name is used as its type discriminator.
					if (dataSet.getName().equals(type)) {
						try {
							Dataset dataSetVector = (DatasetVector) dataSet;
						} catch (Exception e) {
							e.printStackTrace();
							System.out.println("目前只支持矢量数据集,检测到 " + type + " 为非矢量数据集,请删除该类型后重试 ");
							System.exit(1);
						}
						filterDataSets.add(dataSet);
						break;
					}
				}
			}
		}
		return filterDataSets;
	}

	/**
	 * Collects all datasets referenced by the given datasource string.
	 * Only .udb file datasources are implemented so far; smwu workspaces, shp
	 * files and database connection strings are placeholders.
	 *
	 * @param dataSource datasource path / connection string
	 * @return datasets contained in the datasource (empty for unsupported kinds)
	 */
	public static ArrayList getAllDataSets(String dataSource) {
		ArrayList dataSets = new ArrayList();
		Workspace workSpace = new Workspace();
		DatasourceConnectionInfo datasourceConnectionInfo = new DatasourceConnectionInfo();
		// Dispatch on the string suffix: smwu workspace, udb datasource, shp
		// file, or a database connection string.
		if (dataSource.endsWith("smwu")) {
			// smwu workspace — not implemented yet.
		} else if (dataSource.endsWith("udb")) {
			// File-based .udb datasource.
			datasourceConnectionInfo.setServer(dataSource);
			datasourceConnectionInfo.setEngineType(EngineType.UDB);
			Datasource datasource = workSpace.getDatasources().open(datasourceConnectionInfo);
			Datasets allDataSets = datasource.getDatasets();
			for (int i = 0; i < allDataSets.getCount(); i++) {
				Dataset dataSet = allDataSets.get(i);
				dataSets.add(dataSet);
			}
			// NOTE(review): the datasource is never closed here, unlike the
			// other helpers in this class — confirm whether that is deliberate
			// (the returned Dataset handles may need it to stay open).
		} else if (dataSource.endsWith("shp")) {
			// shp file — not implemented yet.
		} else if (dataSource.endsWith("shujuku~!@#$%^&*()_+")) {
			// Reserved for database connections.
		}
		return dataSets;
	}

	/**
	 * Computes the id under which the next dataset should be stored, creating a
	 * fresh datasource file when the current one is full.
	 *
	 * @param currentSetStartId   current id, "<datasourceId>_D<datasetIndex>"
	 * @param originDataStorePath directory holding the .udb files
	 * @return next id in the same "<datasourceId>_D<datasetIndex>" format
	 */
	public static String getNextDataSetId(String currentSetStartId, String originDataStorePath) {
		String[] idSplits = currentSetStartId.split("_");
		int preDataSourceId = Integer.valueOf(idSplits[0]);
		// substring(1) strips the leading 'D' from the dataset index.
		int preDataSetId = Integer.valueOf(idSplits[1].substring(1));
		int currentDataSourceId;
		int currentDataSetId;
		if (preDataSetId >= MAX_DATASET_NUM - 1) {
			// Current datasource is full: roll over to a brand-new .udb file.
			currentDataSourceId = preDataSourceId + 1;
			currentDataSetId = 0;
			Workspace workSpace = new Workspace();
			DatasourceConnectionInfo datasourceConnectionInfo = new DatasourceConnectionInfo();
			datasourceConnectionInfo.setServer(originDataStorePath + File.separator + currentDataSourceId + ".udb");
			datasourceConnectionInfo.setEngineType(EngineType.UDB);
			Datasource dataSource = workSpace.getDatasources().create(datasourceConnectionInfo);
			dataSource.close();
		} else {
			currentDataSourceId = preDataSourceId;
			currentDataSetId = preDataSetId + 1;
		}
		return currentDataSourceId + "_D" + currentDataSetId;
	}

	/**
	 * Reads the geographic entities of one dataset previously stored in the
	 * graph's datasource files.
	 *
	 * @param dataSourcePath directory holding the .udb files
	 * @param wholeDataSetId composite id "<dataSourceId>_<dataSetId>_<entityType>"
	 * @return entities read from that dataset
	 */
	public static ArrayList getGisDataFromDataSet(String dataSourcePath, String wholeDataSetId) {
		String[] idSplits = wholeDataSetId.split("_");
		String dataSourceId = idSplits[0];
		String dataSetId = idSplits[1];
		String entityType = idSplits[2];
		// Record ids are later formed as "<dataSourceId>_<dataSetId>_<recordId>".
		String recordPrefix = dataSourceId + "_" + dataSetId + "_";
		ArrayList geoEntities = getGeoEntityFromDataSet(dataSourcePath, dataSourceId, dataSetId,
				entityType, recordPrefix);
		return geoEntities;
	}

	/**
	 * Reads point/line/region entities from the specified dataset.
	 *
	 * @param dataSourcePath directory holding the .udb files
	 * @param dataSourceId   datasource id (file name without extension)
	 * @param dataSetId      dataset id within the datasource
	 * @param entityType     entity type (dataset name suffix)
	 * @param recordPrefix   prefix prepended to each record id
	 * @return entities of the dataset; empty for non point/line/region types
	 */
	private static ArrayList getGeoEntityFromDataSet(String dataSourcePath, String dataSourceId,
			String dataSetId, String entityType, String recordPrefix) {
		ArrayList geoEntities = new ArrayList();
		// Open the datasource.
		Workspace workSpace = new Workspace();
		DatasourceConnectionInfo dataSourceConnectionInfo = new DatasourceConnectionInfo();
		dataSourceConnectionInfo.setServer(dataSourcePath + File.separator + dataSourceId + ".udb");
		dataSourceConnectionInfo.setEngineType(EngineType.UDB);
		Datasource dataSource = workSpace.getDatasources().open(dataSourceConnectionInfo);
		Dataset dataSet = dataSource.getDatasets().get(dataSetId + "_" + entityType);
		DatasetVector dataSetVector = (DatasetVector) dataSet;
		Recordset recordSet = dataSetVector.getRecordset(false, CursorType.STATIC);
		DatasetType dataSetType = dataSetVector.getType();
		// Branch on geometry kind: point, line or region. Each loop walks the
		// recordset once, wrapping every record in the matching entity type and
		// tagging it with its time field ("sj").
		if (dataSetType.equals(DatasetType.POINT)) {
			for (int i = 0; i < recordSet.getRecordCount(); i++) {
				GeoPoint point = (GeoPoint) recordSet.getGeometry();
				String entityId = recordPrefix + recordSet.getID();
				PointObjectEntity pointEntity = new PointObjectEntity(point, entityType, entityId);
				pointEntity.setTime(recordSet.getFieldValue("sj").toString());
				geoEntities.add(pointEntity);
				recordSet.moveNext();
			}
		} else if (dataSetType.equals(DatasetType.LINE)) {
			for (int i = 0; i < recordSet.getRecordCount(); i++) {
				GeoLine line = (GeoLine) recordSet.getGeometry();
				String entityId = recordPrefix + recordSet.getID();
				LineObjectEntity lineEntity = new LineObjectEntity(line, entityType, entityId);
				lineEntity.setTime(recordSet.getFieldValue("sj").toString());
				geoEntities.add(lineEntity);
				recordSet.moveNext();
			}
		} else if (dataSetType.equals(DatasetType.REGION)) {
			for (int i = 0; i < recordSet.getRecordCount(); i++) {
				GeoRegion region = (GeoRegion) recordSet.getGeometry();
				String entityId = recordPrefix + recordSet.getID();
				RegionObjectEntity regionEntity = new RegionObjectEntity(region, entityType, entityId);
				regionEntity.setTime(recordSet.getFieldValue("sj").toString());
				geoEntities.add(regionEntity);
				recordSet.moveNext();
			}
		}
		dataSource.close();
		return geoEntities;
	}

	/**
	 * Completes the given entities with their covering S2 cells at the given
	 * level and converts them into storable grid objects.
	 *
	 * @param gisData   entities to process (mutated in place: cell level/ids set)
	 * @param gridLevel S2 cell level used for the covering
	 * @return grid objects grouping entities by covering cell id
	 */
	public static ArrayList getKnowledgeGraphModelFromObjects(ArrayList gisData,
			int gridLevel) {
		for (GeoObjectEntity geoEntity : gisData) {
			// Point entities: covering cell(s) of a single point.
			if (geoEntity instanceof PointObjectEntity) {
				PointObjectEntity pointEntity = (PointObjectEntity) geoEntity;
				pointEntity.setCellLevel(gridLevel);
				GeoPoint point = pointEntity.getPoint();
				ArrayList cellIds = S2.getGeoPointCoveringCell(point, gridLevel);
				pointEntity.setCellIds(cellIds);
			} else if (geoEntity instanceof LineObjectEntity) {
				LineObjectEntity lineEntity = (LineObjectEntity) geoEntity;
				lineEntity.setCellLevel(gridLevel);
				GeoLine line = lineEntity.getLine();
				ArrayList cellIds = S2.getGeoLineCoveringCells(line, gridLevel);
				lineEntity.setCellIds(cellIds);
			} else if (geoEntity instanceof RegionObjectEntity) {
				RegionObjectEntity regionEntity = (RegionObjectEntity) geoEntity;
				regionEntity.setCellLevel(gridLevel);
				GeoRegion region = regionEntity.getMultiPolygon();
				ArrayList cellIds = S2.getGeoRegionCoveringCells(region, gridLevel);
				regionEntity.setCellIds(cellIds);
			}
		}
		// With entity info completed, build the grid objects used for storage.
		ArrayList grids = getGridModelFromObjects(gisData);
		return grids;
	}

	/**
	 * Builds the grid model from fully-populated entities: one grid per
	 * distinct cell id, holding every entity whose covering includes that cell.
	 *
	 * @param gisData entities with cell ids already assigned
	 * @return one ObjectGrid per distinct cell id
	 */
	private static ArrayList getGridModelFromObjects(ArrayList gisData) {
		// Collect the distinct cell ids across all entities.
		HashSet ids = new HashSet();
		for (GeoObjectEntity geoEntity : gisData) {
			for (Long id : geoEntity.getCellIds()) {
				ids.add(id);
			}
		}
		// For each cell id, gather the entities covering it.
		// NOTE(review): this is O(cells x entities x idsPerEntity); fine for
		// small inputs, revisit if datasets grow.
		ArrayList grids = new ArrayList();
		for (Long id : ids) {
			ArrayList entityies = new ArrayList();
			for (GeoObjectEntity geoEntity : gisData) {
				for (Long cellId : geoEntity.getCellIds()) {
					if (cellId.longValue() == id.longValue()) {
						entityies.add(geoEntity);
						break;
					}
				}
			}
			ObjectGrid grid = new ObjectGrid(id, entityies);
			grids.add(grid);
		}
		return grids;
	}

	/**
	 * Resolves queried record ids back to RecordSetEntity objects by reading
	 * the backing .udb files.
	 *
	 * @param idResults           record ids grouped by entity type; all ids of
	 *                            one type are assumed to live in the same
	 *                            datasource/dataset (only the first id is used
	 *                            to locate it)
	 * @param originDataStorePath directory holding the .udb files
	 * @return record entities grouped by entity type
	 */
	public static HashMap> getRecordSetFromIds(
			HashMap> idResults, String originDataStorePath) {
		HashMap> idAndRecordSets = new HashMap>();
		for (String type : idResults.keySet()) {
			ArrayList recordIds = idResults.get(type);
			// Locate the datasource/dataset this type was stored in.
			String[] idSplits = recordIds.get(0).split("_");
			String dataSourceId = idSplits[0];
			String dataSetId = idSplits[1];
			Workspace workSpace = new Workspace();
			DatasourceConnectionInfo dataSourceConnectionInfo = new DatasourceConnectionInfo();
			dataSourceConnectionInfo.setServer(originDataStorePath + File.separator + dataSourceId + ".udb");
			dataSourceConnectionInfo.setEngineType(EngineType.UDB);
			Datasource dataSource = workSpace.getDatasources().open(dataSourceConnectionInfo);
			Dataset dataSet = dataSource.getDatasets().get(dataSetId + "_" + type);
			DatasetVector dataSetVector = (DatasetVector) dataSet;
			Recordset recordSet = dataSetVector.getRecordset(false, CursorType.STATIC);
			ArrayList recordSetEntities = new ArrayList();
			for (String recordId : recordIds) {
				int recordIndex = Integer.valueOf(recordId.split("_")[2]);
				recordSet.moveTo(recordIndex);
				// Extract the required field values from the recordset.
				RecordSetEntity recordSetEntity = new RecordSetEntity(recordSet);
				recordSetEntities.add(recordSetEntity);
			}
			idAndRecordSets.put(type, recordSetEntities);
			dataSource.close();
		}
		return idAndRecordSets;
	}

	/**
	 * Decides whether a dataset still needs to be stored; datasets already
	 * present in the store are skipped.
	 *
	 * @param dataSet
	 *            dataset to check
	 * @param originDataStorePath
	 *            directory holding the stored .udb files
	 * @return true when the dataset has not been stored yet; false when it is
	 *         already stored or lacks the required time field
	 */
	public static boolean dataSetNeedStoreOrNot(Dataset dataSet, String originDataStorePath) {
		// Enumerate all .udb files in the store.
		File file = new File(originDataStorePath);
		File[] fs = file.listFiles();
		if (fs.length == 0) {
			return true;
		}
		Datasource dataSource = null;
		for (File f : fs) {
			if (!f.getName().endsWith("udb")) {
				continue;
			}

			// Compare the candidate dataset against every dataset in this udb.
			Workspace workSpace = new Workspace();
			DatasourceConnectionInfo datasourceConnectionInfo = new DatasourceConnectionInfo();
			datasourceConnectionInfo.setServer(f.getAbsolutePath());
			// System.out.println(f.getAbsolutePath());
			datasourceConnectionInfo.setEngineType(EngineType.UDB);
			dataSource = workSpace.getDatasources().open(datasourceConnectionInfo);
			Datasets dataSets = dataSource.getDatasets();
			for (int i = 0; i < dataSets.getCount(); i++) {
				Dataset originDataSet = dataSets.get(i);
				// Two datasets are considered the same when name (stored names
				// are "<dataSetId>_<name>") and geometry type match...
				if (dataSet.getName().equals(originDataSet.getName().split("_")[1])
						&& dataSet.getType().equals(originDataSet.getType())) {
					// ...and a sample record agrees on field count and time.
					DatasetVector dataSetVector = (DatasetVector) dataSet;
					Recordset recordSet = dataSetVector.getRecordset(false, CursorType.STATIC);
					// recordSet.moveFirst();
					DatasetVector originDataSetVector = (DatasetVector) originDataSet;
					Recordset originRecordSet = originDataSetVector.getRecordset(false, CursorType.STATIC);
					// originRecordSet.moveFirst();
					// Datasets without a time ("sj") attribute cannot be stored.
					// NOTE(review): getFieldValue is called without positioning
					// the cursor first (moveFirst is commented out) — confirm
					// the SuperMap API allows this on a fresh recordset.
					try {
						recordSet.getFieldValue("sj");
					} catch (Exception e) {
						System.out.println("=========================================");
						System.out.println(dataSet.getName() + "没有时间属性,无法存储");
						System.out.println("=========================================");
						// NOTE(review): returns without closing dataSource on
						// this early-exit path — possible resource leak.
						return false;
					}
					if (recordSet.getFieldCount() == originRecordSet.getFieldCount()
							&& recordSet.getFieldValue("sj").equals(originRecordSet.getFieldValue("sj"))) {
						dataSource.close();
						// Already stored — no need to store again.
						return false;
					} else {
						continue;
					}
				}
			}
			dataSource.close();
		}
		return true;
	}
}
439 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/utils/Rdf4j.java:
--------------------------------------------------------------------------------
1 | package www.supermap.utils;
2 |
3 | import java.io.File;
4 | import java.io.FileInputStream;
5 | import java.io.FileNotFoundException;
6 | import java.io.FileOutputStream;
7 | import java.io.IOException;
8 | import java.io.InputStream;
9 | import java.util.ArrayList;
10 | import java.util.HashMap;
11 | import java.util.Optional;
12 | import java.util.Set;
13 |
14 | import org.eclipse.rdf4j.model.IRI;
15 | import org.eclipse.rdf4j.model.Model;
16 | import org.eclipse.rdf4j.model.Resource;
17 | import org.eclipse.rdf4j.repository.Repository;
18 | import org.eclipse.rdf4j.model.Statement;
19 | import org.eclipse.rdf4j.model.Value;
20 | import org.eclipse.rdf4j.model.ValueFactory;
21 | import org.eclipse.rdf4j.model.impl.LinkedHashModel;
22 | import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
23 | import org.eclipse.rdf4j.model.vocabulary.RDF;
24 | import org.eclipse.rdf4j.query.BindingSet;
25 | import org.eclipse.rdf4j.query.TupleQuery;
26 | import org.eclipse.rdf4j.query.TupleQueryResult;
27 | import org.eclipse.rdf4j.repository.RepositoryConnection;
28 | import org.eclipse.rdf4j.repository.RepositoryException;
29 | import org.eclipse.rdf4j.repository.RepositoryResult;
30 | import org.eclipse.rdf4j.repository.sail.SailRepository;
31 | import org.eclipse.rdf4j.rio.RDFFormat;
32 | import org.eclipse.rdf4j.rio.RDFParseException;
33 | import org.eclipse.rdf4j.rio.Rio;
34 | import org.eclipse.rdf4j.rio.UnsupportedRDFormatException;
35 | import org.eclipse.rdf4j.sail.memory.MemoryStore;
36 | import org.eclipse.rdf4j.sail.nativerdf.NativeStore;
37 |
38 | import com.google.common.geometry.S2CellId;
39 |
40 | import www.supermap.model.iobjects.GeoObjectEntity;
41 | import www.supermap.model.iobjects.LineObjectEntity;
42 | import www.supermap.model.iobjects.ObjectGrid;
43 | import www.supermap.model.iobjects.PointObjectEntity;
44 | import www.supermap.model.iobjects.RecordSetEntity;
45 | import www.supermap.model.iobjects.RegionObjectEntity;
46 |
47 | /**
48 | * RDF4J相关
49 | *
50 | * @author SunYasong
51 | *
52 | */
53 | public class Rdf4j {
54 | /**
55 | * 得到一个初始化后的可以放再本地的内存仓库
56 | *
57 | * @param storeDir
58 | * 仓库存储的目录
59 | * @return
60 | */
61 | public static Repository getNavmoryStore(String storeDir) {
62 | File dataDir = new File(storeDir);
63 | MemoryStore memoryStore = new MemoryStore(dataDir);
64 | // memoryStore.setSyncDelay(1000L);
65 | Repository store = new SailRepository(memoryStore);
66 | store.initialize();
67 | return store;
68 | }
69 |
70 | /**
71 | * 得到一个初始化后的本地数据库
72 | *
73 | * @param storeDir
74 | * @return
75 | */
76 | public static Repository getNaviteStore(String storeDir) {
77 | File dataDir = new File(storeDir);
78 | NativeStore nativeStore = new NativeStore(dataDir);
79 | Repository store = new SailRepository(nativeStore);
80 | store.initialize();
81 | return store;
82 | }
83 |
84 | /**
85 | * 通过指定三元组从指定仓库中查询符合条件的语句
86 | *
87 | * @param db
88 | * @param subject
89 | * @param predicate
90 | * @param bing
91 | * @return
92 | */
	public static RepositoryResult queryByStatement(Repository db, IRI subject, IRI predicate, IRI bing) {
		// NOTE(review): the result is closed before being returned, its backing
		// connection is closed by try-with-resources, and the whole repository
		// is shut down in finally — callers almost certainly receive an
		// unusable RepositoryResult. Confirm intent; materializing the
		// statements into a list before returning would be the usual fix.
		RepositoryResult statements = null;
		try (RepositoryConnection conn = db.getConnection()) {
			// Filtered statement lookup (true = include inferred statements).
			statements = conn.getStatements(subject, predicate, bing, true);
			statements.close();
		} catch (Exception e) {
			System.out.println("---------------------" + e.getMessage());
		} finally {
			db.shutDown();
		}
		return statements;
	}
106 |
107 | /**
108 | * 通过传入的多个cell和类型查询知识图谱,并返回结果
109 | *
110 | * @param coverCells
111 | * 缓冲区内的所有cell
112 | * @param geoTypes
113 | * 要查询的实体类型
114 | * @return
115 | */
	public static HashMap> queryGeoFromMultiCellsAndGeoTypes(String storeDir,
			ArrayList coverCells, String[] geoTypes) {
		// NOTE(review): generic type parameters appear lost in this dump
		// ("HashMap>"); presumably HashMap<String, ArrayList<...>> — confirm
		// against the original source.
		// Decide between "all types" and "specified types".
		Model model = new LinkedHashModel();
		if (geoTypes == null || geoTypes.length == 0) {
			// All types: one bulk query over every covering cell.
			Model preModel = getRDF4jModelFromAllCellAndGeoTypes(storeDir, coverCells);
			model.addAll(preModel);
		}
		// Specified types
		else {
			// Query the graph per cell, accumulating all type-matching models.
			for (S2CellId cell : coverCells) {
				Model preModel = getRDF4jModelFromSingleCellAndGeoTypes(storeDir, cell, geoTypes);
				model.addAll(preModel);
			}
		}

		// Split the model by entity type into a hashmap.
		HashMap> result = getInfoFromRDF4jModel(model);
		return result;
	}
139 |
140 | /**
141 | * 通过传入的多个cell和类型查询知识图谱,并返回结果
142 | *
143 | * @param coverCells
144 | * 缓冲区内的所有cell
145 | * @param geoTypes
146 | * 要查询的实体类型
147 | * @return
148 | */
149 | public static HashMap> queryGeoInfoFromMultiCellsAndGeoTypes(
150 | String KnowledgeGraphStorePath, String originDataStorePath, ArrayList coverCells,
151 | String[] geoTypes) {
152 | // TODO Auto-generated method stub
153 | // 判断指定类型还是全部类型
154 | // 全部类型
155 | Model model = new LinkedHashModel();
156 | if (geoTypes == null || geoTypes.length == 0) {
157 | Model preModel = getRDF4jModelFromAllCellAndGeoTypes(KnowledgeGraphStorePath, coverCells);
158 | model.addAll(preModel);
159 | }
160 | // 指定类型
161 | else {
162 | // 从图谱中查询,得到所有符合类型的model
163 | for (S2CellId cell : coverCells) {
164 | Model preModel = getRDF4jModelFromSingleCellAndGeoTypes(KnowledgeGraphStorePath, cell, geoTypes);
165 | model.addAll(preModel);
166 | }
167 | }
168 |
169 | // 对model按类型进行分解,并存到hashmap
170 | // HashMap> result =
171 | // getInfoFromRDF4jModel(model);
172 | HashMap> result = getRecordSetFromRDF4jModel(model, originDataStorePath);
173 | return result;
174 | }
175 |
176 | /**
177 | * 通过查询出的model里的id去数据源中读取
178 | *
179 | * @param model
180 | * @param storeDir
181 | * @return
182 | */
183 | private static HashMap> getRecordSetFromRDF4jModel(Model model,
184 | String originDataStorePath) {
185 | // TODO Auto-generated method stub
186 | // 找出Model里的所有类型
187 | HashMap> info = new HashMap>();
188 | Set allTypes = model.filter(null, RDF.TYPE, null).objects();
189 | for (Value value : allTypes) {
190 | // 去除cell信息
191 | if (value.toString().substring(16).equals("Cell")) {
192 | continue;
193 | }
194 | String entityType = value.toString().substring(16);
195 | // 找到指定类型的实体
196 | Set allEntity = model.filter(null, RDF.TYPE, value).subjects();
197 | ArrayList recordSets = new ArrayList();
198 | for (Resource resource : allEntity) {
199 | String entity = resource.toString();
200 | String[] splitEntity = entity.split("#");
201 | String recordId = splitEntity[splitEntity.length - 1];
202 | RecordSetEntity recordSetEntity = new RecordSetEntity(recordId, originDataStorePath, entityType);
203 | recordSets.add(recordSetEntity);
204 | }
205 | info.put(entityType, recordSets);
206 | }
207 | return info;
208 | }
209 |
210 | /**
211 | * 获得多个cell里的所有类型的数据
212 | *
213 | * @param storeDir
214 | * @param coverCells
215 | * @return
216 | */
217 | private static Model getRDF4jModelFromAllCellAndGeoTypes(String storeDir, ArrayList coverCells) {
218 | // TODO Auto-generated method stub
219 | Repository store = new SailRepository(new MemoryStore());
220 | store.initialize();
221 | ValueFactory f = store.getValueFactory();
222 | Model model = new LinkedHashModel();
223 | for (S2CellId cell : coverCells) {
224 | String fileName = storeDir + "\\" + cell.id() + ".ntriples";
225 | InputStream input;
226 | try {
227 | input = new FileInputStream(fileName);
228 | } catch (FileNotFoundException e) {
229 | // TODO Auto-generated catch block
230 | continue;
231 | }
232 | Model curModel;
233 | try {
234 | curModel = Rio.parse(input, "", RDFFormat.NTRIPLES).filter(null, RDF.TYPE, null);
235 | model.addAll(curModel);
236 | } catch (RDFParseException e) {
237 | // TODO Auto-generated catch block
238 | e.printStackTrace();
239 | } catch (UnsupportedRDFormatException e) {
240 | // TODO Auto-generated catch block
241 | e.printStackTrace();
242 | } catch (IOException e) {
243 | // TODO Auto-generated catch block
244 | e.printStackTrace();
245 | }
246 | }
247 | return model.filter(null, RDF.TYPE, null);
248 | }
249 |
250 | /**
251 | * 从一个cell中获得符合类型的model
252 | *
253 | * @param cell
254 | * @param geoTypes
255 | * @return
256 | */
257 | private static Model getRDF4jModelFromSingleCellAndGeoTypes(String storeDir, S2CellId cell, String[] geoTypes) {
258 | // TODO Auto-generated method stub
259 | String fileName = storeDir + "\\" + cell.id() + ".ntriples";
260 | InputStream input;
261 | Model curModel = null;
262 | try {
263 | input = new FileInputStream(fileName);
264 | try {
265 | curModel = Rio.parse(input, "", RDFFormat.NTRIPLES);
266 | } catch (RDFParseException e) {
267 | // TODO Auto-generated catch block
268 | e.printStackTrace();
269 | } catch (UnsupportedRDFormatException e) {
270 | // TODO Auto-generated catch block
271 | e.printStackTrace();
272 | } catch (IOException e) {
273 | // TODO Auto-generated catch block
274 | e.printStackTrace();
275 | }
276 | } catch (FileNotFoundException e) {
277 | // TODO Auto-generated catch block
278 | return new LinkedHashModel();
279 | // e.printStackTrace();
280 | }
281 | Model filterModel = new LinkedHashModel();
282 | Repository store = new SailRepository(new MemoryStore());
283 | store.initialize();
284 | ValueFactory f = store.getValueFactory();
285 | for (String geoType : geoTypes) {
286 | IRI geoEntity = f.createIRI("http://ontology/" + geoType);
287 | filterModel.addAll(curModel.filter(null, RDF.TYPE, geoEntity));
288 | }
289 | return filterModel;
290 | }
291 |
292 | /**
293 | * 对model中的信息进行分类
294 | *
295 | * @param model
296 | * @return
297 | */
298 | private static HashMap> getInfoFromRDF4jModel(Model model) {
299 | // TODO Auto-generated method stub
300 | // 找出Model里的所有类型
301 | HashMap> info = new HashMap>();
302 | Set allTypes = model.filter(null, RDF.TYPE, null).objects();
303 | for (Value value : allTypes) {
304 | // 去除cell信息
305 | if (value.toString().substring(16).equals("Cell")) {
306 | continue;
307 | }
308 | // 找到指定类型的实体
309 | Set allEntity = model.filter(null, RDF.TYPE, value).subjects();
310 | ArrayList entitys = new ArrayList();
311 | for (Resource resource : allEntity) {
312 | String entity = resource.toString();
313 | String[] splitEntity = entity.split("#");
314 | entitys.add(splitEntity[splitEntity.length - 1]);
315 | }
316 | info.put(value.toString().substring(16), entitys);
317 | }
318 | return info;
319 | }
320 |
321 | /**
322 | * 将通过objects生成的Grid数据写入知识图谱中
323 | *
324 | * @param gridModels
325 | * @param storeDir
326 | * @return
327 | */
328 | public static Boolean writeToKnowledgeGraphFromObject(ArrayList gridModels, String storeDir) {
329 | // TODO Auto-generated method stub
330 | Repository store = new SailRepository(new MemoryStore());
331 | store.initialize();
332 | ValueFactory f = store.getValueFactory();
333 | String eneityPrefix = "http://";
334 | String ontologyPrefix = "http://ontology";
335 | String cellsPrefix = "http://cell/id#";
336 | String timePrefix = "http://time/id#";
337 | IRI ontologyCell = f.createIRI(ontologyPrefix + "/Cell");
338 | IRI ontologyTime = f.createIRI(ontologyPrefix + "/Time");
339 | IRI ontologyHave = f.createIRI(ontologyPrefix + "/have");
340 | IRI ontologyInclude = f.createIRI(ontologyPrefix + "/include");
341 | IRI ontologyContain = f.createIRI(ontologyPrefix + "/contain");
342 | // IRI ontologyInclude = f.createIRI(ontologyPrefix + "/include");
343 | for (ObjectGrid grid : gridModels) {
344 | Model model = new LinkedHashModel();
345 | // 实体写入model
346 | Long cellId = grid.getId();
347 |
348 | for (GeoObjectEntity geoEntity : grid.getGeoEntitys()) {
349 | String timeId = null;
350 | String entityType = null;
351 | String entityId = null;
352 | // 处理点实体对象
353 | if (geoEntity instanceof PointObjectEntity) {
354 | PointObjectEntity pointEntity = (PointObjectEntity) geoEntity;
355 | entityType = pointEntity.getEntityType();
356 | entityId = pointEntity.getEntityId();
357 | timeId = pointEntity.getTime();
358 | } else if (geoEntity instanceof LineObjectEntity) {
359 | LineObjectEntity lineEntity = (LineObjectEntity) geoEntity;
360 | entityType = lineEntity.getEntityType();
361 | entityId = lineEntity.getEntityId();
362 | timeId = lineEntity.getTime();
363 | } else if (geoEntity instanceof RegionObjectEntity) {
364 | RegionObjectEntity RegionObjectEntity = (RegionObjectEntity) geoEntity;
365 | entityType = RegionObjectEntity.getEntityType();
366 | entityId = RegionObjectEntity.getEntityId();
367 | timeId = RegionObjectEntity.getTime();
368 | }
369 | IRI cellIID = f.createIRI(cellsPrefix + cellId);
370 | IRI timeIID = f.createIRI(timePrefix + timeId);
371 | IRI entityIdIRI = f.createIRI(eneityPrefix + entityType + "/id#" + entityId);
372 | IRI typeEntity = f.createIRI(ontologyPrefix + "/" + entityType);
373 | model.add(cellIID, RDF.TYPE, ontologyCell);
374 | model.add(timeIID, RDF.TYPE, ontologyTime);
375 | model.add(cellIID, ontologyInclude, timeIID);
376 | model.add(timeIID, ontologyHave, typeEntity);
377 | model.add(entityIdIRI, RDF.TYPE, typeEntity);
378 | model.add(timeIID, ontologyContain, entityIdIRI);
379 | // model.add(typeEntity,ontologyInclude,entityIdIRI);
380 | // entityType = entityType.substring(0, 1).toUpperCase() +
381 | // entityType.substring(1);
382 | // model.add(cellIID, ontologyHave, entityIdIRI);
383 | }
384 | // 通过判断生成的文件是否存在来检查是更新图谱还是要新建
385 | File file = new File(storeDir + "\\" + cellId + ".ntriples");
386 | if (file.exists()) {
387 | InputStream input;
388 | try {
389 | input = new FileInputStream(file);
390 | Model preModel;
391 | try {
392 | preModel = Rio.parse(input, "", RDFFormat.NTRIPLES);
393 | model.addAll(preModel);
394 | } catch (RDFParseException e) {
395 | // TODO Auto-generated catch block
396 | e.printStackTrace();
397 | } catch (UnsupportedRDFormatException e) {
398 | // TODO Auto-generated catch block
399 | e.printStackTrace();
400 | } catch (IOException e) {
401 | // TODO Auto-generated catch block
402 | e.printStackTrace();
403 | }
404 | } catch (FileNotFoundException e) {
405 | // TODO Auto-generated catch block
406 | e.printStackTrace();
407 | }
408 | }
409 | FileOutputStream out;
410 | try {
411 | out = new FileOutputStream(file);
412 | Rio.write(model, out, RDFFormat.NTRIPLES);
413 | try {
414 | out.close();
415 | } catch (IOException e) {
416 | // TODO Auto-generated catch block
417 | e.printStackTrace();
418 | }
419 | } catch (FileNotFoundException e) {
420 | // TODO Auto-generated catch block
421 | e.printStackTrace();
422 | }
423 | }
424 | return false;
425 | }
426 |
427 | /**
428 | * 通过传入的多个cell和类型查询知识图谱,并返回结果
429 | *
430 | * @param storeDir
431 | * 知识图谱路径
432 | * @param coverCells
433 | * 缓冲区内的所有cell
434 | * @param geoTypes
435 | * 要查询的实体类型
436 | * @param time
437 | * 时间
438 | * @return
439 | */
440 | public static HashMap> queryGeoFromMultiCellsAndGeoTypes(String storeDir,
441 | ArrayList coverCells, String[] geoTypes, String time) {
442 | // 判断指定类型还是全部类型
443 | // 全部类型
444 | Model model = new LinkedHashModel();
445 | if (geoTypes == null || geoTypes.length == 0) {
446 | Model preModel = getRDF4jModelFromAllCellAndGeoTypes(storeDir, coverCells, time);
447 | model.addAll(preModel);
448 | }
449 | // 指定类型
450 | else {
451 | // 从图谱中查询,得到所有符合类型的model
452 | for (S2CellId cell : coverCells) {
453 | Model preModel = getRDF4jModelFromSingleCellAndGeoTypes(storeDir, cell, geoTypes, time);
454 | model.addAll(preModel);
455 | }
456 | }
457 |
458 | // 对model按类型进行分解,并存到hashmap
459 | HashMap> result = getInfoFromRDF4jModel(model);
460 | return result;
461 | }
462 |
463 | private static Model getRDF4jModelFromSingleCellAndGeoTypes(String storeDir, S2CellId cell, String[] geoTypes,
464 | String time) {
465 | Repository store = new SailRepository(new MemoryStore());
466 | store.initialize();
467 | ValueFactory f = store.getValueFactory();
468 | Model model = new LinkedHashModel();
469 | try (RepositoryConnection conn = store.getConnection()) {
470 | String fileName = storeDir + "\\" + cell.id() + ".ntriples";
471 | try {
472 | InputStream input = new FileInputStream(fileName);
473 | conn.add(input, "", RDFFormat.NTRIPLES);
474 | } catch (FileNotFoundException e) {
475 | // TODO Auto-generated catch block
476 | e.printStackTrace();
477 | } catch (RDFParseException e) {
478 | // TODO Auto-generated catch block
479 | e.printStackTrace();
480 | } catch (RepositoryException e) {
481 | // TODO Auto-generated catch block
482 | e.printStackTrace();
483 | } catch (IOException e) {
484 | // TODO Auto-generated catch block
485 | e.printStackTrace();
486 | }
487 | String geoQueryString = "SELECT ?geo WHERE { ?geo . }";
489 | TupleQuery geoQuery = conn.prepareTupleQuery(geoQueryString);
490 | TupleQuery typeQuery = conn.prepareTupleQuery(
491 | "SELECT ?geo ?type WHERE { ?geo ?type . }");
492 | try (TupleQueryResult geoResult = geoQuery.evaluate()) {
493 | while (geoResult.hasNext()) {
494 | BindingSet geoBinding = geoResult.next();
495 | Value geoName = geoBinding.getValue("geo");
496 | typeQuery.setBinding("geo", geoName);
497 | try (TupleQueryResult typeResult = typeQuery.evaluate()) {
498 | while (typeResult.hasNext()) {
499 | BindingSet typeBinding = typeResult.next();
500 | // System.out.println(typeBinding.getValue("geo"));
501 | // System.out.println(typeBinding.getValue("type"));
502 | Statement nameStatement = f.createStatement((Resource) typeBinding.getValue("geo"),
503 | RDF.TYPE, typeBinding.getValue("type"));
504 | model.add(nameStatement);
505 | }
506 |
507 | }
508 | }
509 | }
510 | }
511 | Model filterModel = new LinkedHashModel();
512 | for (String geoType : geoTypes) {
513 | IRI geoEntity = f.createIRI("http://ontology/" + geoType);
514 | filterModel.addAll(model.filter(null, RDF.TYPE, geoEntity));
515 | }
516 | return filterModel;
517 | }
518 |
519 | /**
520 | * 按照时间获得多个cell里的所有类型的数据
521 | *
522 | * @param storeDir
523 | * @param coverCells
524 | * @param time
525 | * @return
526 | */
527 | private static Model getRDF4jModelFromAllCellAndGeoTypes(String storeDir, ArrayList coverCells,
528 | String time) {
529 | Repository store = new SailRepository(new MemoryStore());
530 | store.initialize();
531 | ValueFactory f = store.getValueFactory();
532 | Model model = new LinkedHashModel();
533 | for (S2CellId cell : coverCells) {
534 | try (RepositoryConnection conn = store.getConnection()) {
535 | String fileName = storeDir + "\\" + cell.id() + ".ntriples";
536 | try {
537 | InputStream input = new FileInputStream(fileName);
538 | conn.add(input, "", RDFFormat.NTRIPLES);
539 | } catch (FileNotFoundException e) {
540 | // TODO Auto-generated catch block
541 | continue;
542 | } catch (RDFParseException e) {
543 | // TODO Auto-generated catch block
544 | e.printStackTrace();
545 | } catch (RepositoryException e) {
546 | // TODO Auto-generated catch block
547 | e.printStackTrace();
548 | } catch (IOException e) {
549 | // TODO Auto-generated catch block
550 | e.printStackTrace();
551 | }
552 | String geoQueryString = "SELECT ?geo WHERE { ?geo . }";
554 | TupleQuery geoQuery = conn.prepareTupleQuery(geoQueryString);
555 | TupleQuery typeQuery = conn.prepareTupleQuery(
556 | "SELECT ?geo ?type WHERE { ?geo ?type . }");
557 | try (TupleQueryResult geoResult = geoQuery.evaluate()) {
558 | while (geoResult.hasNext()) {
559 | BindingSet geoBinding = geoResult.next();
560 | Value geoName = geoBinding.getValue("geo");
561 | typeQuery.setBinding("geo", geoName);
562 | try (TupleQueryResult typeResult = typeQuery.evaluate()) {
563 | while (typeResult.hasNext()) {
564 | BindingSet typeBinding = typeResult.next();
565 | // System.out.println(typeBinding.getValue("geo"));
566 | // System.out.println(typeBinding.getValue("type"));
567 | Statement nameStatement = f.createStatement((Resource) typeBinding.getValue("geo"),
568 | RDF.TYPE, typeBinding.getValue("type"));
569 | model.add(nameStatement);
570 | }
571 |
572 | }
573 | }
574 | }
575 | }
576 |
577 | }
578 | return model;
579 | }
580 | }
581 |
--------------------------------------------------------------------------------
/src/main/java/www/supermap/utils/S2.java:
--------------------------------------------------------------------------------
1 | package www.supermap.utils;
2 |
3 | import java.util.ArrayList;
4 | import java.util.Collections;
5 | import java.util.HashSet;
6 |
7 | import com.google.common.geometry.S2Cap;
8 | import com.google.common.geometry.S2CellId;
9 | import com.google.common.geometry.S2LatLng;
10 | import com.google.common.geometry.S2Loop;
11 | import com.google.common.geometry.S2Point;
12 | import com.google.common.geometry.S2Polygon;
13 | import com.google.common.geometry.S2Polyline;
14 | import com.google.common.geometry.S2Region;
15 | import com.google.common.geometry.S2RegionCoverer;
16 | import com.supermap.data.GeoLine;
17 | import com.supermap.data.GeoPoint;
18 | import com.supermap.data.GeoRegion;
19 | import com.supermap.data.Point2D;
20 | import com.vividsolutions.jts.geom.Coordinate;
21 | import com.vividsolutions.jts.geom.Geometry;
22 | import com.vividsolutions.jts.geom.LineString;
23 | import com.vividsolutions.jts.geom.MultiLineString;
24 | import com.vividsolutions.jts.geom.MultiPolygon;
25 | import com.vividsolutions.jts.geom.Point;
26 | import com.vividsolutions.jts.geom.Polygon;
27 | /**
28 | * google s2相关的方法
29 | * @author SunYasong
30 | *
31 | */
32 | public class S2 {
33 | /**
34 | * 指定网格与缓冲区半径及网格层级,找到缓冲区的所有符合要求的网格
35 | * @param cell 指定的中心网格
36 | * @param radius 缓冲区半径,单位:米
37 | * @param cellLevel 层级
38 | * @return
39 | */
40 | public static ArrayList getCoveringCellIdsFromCell(S2CellId cell,Double radius,int cellLevel){
41 | S2LatLng la = cell.toLatLng();
42 | S2Point point = la.toPoint();
43 | Double capHeight = 1-Math.cos(radius/12742000);
44 | S2Cap cap = S2Cap.fromAxisHeight(point, capHeight);
45 | S2RegionCoverer cover = new S2RegionCoverer();
46 | cover.setMaxLevel(cellLevel);
47 | cover.setMinLevel(cellLevel);
48 | // cover.setMaxCells(30);
49 | ArrayList covering = new ArrayList();
50 | cover.getCovering(cap, covering);
51 | return covering;
52 | }
53 |
54 | /**
55 | * 指定经纬度缓冲区半径及网格层级来得到符合要求的网格
56 | * @param laln 经纬度
57 | * @param radius 半径:米
58 | * @param cellLevel 网格层级
59 | * @return
60 | */
61 | public static ArrayList getCoveringCellIdsFromLatlng(S2LatLng laln,Double radius,int cellLevel){
62 | S2Point point = laln.toPoint();
63 | Double capHeight = 1-Math.cos(radius/12742000);
64 | S2Cap cap = S2Cap.fromAxisHeight(point, capHeight);
65 | S2RegionCoverer cover = new S2RegionCoverer();
66 | cover.setMaxLevel(cellLevel);
67 | cover.setMinLevel(cellLevel);
68 | // cover.setMaxCells(30);
69 | ArrayList covering = new ArrayList();
70 | cover.getCovering(cap, covering);
71 | return covering;
72 | }
73 |
74 | /**
75 | * 按照网格长度计算网格级别
76 | * @param gridLength
77 | * @return
78 | */
79 | public static int getCellLevelFromLength(Double gridLength) {
80 | // TODO Auto-generated method stub
81 | double[] level = {9220000,4610000,2454000,1283000,643000,322000,161000,79000,40000,20000,10000,5000,2500,1260,632,315,157,78,39,19,9.8};
82 | for (int i = 0; i < level.length; i++) {
83 | if(gridLength>=level[i]) {
84 | return i;
85 | }
86 | }
87 | return level.length-1;
88 | }
89 |
90 | /**
91 | * 通过cell的id计算网格等级
92 | * @param cellId
93 | * @return
94 | */
95 | public static int getCellLevelFromId(Long cellId) {
96 | // TODO Auto-generated method stub
97 | S2CellId cell = new S2CellId(cellId);
98 | return cell.level();
99 | }
100 |
101 | /**
102 | * 获得GeoPoint所在的cell
103 | * @param point
104 | * @param gridLevel
105 | * @return
106 | */
107 | public static ArrayList getGeoPointCoveringCell(GeoPoint point, int gridLevel) {
108 | // TODO Auto-generated method stub
109 | ArrayList cellIds = new ArrayList();
110 | S2LatLng laln = S2LatLng.fromDegrees(point.getY(), point.getX());
111 | S2CellId cell = S2CellId.fromLatLng(laln).parent(gridLevel);
112 | cellIds.add(cell.id());
113 | return cellIds;
114 | }
115 |
116 | /**
117 | * 获得GeoLine所覆盖的cell集合
118 | * @param line
119 | * @param gridLevel
120 | * @return
121 | */
122 | public static ArrayList getGeoLineCoveringCells(GeoLine line, int gridLevel) {
123 | // TODO Auto-generated method stub
124 | HashSet cellIds = new HashSet();
125 | for (int i = 0; i < line.getPartCount(); i++) {
126 | ArrayList s2Points = new ArrayList();
127 | Point2D[] point2ds = line.getPart(i).toArray();
128 | S2Point beginPoint = S2LatLng.fromDegrees(point2ds[0].getY(), point2ds[0].getX()).toPoint();
129 | S2Point endPoint = S2LatLng.fromDegrees(point2ds[1].getY(), point2ds[1].getX()).toPoint();
130 | s2Points.add(beginPoint);
131 | s2Points.add(endPoint);
132 | S2Polyline polyLine = new S2Polyline(s2Points);
133 | S2RegionCoverer cover = new S2RegionCoverer();
134 | cover.setMaxLevel(gridLevel);
135 | cover.setMinLevel(gridLevel);
136 | ArrayList covering = new ArrayList();
137 | cover.getCovering(polyLine, covering);
138 | for (S2CellId s2CellId : covering) {
139 | cellIds.add(s2CellId.id());
140 | }
141 | }
142 | return new ArrayList(cellIds);
143 | }
144 |
145 | /**
146 | * 获得GeoRegion所覆盖的面所占的cell
147 | * @param region
148 | * @param gridLevel
149 | * @return
150 | */
151 | public static ArrayList getGeoRegionCoveringCells(GeoRegion region, int gridLevel) {
152 | // TODO Auto-generated method stub
153 | //取出第一个面的所有转成S2Point的点(没有重复点)
154 | Point2D[] points = region.getPart(0).toArray();
155 | Point2D firstPoint = points[0];
156 | ArrayList s2Points = new ArrayList();
157 | for (int i = 1; i < points.length; i++) {
158 | S2Point point = S2LatLng.fromDegrees(points[i].getY(), points[i].getX()).toPoint();
159 | s2Points.add(point);
160 | if((firstPoint.getX()==points[i].getX())&&(firstPoint.getY()==points[i].getY())){
161 | break;
162 | }
163 | }
164 | /**
165 | * 找到S2point按逆时针存的集合
166 | * 思路:先找到一个Z轴最大值的点,尽量保证是凸点,然后找到相邻的两个点,计算法向量,判断z轴的值。
167 | * 举例:找到p1点,集合中p1的前一个点则为p2,后一个点为p3,用向量p1p2与p3p1叉乘计算法向量,然后指定一个向量为(0,0,1),计算两者之间的值,夹角小于90值为正,反之则为负。由于指定向量的关系,逆时针的为正,所以计算出为正,直接使用。反之则翻转。
168 | */
169 | //找到z值最大的点
170 | int maxIndex = 0;
171 | for (int i=1; is2Points.get(maxIndex).get(2)){
173 | maxIndex = i;
174 | }
175 | }
176 | //找到相邻的3个点,放进数组
177 | S2Point[] s2Arr = new S2Point[3];
178 | if(maxIndex!=0&&maxIndex!=s2Points.size()-1){
179 | s2Arr[0]=s2Points.get(maxIndex-1);
180 | s2Arr[1]=s2Points.get(maxIndex);
181 | s2Arr[2]=s2Points.get(maxIndex+1);
182 | }
183 | else if(maxIndex==0){
184 | s2Arr[0]=s2Points.get(s2Points.size()-1);
185 | s2Arr[1]=s2Points.get(0);
186 | s2Arr[2]=s2Points.get(1);
187 | }else{
188 | s2Arr[0]=s2Points.get(maxIndex-1);
189 | s2Arr[1]=s2Points.get(maxIndex);
190 | s2Arr[2]=s2Points.get(0);
191 | }
192 | //向量叉乘。第二个点与第一个点组成向量叉乘第三个点与第二个点组成的向量
193 | S2Point firstVer = S2Point.sub(s2Arr[0],s2Arr[1]);
194 | S2Point endVer = S2Point.sub(s2Arr[1],s2Arr[2]);
195 | S2Point crossVaule = S2Point.crossProd(firstVer, endVer);
196 | //以z值为判断条件,大于0则世界使用,小于0则翻转
197 | if(crossVaule.get(2)<0){
198 | Collections.reverse(s2Points);
199 | }
200 | // System.out.println(s2Points.size());
201 | S2Loop s2Loop = new S2Loop(s2Points);
202 | S2Polygon polygon = new S2Polygon(s2Loop); // 创建多边形
203 | S2RegionCoverer cover = new S2RegionCoverer();
204 | cover.setMaxLevel(gridLevel);
205 | cover.setMinLevel(gridLevel);
206 | ArrayList covering = new ArrayList();
207 | cover.getCovering(polygon, covering);
208 | ArrayList cellIds = new ArrayList();
209 | for (S2CellId s2CellId : covering) {
210 | cellIds.add(s2CellId.id());
211 | }
212 | return cellIds;
213 | }
214 |
215 |
216 |
217 | }
218 |
--------------------------------------------------------------------------------