├── .gitignore
├── .idea
└── codeStyleSettings.xml
├── LICENSE.txt
├── README.md
├── build.xml
├── ivy.xml
├── pom.xml
└── src
├── main
├── assembly
│ ├── assembly.xml
│ └── uber-jar.xml
├── java
│ ├── Tweet.java
│ ├── TweetTweetTweet.java
│ └── edu
│ │ └── umn
│ │ └── cs
│ │ └── spatialHadoop
│ │ ├── Estimator.java
│ │ ├── OperationsParams.java
│ │ ├── ReadFile.java
│ │ ├── TigerShape.java
│ │ ├── core
│ │ ├── CSVOGC.java
│ │ ├── CellInfo.java
│ │ ├── Circle.java
│ │ ├── GridInfo.java
│ │ ├── GridRecordWriter.java
│ │ ├── OGCESRIShape.java
│ │ ├── OGCJTSShape.java
│ │ ├── Point.java
│ │ ├── Polygon.java
│ │ ├── Rectangle.java
│ │ ├── ResultCollector.java
│ │ ├── ResultCollector2.java
│ │ ├── Shape.java
│ │ ├── ShapeRecordWriter.java
│ │ ├── SpatialAlgorithms.java
│ │ └── SpatialSite.java
│ │ ├── delaunay
│ │ ├── DelaunayTriangulation.java
│ │ ├── DelaunayTriangulationOutputFormat.java
│ │ ├── DwyersAlgorithm.java
│ │ ├── GSDTAlgorithm.java
│ │ ├── GSImprovedAlgorithm.java
│ │ └── Triangulation.java
│ │ ├── hdf
│ │ ├── DDChunkData.java
│ │ ├── DDCompressedBlock.java
│ │ ├── DDID.java
│ │ ├── DDLinkedBlock.java
│ │ ├── DDNull.java
│ │ ├── DDNumberType.java
│ │ ├── DDNumericDataGroup.java
│ │ ├── DDScientificDDR.java
│ │ ├── DDScientificData.java
│ │ ├── DDUnknown.java
│ │ ├── DDVDataHeader.java
│ │ ├── DDVGroup.java
│ │ ├── DDVSet.java
│ │ ├── DDVersion.java
│ │ ├── DataDescriptor.java
│ │ ├── HDFConstants.java
│ │ └── HDFFile.java
│ │ ├── indexing
│ │ ├── AbstractRTreeBBPartitioner.java
│ │ ├── AbstractRTreeGBPartitioner.java
│ │ ├── AuxiliarySearchStructure.java
│ │ ├── CellPartitioner.java
│ │ ├── GlobalIndex.java
│ │ ├── GridPartitioner.java
│ │ ├── HilbertCurvePartitioner.java
│ │ ├── IndexInsert.java
│ │ ├── IndexOutputFormat.java
│ │ ├── Indexer.java
│ │ ├── KdTreePartitioner.java
│ │ ├── LocalIndex.java
│ │ ├── Partition.java
│ │ ├── Partitioner.java
│ │ ├── QuadTreePartitioner.java
│ │ ├── RRStarLocalIndex.java
│ │ ├── RRStarTree.java
│ │ ├── RStarTree.java
│ │ ├── RTreeGBPartitioner.java
│ │ ├── RTreeGuttman.java
│ │ ├── RTreeGuttmanQuadraticSplit.java
│ │ ├── RTreeOptimizer.java
│ │ ├── STRPartitioner.java
│ │ └── ZCurvePartitioner.java
│ │ ├── io
│ │ ├── InputSubstream.java
│ │ ├── MemoryInputStream.java
│ │ ├── MemoryOutputStream.java
│ │ ├── RandomCompressedInputStream.java
│ │ ├── RandomCompressedOutputStream.java
│ │ ├── Text2.java
│ │ ├── TextSerializable.java
│ │ ├── TextSerializerHelper.java
│ │ └── TrackedOutputStream.java
│ │ ├── mapred
│ │ ├── BinaryRecordReader.java
│ │ ├── BinarySpatialInputFormat.java
│ │ ├── BlockFilter.java
│ │ ├── CombineBlockFilter.java
│ │ ├── CombinedSpatialInputFormat.java
│ │ ├── DefaultBlockFilter.java
│ │ ├── FileSplitUtil.java
│ │ ├── GridOutputFormat.java
│ │ ├── GridOutputFormat2.java
│ │ ├── GridOutputFormat3.java
│ │ ├── GridRecordWriter.java
│ │ ├── GridRecordWriter2.java
│ │ ├── GridRecordWriter3.java
│ │ ├── PairWritable.java
│ │ ├── ShapeArrayInputFormat.java
│ │ ├── ShapeArrayRecordReader.java
│ │ ├── ShapeInputFormat.java
│ │ ├── ShapeIterInputFormat.java
│ │ ├── ShapeIterRecordReader.java
│ │ ├── ShapeLineInputFormat.java
│ │ ├── ShapeLineRecordReader.java
│ │ ├── ShapeRecordReader.java
│ │ ├── SpatialInputFormat.java
│ │ ├── SpatialRecordReader.java
│ │ ├── TextOutputFormat.java
│ │ └── TextOutputFormat3.java
│ │ ├── mapreduce
│ │ ├── LocalIndexRecordReader.java
│ │ ├── RandomInputFormat.java
│ │ ├── RandomShapeGenerator.java
│ │ ├── SampleInputFormat.java
│ │ ├── SampleRecordReaderLocalIndexFile.java
│ │ ├── SampleRecordReaderTextFile.java
│ │ ├── SpatialInputFormat3.java
│ │ └── SpatialRecordReader3.java
│ │ ├── nasa
│ │ ├── AggregateQuadTree.java
│ │ ├── DistributedAggregateSpatioTemporalIndexer.java
│ │ ├── GeoProjector.java
│ │ ├── HDFPlot.java
│ │ ├── HDFRasterLayer.java
│ │ ├── HDFRecordReader.java
│ │ ├── HDFToText.java
│ │ ├── HTTPFileSystem.java
│ │ ├── HTTPInputStream.java
│ │ ├── ImageCompare.java
│ │ ├── MercatorProjector.java
│ │ ├── MultiHDFPlot.java
│ │ ├── NASADataset.java
│ │ ├── NASAPoint.java
│ │ ├── NASARectangle.java
│ │ ├── NASAShape.java
│ │ ├── ShahedServer.java
│ │ ├── SpatioAggregateQueries.java
│ │ └── TimeRange.java
│ │ ├── operations
│ │ ├── Aggregate.java
│ │ ├── CatUnion.java
│ │ ├── ClosestPair.java
│ │ ├── ConvexHull.java
│ │ ├── DistributedCopy.java
│ │ ├── DistributedJoin.java
│ │ ├── FarthestPair.java
│ │ ├── FileMBR.java
│ │ ├── Head.java
│ │ ├── KNN.java
│ │ ├── LocalSampler.java
│ │ ├── Main.java
│ │ ├── OperationMetadata.java
│ │ ├── RandomSpatialGenerator.java
│ │ ├── RangeFilter.java
│ │ ├── RangeQuery.java
│ │ ├── RecordCount.java
│ │ ├── SJMR.java
│ │ ├── Sampler.java
│ │ ├── Shuffle.java
│ │ ├── Skyline.java
│ │ ├── Tail.java
│ │ ├── UltimateUnion.java
│ │ └── Union.java
│ │ ├── osm
│ │ ├── HasTag.java
│ │ ├── MapToJson.java
│ │ ├── OSMEdge.java
│ │ ├── OSMNode.java
│ │ ├── OSMPoint.java
│ │ ├── OSMPolygon.java
│ │ ├── OSMRelation.java
│ │ ├── OSMToKML.java
│ │ └── OSMWay.java
│ │ ├── temporal
│ │ └── TemporalIndex.java
│ │ ├── util
│ │ ├── BitArray.java
│ │ ├── FSUtil.java
│ │ ├── FileUtil.java
│ │ ├── FloatArray.java
│ │ ├── IFastSum.java
│ │ ├── IntArray.java
│ │ ├── JspSpatialHelper.java
│ │ ├── MemoryReporter.java
│ │ ├── MergeSorter.java
│ │ ├── MetadataUtil.java
│ │ ├── NASADatasetUtil.java
│ │ ├── Parallel.java
│ │ ├── Progressable.java
│ │ ├── ResultCollectorSynchronizer.java
│ │ ├── SampleIterable.java
│ │ ├── ShortArray.java
│ │ ├── TemporalIndexManager.java
│ │ └── WritableByteArray.java
│ │ └── visualization
│ │ ├── AdaptiveMultilevelPlot.java
│ │ ├── BinaryOutputFormat.java
│ │ ├── Canvas.java
│ │ ├── CanvasOutputFormat.java
│ │ ├── DaVinciServerV2.java
│ │ ├── DavinciServer.java
│ │ ├── FrequencyMap.java
│ │ ├── GeometricPlot.java
│ │ ├── GridHistogram.java
│ │ ├── HadoopvizServer.java
│ │ ├── HeatMapPlot.java
│ │ ├── Histogram.java
│ │ ├── ImageCanvas.java
│ │ ├── ImageOutputFormat.java
│ │ ├── ImagePlot.java
│ │ ├── LakesPlot.java
│ │ ├── MagickPlot.java
│ │ ├── MercatorProjection.java
│ │ ├── MultilevelPlot.java
│ │ ├── Plotter.java
│ │ ├── PyramidOutputFormat3.java
│ │ ├── SVGCanvas.java
│ │ ├── SVGGraphics.java
│ │ ├── SimpleGraphics.java
│ │ ├── SingleLevelPlot.java
│ │ ├── SubPyramid.java
│ │ ├── TileIndex.java
│ │ └── TileMBR.java
├── package
│ ├── bin
│ │ └── shadoop
│ └── etc
│ │ └── hadoop
│ │ ├── spatial-site.xml
│ │ └── spatial-site.xml.template
├── resources
│ ├── log4j.properties
│ ├── spatial-default.xml
│ ├── spatial-default.yaml
│ ├── spatial-operations.yaml
│ ├── webapps
│ │ ├── hdfs.old
│ │ │ ├── head.jsp
│ │ │ ├── knn.jsp
│ │ │ ├── pigeon.jsp
│ │ │ ├── plotter.jsp
│ │ │ ├── preprocess.jsp
│ │ │ ├── preview-script.jsp
│ │ │ ├── rangequery.jsp
│ │ │ ├── spatialjoin.jsp
│ │ │ └── visualizer.jsp
│ │ └── static
│ │ │ ├── hadoopviz
│ │ │ ├── css
│ │ │ │ ├── font-awesome.min.css
│ │ │ │ ├── hadoopviz.css
│ │ │ │ └── responstable.css
│ │ │ ├── fonts
│ │ │ │ ├── FontAwesome.otf
│ │ │ │ ├── fontawesome-webfont.eot
│ │ │ │ ├── fontawesome-webfont.svg
│ │ │ │ ├── fontawesome-webfont.ttf
│ │ │ │ ├── fontawesome-webfont.woff
│ │ │ │ └── fontawesome-webfont.woff2
│ │ │ ├── index.html
│ │ │ └── js
│ │ │ │ ├── display.js
│ │ │ │ ├── dust-full-2.0.0.min.js
│ │ │ │ ├── dust-helpers-1.1.1.min.js
│ │ │ │ ├── hadoopviz.js
│ │ │ │ └── jquery-2.2.3.min.js
│ │ │ ├── pigeon
│ │ │ ├── pigeon.js
│ │ │ └── pigeon_logo.png
│ │ │ ├── shahedfrontend
│ │ │ ├── aggregate_query.html
│ │ │ ├── css
│ │ │ │ └── shahed.css
│ │ │ ├── d3
│ │ │ │ ├── .DS_Store
│ │ │ │ ├── LICENSE
│ │ │ │ ├── d3.silder.css
│ │ │ │ ├── d3.silder.js
│ │ │ │ ├── d3.v3.js
│ │ │ │ └── d3.v3.min.js
│ │ │ ├── data
│ │ │ │ ├── .DS_Store
│ │ │ │ ├── data-alt.tsv
│ │ │ │ ├── data.csv
│ │ │ │ ├── data.tsv
│ │ │ │ ├── data2.csv
│ │ │ │ ├── data2.tsv
│ │ │ │ ├── sankey-formatted.json
│ │ │ │ ├── sankey.csv
│ │ │ │ └── sankeygreenhouse.json
│ │ │ ├── generate_image.html
│ │ │ ├── images
│ │ │ │ └── wait.gif
│ │ │ ├── index.html
│ │ │ ├── jquery-ui
│ │ │ │ ├── external
│ │ │ │ │ └── jquery
│ │ │ │ │ │ └── jquery.js
│ │ │ │ ├── images
│ │ │ │ │ ├── ui-bg_diagonals-thick_18_b81900_40x40.png
│ │ │ │ │ ├── ui-bg_diagonals-thick_20_666666_40x40.png
│ │ │ │ │ ├── ui-bg_flat_10_000000_40x100.png
│ │ │ │ │ ├── ui-bg_glass_100_f6f6f6_1x400.png
│ │ │ │ │ ├── ui-bg_glass_100_fdf5ce_1x400.png
│ │ │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png
│ │ │ │ │ ├── ui-bg_gloss-wave_35_f6a828_500x100.png
│ │ │ │ │ ├── ui-bg_highlight-soft_100_eeeeee_1x100.png
│ │ │ │ │ ├── ui-bg_highlight-soft_75_ffe45c_1x100.png
│ │ │ │ │ ├── ui-icons_222222_256x240.png
│ │ │ │ │ ├── ui-icons_228ef1_256x240.png
│ │ │ │ │ ├── ui-icons_ef8c08_256x240.png
│ │ │ │ │ ├── ui-icons_ffd27a_256x240.png
│ │ │ │ │ └── ui-icons_ffffff_256x240.png
│ │ │ │ ├── index.html
│ │ │ │ ├── jquery-ui.css
│ │ │ │ ├── jquery-ui.js
│ │ │ │ ├── jquery-ui.min.css
│ │ │ │ ├── jquery-ui.min.js
│ │ │ │ ├── jquery-ui.structure.css
│ │ │ │ ├── jquery-ui.structure.min.css
│ │ │ │ ├── jquery-ui.theme.css
│ │ │ │ └── jquery-ui.theme.min.css
│ │ │ ├── js
│ │ │ │ ├── brush.js
│ │ │ │ ├── jquery-2.0.0.min.js
│ │ │ │ └── shahed.js
│ │ │ ├── linegraph1.html
│ │ │ ├── pointQuery.html
│ │ │ └── respe.html
│ │ │ └── visualizer
│ │ │ ├── add.png
│ │ │ ├── areawater.png
│ │ │ ├── cells.png
│ │ │ ├── delete.png
│ │ │ ├── frontend.png
│ │ │ ├── frontend_progress.png
│ │ │ ├── frontend_sjoin.png
│ │ │ ├── jquery-1.10.2.min.js
│ │ │ ├── roads_rivers.png
│ │ │ ├── shadoop_logo.png
│ │ │ ├── visualizer.css
│ │ │ └── visualizer.js
│ └── zoom_view.html
└── scripts
│ ├── pig
│ ├── edge_node_join.pig
│ ├── osmx.pig
│ ├── planet-extractor.pig
│ └── splitter.pig
│ └── ruby
│ ├── copy_and_run.rb
│ ├── exp_dynindex.rb
│ ├── gdelt-download.rb
│ ├── hdf_downloader.rb
│ ├── index_measure.rb
│ ├── install-shadoop.rb
│ ├── poor_mans_test.rb
│ └── vd.rasem
└── test
├── java
└── edu
│ └── umn
│ └── cs
│ └── spatialHadoop
│ ├── BaseTest.java
│ ├── OperationsParamsTest.java
│ ├── TestHelper.java
│ ├── TigerShapeTest.java
│ ├── core
│ └── GridInfoTest.java
│ ├── delaunay
│ ├── DwyersAlgorithmTest.java
│ ├── GSDTAlgorithmTest.java
│ └── GSImprovedAlgorithmTest.java
│ ├── indexing
│ ├── AbstractRTreeBBPartitionerTest.java
│ ├── CellPartitionerTest.java
│ ├── IndexInsertTest.java
│ ├── IndexerTest.java
│ ├── PartitionerTest.java
│ ├── RRStarLocalInexTest.java
│ ├── RRStarTreeTest.java
│ ├── RStarTreeTest.java
│ ├── RTreeGuttmanTest.java
│ └── STRPartitionerTest.java
│ ├── io
│ ├── HeadTest.java
│ └── TextSerializerHelperTest.java
│ ├── mapreduce
│ ├── LocalIndexRecordReaderTest.java
│ ├── SampleInputFormatTest.java
│ ├── SampleRecordReader2Test.java
│ └── SpatialInputFormat3Test.java
│ ├── nasa
│ └── HDFRecordReaderTest.java
│ ├── operations
│ ├── KNNTest.java
│ ├── LocalSamplerTest.java
│ ├── SJMRTest.java
│ └── SamplerTest.java
│ ├── util
│ ├── BitArrayTest.java
│ ├── FSUtilTest.java
│ ├── IntArrayTest.java
│ └── SampleIterableTest.java
│ └── visualization
│ ├── MultilevelPlotTest.java
│ ├── SubPyramidTest.java
│ └── TileIndexTest.java
└── resources
├── Delaunay
├── DT.rb
├── test_dt1.points
├── test_dt1.svg
├── test_dt1.triangles
├── test_dt10.points
├── test_dt10.svg
├── test_dt10.triangles
├── test_dt11.points
├── test_dt11.svg
├── test_dt12.points
├── test_dt12.svg
├── test_dt12.triangles
├── test_dt13.points
├── test_dt13.svg
├── test_dt14.points
├── test_dt15.points
├── test_dt15.triangles
├── test_dt16.points
├── test_dt17.points
├── test_dt2.points
├── test_dt2.svg
├── test_dt2.triangles
├── test_dt3.points
├── test_dt3.svg
├── test_dt3.triangles
├── test_dt3.unsafe_sites
├── test_dt4.points
├── test_dt4.svg
├── test_dt4.triangles
├── test_dt5.points
├── test_dt5.svg
├── test_dt5.triangles
├── test_dt6.points
├── test_dt6.svg
├── test_dt6.triangles
├── test_dt7.points
├── test_dt7.svg
├── test_dt7.triangles
├── test_dt8.points
├── test_dt8.svg
├── test_dt8.triangles
├── test_dt9.points
├── test_dt9.svg
└── test_dt9.triangles
├── polys.osm
├── test.cells
├── test.points
├── test.rect
├── test111.points
├── test2.points
├── test3.points
├── test3_points.svg
├── test4.points
└── test_points.svg
/.gitignore:
--------------------------------------------------------------------------------
1 | /bin
2 | lib/ivy/**/*.jar
3 | /dist
4 | /target
5 | .idea
6 | *.iml
7 |
--------------------------------------------------------------------------------
/.idea/codeStyleSettings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | About This Content
2 | 2015
3 |
4 | License
5 | The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the Content is provided to you under the terms and conditions of the Apache License, Version 2.0. A copy of the Apache License, Version 2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
6 |
7 | If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content. Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the Redistributor. Unless otherwise indicated below, the terms and conditions of the Apache License, Version 2.0 still apply to any source code in the Content and such source code may be obtained at http://www.eclipse.org.
8 |
9 |
10 | Third Party Content
11 | The Content includes items that have been sourced from third parties as set out below. If you did not receive this Content directly from the Eclipse Foundation, the following is provided for informational purposes only, and you should look to the Redistributor’s license for terms and conditions of use.
12 |
--------------------------------------------------------------------------------
/ivy.xml:
--------------------------------------------------------------------------------
1 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/main/assembly/assembly.xml:
--------------------------------------------------------------------------------
1 |
8 |
10 | bin
11 |
12 | tar.gz
13 |
14 |
15 | false
16 |
17 |
18 |
19 |
20 | ${project.basedir}
21 | /
22 |
23 | README*
24 | LICENSE*
25 |
26 |
27 |
28 |
29 | src/main/package
30 |
31 |
32 |
33 |
34 | ${project.build.directory}
35 | share/hadoop/common/lib
36 |
37 | ${artifactId}-${version}.jar
38 |
39 |
40 |
41 |
42 |
43 |
44 | share/hadoop/common/lib
45 |
46 | com.vividsolutions:jts
47 | com.esri.geometry:esri-geometry-api
48 | com.sun.mail:javax.mail
49 | javax.mail:javax.mail-api
50 | org.yaml:snakeyaml
51 |
52 | false
53 |
54 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/src/main/assembly/uber-jar.xml:
--------------------------------------------------------------------------------
1 |
8 |
10 | uber
11 |
12 | jar
13 |
14 |
15 | false
16 |
17 |
18 |
19 |
20 | ${project.build.directory}/classes
21 |
22 |
23 |
24 |
25 |
26 |
27 | /
28 | true
29 |
30 | com.vividsolutions:jts
31 | com.esri.geometry:esri-geometry-api
32 | javax.mail:javax.mail-api
33 | com.sun.mail:javax.mail
34 | org.yaml:snakeyaml
35 |
36 | true
37 | false
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/src/main/java/Tweet.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | import java.io.DataInput;
10 | import java.io.DataOutput;
11 | import java.io.IOException;
12 |
13 | import edu.umn.cs.spatialHadoop.io.TextSerializerHelper;
14 | import org.apache.hadoop.io.Text;
15 |
16 | import edu.umn.cs.spatialHadoop.core.Point;
17 |
18 | /**
19 | * @author Eldawy
20 | *
21 | */
22 | public class Tweet extends Point {
23 | protected long id;
24 |
25 | public Tweet() {
26 | }
27 |
28 | public Tweet(Tweet tweet) {
29 | this.id = tweet.id;
30 | this.x = tweet.x;
31 | this.y = tweet.y;
32 | }
33 |
34 | @Override
35 | public void fromText(Text text) {
36 | this.id = TextSerializerHelper.consumeLong(text, ',');
37 | this.y = TextSerializerHelper.consumeDouble(text, ',');
38 | this.x = TextSerializerHelper.consumeDouble(text, '\0');
39 | }
40 |
41 | @Override
42 | public Text toText(Text text) {
43 | TextSerializerHelper.serializeLong(id, text, ',');
44 | TextSerializerHelper.serializeDouble(y, text, ',');
45 | TextSerializerHelper.serializeDouble(x, text, '\0');
46 | return text;
47 | }
48 |
49 | @Override
50 | public void write(DataOutput out) throws IOException {
51 | out.writeLong(id);
52 | super.write(out);
53 | }
54 |
55 | @Override
56 | public void readFields(DataInput in) throws IOException {
57 | id = in.readLong();
58 | super.readFields(in);
59 | }
60 |
61 | @Override
62 | public Tweet clone() {
63 | return new Tweet(this);
64 | }
65 |
66 | @Override
67 | public String toString() {
68 | return Long.toString(id);
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/ReadFile.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop;
10 |
11 | import edu.umn.cs.spatialHadoop.operations.OperationMetadata;
12 | import org.apache.hadoop.conf.Configuration;
13 | import org.apache.hadoop.fs.BlockLocation;
14 | import org.apache.hadoop.fs.FileSystem;
15 | import org.apache.hadoop.fs.Path;
16 | import org.apache.hadoop.util.GenericOptionsParser;
17 |
18 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
19 | import edu.umn.cs.spatialHadoop.indexing.GlobalIndex;
20 | import edu.umn.cs.spatialHadoop.indexing.Partition;
21 |
22 | /**
23 | * Reads spatial information associated with a file
24 | * @author eldawy
25 | *
26 | */
27 | @OperationMetadata(shortName = "readfile",
28 | description = "Retrieve some information about the index of a file")
29 | public class ReadFile {
30 |
31 | private static void printUsage() {
32 | System.out.println("Displays information about blocks in a file");
33 | System.out.println("Parameters:");
34 | System.out.println(" - Path to input file");
35 | }
36 |
37 | public static void main(String[] args) throws Exception {
38 | OperationsParams cla = new OperationsParams(new GenericOptionsParser(args));
39 | Path input = cla.getPath();
40 | if (input == null) {
41 | printUsage();
42 | throw new RuntimeException("Illegal parameters");
43 | }
44 | Configuration conf = new Configuration();
45 | Path inFile = new Path(args[0]);
46 | FileSystem fs = inFile.getFileSystem(conf);
47 |
48 | long length = fs.getFileStatus(inFile).getLen();
49 |
50 | GlobalIndex gindex = SpatialSite.getGlobalIndex(fs, inFile);
51 | if (gindex == null) {
52 | BlockLocation[] locations = cla.getInt("offset", 0) == -1 ?
53 | fs.getFileBlockLocations(fs.getFileStatus(inFile), 0, length) :
54 | fs.getFileBlockLocations(fs.getFileStatus(inFile), cla.getInt("offset", 0), 1);
55 | System.out.println(locations.length+" heap blocks");
56 | } else {
57 | for (Partition p : gindex) {
58 | long partition_length = fs.getFileStatus(new Path(inFile, p.filename)).getLen();
59 | System.out.println(p+" --- "+partition_length);
60 | }
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/core/ResultCollector.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.core;
10 |
/**
 * Used to collect results of unary operators.
 * @param <R> the type of records collected by this collector
 * @author eldawy
 *
 */
public interface ResultCollector<R> {
  /**
   * Collects one result record.
   * @param r the record to collect; ownership semantics depend on the caller
   */
  public void collect(R r);
}
19 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/core/ResultCollector2.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.core;
10 |
/**
 * Used to collect the output of binary operators.
 * @param <R> the type of the first (left) record
 * @param <S> the type of the second (right) record
 * @author eldawy
 *
 */
public interface ResultCollector2<R, S> {
  /**
   * Collects one pair of matching records.
   * @param r the record from the first input
   * @param s the record from the second input
   */
  public void collect(R r, S s);
}
19 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/core/Shape.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.core;
10 |
11 | import java.awt.Graphics;
12 |
13 | import org.apache.hadoop.io.Writable;
14 |
15 | import edu.umn.cs.spatialHadoop.io.TextSerializable;
16 |
/**
 * A general 2D shape.
 * @author Ahmed Eldawy
 *
 */
public interface Shape extends Writable, Cloneable, TextSerializable {
  /**
   * Returns minimum bounding rectangle for this shape.
   * @return The minimum bounding rectangle for this shape
   */
  public Rectangle getMBR();

  /**
   * Gets the distance of this shape to the given point.
   * @param x The x-coordinate of the point to compute the distance to
   * @param y The y-coordinate of the point to compute the distance to
   * @return The Euclidean distance between this object and the given point
   */
  public double distanceTo(double x, double y);

  /**
   * Returns true if this shape is intersected with the given shape
   * @param s The other shape to test for intersection with this shape
   * @return {@code true} if this shape intersects with s; {@code false} otherwise.
   */
  public boolean isIntersected(final Shape s);

  /**
   * Returns a clone of this shape
   * @return A new object which is a copy of this shape
   */
  public Shape clone();

  /**
   * Draws a shape to the given graphics.
   * @param g The graphics or canvas to draw to
   * @param fileMBR the MBR of the file in which the shape is contained
   * @param imageWidth width of the image to draw
   * @param imageHeight height of the image to draw
   * @param scale the scale used to convert shape coordinates to image coordinates
   * @deprecated Please use {@link #draw(Graphics, double, double)}
   */
  @Deprecated
  public void draw(Graphics g, Rectangle fileMBR, int imageWidth, int imageHeight, double scale);

  /**
   * Draws the shape to the given graphics and scale.
   * @param g - the graphics to draw the shape to.
   * @param xscale - scale of the image x-axis in terms of pixels per points.
   * @param yscale - scale of the image y-axis in terms of pixels per points.
   */
  public void draw(Graphics g, double xscale, double yscale);
}
70 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/core/ShapeRecordWriter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.core;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.io.NullWritable;
14 | import org.apache.hadoop.util.Progressable;
15 |
16 | public interface ShapeRecordWriter {
17 | /**
18 | * Writes the given shape to the file to all cells it overlaps with
19 | * @param dummy A null key that is never used
20 | * @param shape The shape to write to the output
21 | * @throws IOException
22 | */
23 | public void write(NullWritable dummy, S shape) throws IOException;
24 |
25 | /**
26 | * Writes the given shape to the specified cell
27 | * @param cellId
28 | * @param shape
29 | */
30 | public void write(int cellId, S shape) throws IOException;
31 |
32 | /**
33 | * Writes the given shape only to the given cell even if it overlaps
34 | * with other cells. This is used when the output is prepared to write
35 | * only one cell. The caller ensures that another call will write the object
36 | * to the other cell(s) later.
37 | * @param cellInfo
38 | * @param shape
39 | * @throws IOException
40 | */
41 | public void write(CellInfo cellInfo, S shape) throws IOException;
42 |
43 | /**
44 | * Sets a stock object used to serialize/deserialize objects when written to
45 | * disk.
46 | * @param shape
47 | */
48 | public void setStockObject(S shape);
49 |
50 | /**
51 | * Closes this writer
52 | * @param progressable To report the progress if the closing process takes
53 | * a very long time.
54 | * @throws IOException
55 | */
56 | public void close(Progressable progressable) throws IOException;
57 | }
58 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDChunkData.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * A data descriptor for chunk data.
16 | * Tag DFTAG_CHUNK
17 | * @author Ahmed Eldawy
18 | *
19 | */
20 | public class DDChunkData extends DataDescriptor {
21 |
22 | /**The data in this chunk*/
23 | protected byte[] data;
24 |
25 | DDChunkData(HDFFile hdfFile, int tagID, int refNo, int offset, int length,
26 | boolean extended) {
27 | super(hdfFile, tagID, refNo, offset, length, extended);
28 | }
29 |
30 | @Override
31 | protected void readFields(DataInput input) throws IOException {
32 | data = new byte[getLength()];
33 | input.readFully(data);
34 | }
35 |
36 | byte[] getData() throws IOException {
37 | lazyLoad();
38 | return data;
39 | }
40 |
41 | @Override
42 | public String toString() {
43 | try {
44 | lazyLoad();
45 | return String.format("Chunk data of length %d", getLength());
46 | } catch (IOException e) {
47 | return "Error loading "+super.toString();
48 | }
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDCompressedBlock.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.ByteArrayInputStream;
12 | import java.io.DataInput;
13 | import java.io.IOException;
14 | import java.io.InputStream;
15 | import java.util.zip.InflaterInputStream;
16 |
17 | /**
18 | * A block that stores compressed data.
19 | * TagID DFTAG_COMPRESSED
20 | * @author Ahmed Eldawy
21 | *
22 | */
23 | public class DDCompressedBlock extends DataDescriptor {
24 |
25 | DDCompressedBlock(HDFFile hdfFile, int tagID, int refNo, int offset,
26 | int length, boolean extended) {
27 | super(hdfFile, tagID, refNo, offset, length, extended);
28 | }
29 |
30 | @Override
31 | protected void readFields(DataInput input) throws IOException {
32 | // This method is not expected to be called directly because this block
33 | // does not have the necessary compression information to decompress
34 | // the raw data correctly
35 | System.err.println("This method should never be called directly on compressed blocks");
36 | }
37 |
38 | protected InputStream decompressDeflate(int level) throws IOException {
39 | // We need to retrieve the raw data first and then pass it to the
40 | // decompressor. Otherwise, the decompressor will not be able to determine
41 | // the end-of-file
42 | // TODO try to create a wrapper stream that reads only the correct amount
43 | // of bytes without having to load everything in memory
44 | byte[] rawData = new byte[getLength()];
45 | hdfFile.inStream.seek(offset);
46 | hdfFile.inStream.readFully(rawData);
47 | InputStream decompressedData =
48 | new InflaterInputStream(new ByteArrayInputStream(rawData));
49 | return decompressedData;
50 | }
51 |
52 | public String toString() {
53 | return String.format("Compressed block <%d, %d>", tagID, refNo);
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDID.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | /**
12 | * The full ID of a data descriptor. This class is immutable and its contents
13 | * are not allowed to change.
14 | * @author Ahmed Eldawy
15 | *
16 | */
/**
 * The full ID of a data descriptor: the pair (tag, reference number).
 * Instances are immutable; both fields are final.
 */
public class DDID {
  /** The tag that identifies the type of the data descriptor */
  public final int tagID;
  /** The reference number that distinguishes descriptors of the same tag */
  public final int refNo;

  public DDID(int tag, int refNo) {
    this.tagID = tag;
    this.refNo = refNo;
  }

  @Override
  public int hashCode() {
    // Fix: the previous (tagID + refNo) collided for any swapped pair,
    // e.g. (1,2) and (2,1). Mixing with a multiplier keeps such keys apart.
    return tagID * 31 + refNo;
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof DDID))
      return false;
    DDID other = (DDID) obj;
    return this.tagID == other.tagID && this.refNo == other.refNo;
  }

  @Override
  public String toString() {
    return String.format("DD<%d,%d>", tagID, refNo);
  }
}
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDLinkedBlock.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.ByteArrayInputStream;
12 | import java.io.DataInput;
13 | import java.io.DataInputStream;
14 | import java.io.IOException;
15 |
16 | /**
17 | * A data descriptor for linked block table
18 | * @author Ahmed Eldawy
19 | *
20 | */
21 | public class DDLinkedBlock extends DataDescriptor {
22 |
23 | /**Raw data*/
24 | public byte[] data;
25 |
26 | /**Reference number for next table. Zero if no more linked block tables*/
27 | protected int nextRef;
28 |
29 | /**A list of all block references stored in this table*/
30 | protected int[] blockReferences;
31 |
32 | /**
33 | * @param hdfFile
34 | * @param tagID
35 | * @param refNo
36 | * @param offset
37 | * @param length
38 | * @param extended
39 | */
40 | public DDLinkedBlock(HDFFile hdfFile, int tagID, int refNo, int offset,
41 | int length, boolean extended) {
42 | super(hdfFile, tagID, refNo, offset, length, extended);
43 | }
44 |
45 | @Override
46 | protected void readFields(DataInput input) throws IOException {
47 | // Note. We cannot parse the data at this point because a linked block
48 | // might refer to either a linked block table or data block according
49 | // to the context in which it appears
50 | data = new byte[getLength()];
51 | input.readFully(data);
52 | }
53 |
54 | /**
55 | * Reads all the data contained in this object
56 | * @return
57 | * @throws IOException
58 | */
59 | protected byte[] readContainedData() throws IOException {
60 | lazyLoad();
61 | return data;
62 | }
63 |
64 | /**
65 | * Retrieve the references of all contained block if this linked block is
66 | * a linked block table.
67 | * @return
68 | * @throws IOException
69 | */
70 | public int[] getBlockReferences() throws IOException {
71 | lazyLoad();
72 |
73 | if (blockReferences == null) {
74 | // Lazy parse
75 | DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
76 | nextRef = in.readUnsignedShort();
77 | blockReferences = new int[(data.length - 2) / 2];
78 | for (int i_blk = 0; i_blk < blockReferences.length; i_blk++)
79 | blockReferences[i_blk] = in.readUnsignedShort();
80 | in.close();
81 | }
82 |
83 | return blockReferences;
84 | }
85 |
86 | }
87 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDNull.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * Data descriptor with no data. This tag is used for place holding and to fill
16 | * empty portions of the data description block. The length and offset fields
17 | * are always zero.
18 | * @author Ahmed Eldawy
19 | *
20 | */
21 | public class DDNull extends DataDescriptor {
22 |
23 | public DDNull(HDFFile hdfFile, int tagID, int refNo, int offset,
24 | int length) {
25 | super(hdfFile, tagID, refNo, offset, length, false);
26 | }
27 |
28 | @Override
29 | protected void readFields(DataInput input) throws IOException {
30 | // Nothing to read
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDNumberType.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * @author Ahmed Eldawy
16 | *
17 | */
18 | public class DDNumberType extends DataDescriptor {
19 |
20 | /**Version number of the number type information*/
21 | protected int version;
22 |
23 | /**
24 | * Type of the data:
25 | * Unsigned integer, signed integer, unsigned character, character,
26 | * floating point, double precision floating point
27 | */
28 | protected int type;
29 |
30 | /**Number of bits, all of which are assumed to be significant*/
31 | protected int width;
32 |
33 | /**
34 | * A generic value, with different interpretations depending on type:
35 | * floating point, integer, or character.
36 | */
37 | protected int klass;
38 |
39 | DDNumberType(HDFFile hdfFile, int tagID, int refNo, int offset,
40 | int length, boolean extended) {
41 | super(hdfFile, tagID, refNo, offset, length, extended);
42 | }
43 |
44 | @Override
45 | protected void readFields(DataInput input) throws IOException {
46 | this.version = input.readUnsignedByte();
47 | this.type = input.readUnsignedByte();
48 | this.width = input.readUnsignedByte();
49 | this.klass = input.readUnsignedByte();
50 | }
51 |
52 | public int getNumberType() throws IOException {
53 | lazyLoad();
54 | return type;
55 | }
56 |
57 | public int getDataSize() throws IOException {
58 | lazyLoad();
59 | switch (type) {
60 | case HDFConstants.DFNT_UINT8: return 1;
61 | case HDFConstants.DFNT_INT16:
62 | case HDFConstants.DFNT_UINT16: return 2;
63 | case HDFConstants.DFNT_INT32: return 4;
64 | default: throw new RuntimeException("Unsupported type "+type);
65 | }
66 | }
67 |
68 | @Override
69 | public String toString() {
70 | try {
71 | lazyLoad();
72 | return String.format("Number type %d %d %d %d", version, type, width, klass);
73 | } catch (IOException e) {
74 | return "Error loading "+super.toString();
75 | }
76 | }
77 |
78 | }
79 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDScientificData.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * Data descriptor for array of scientific data.
16 | * @author Ahmed Eldawy
17 | *
18 | */
19 | public class DDScientificData extends DataDescriptor {
20 |
21 | /**Raw data*/
22 | public byte[] data;
23 |
24 | DDScientificData(HDFFile hdfFile, int tagID, int refNo, int offset,
25 | int length, boolean extended) {
26 | super(hdfFile, tagID, refNo, offset, length, extended);
27 | }
28 |
29 | @Override
30 | protected void readFields(DataInput input) throws IOException {
31 | data = new byte[getLength()];
32 | input.readFully(data);
33 | }
34 |
35 | @Override
36 | public String toString() {
37 | try {
38 | lazyLoad();
39 | return String.format("Scientific data of size %d", getLength());
40 | } catch (IOException e) {
41 | return "Error loading "+super.toString();
42 | }
43 | }
44 |
45 | public byte[] getData() throws IOException {
46 | lazyLoad();
47 | return data;
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDUnknown.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 |
10 | package edu.umn.cs.spatialHadoop.hdf;
11 |
12 | import java.io.DataInput;
13 | import java.io.IOException;
14 |
15 | /**
16 | * A place holder for data descriptors with unsupported tag number
17 | * @author Ahmed Eldawy
18 | *
19 | */
public class DDUnknown extends DataDescriptor {

  // NOTE(review): this field is never assigned anywhere in this class;
  // readFields below leaves it null. Either populate it or remove it.
  public byte[] rawData;

  DDUnknown(HDFFile hdfFile, int tagID, int refNo, int offset, int length,
      boolean extended) {
    super(hdfFile, tagID, refNo, offset, length, extended);
  }

  @Override
  protected void readFields(DataInput input) throws IOException {
    // Intentionally empty: the tag is unsupported, so the payload is skipped.
  }

  @Override
  public String toString() {
    try {
      // Loading only resolves the length; the data itself is never parsed
      lazyLoad();
      return String.format("Unknown tag %d with data of size %d", tagID, getLength());
    } catch (IOException e) {
      return "Error loading "+super.toString();
    }
  }
}
43 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDVSet.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * @author Ahmed Eldawy
16 | */
17 | public class DDVSet extends DataDescriptor {
18 |
19 | /**
20 | * The data stored in this set. This data can be interpreted only
21 | * according to the corresponding {@link DDVDataHeader}
22 | */
23 | protected byte[] data;
24 |
25 | DDVSet(HDFFile hdfFile, int tagID, int refNo, int offset, int length,
26 | boolean extended) {
27 | super(hdfFile, tagID, refNo, offset, length, extended);
28 | }
29 |
30 | @Override
31 | protected void readFields(DataInput input) throws IOException {
32 | this.data = new byte[getLength()];
33 | input.readFully(data);
34 | }
35 |
36 | byte[] getData() throws IOException {
37 | lazyLoad();
38 | return data;
39 | }
40 |
41 | @Override
42 | public String toString() {
43 | try {
44 | lazyLoad();
45 | return String.format("VSet of total size %d", data.length);
46 | } catch (IOException e) {
47 | return "Error loading "+super.toString();
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/hdf/DDVersion.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.hdf;
10 |
11 | import java.io.DataInput;
12 | import java.io.IOException;
13 |
14 | /**
15 | * Data descriptor for the library version number. It contains the complete
16 | * version number and a descriptive string for the latest version of the HDF
17 | * library used to write the file.
18 | * @author Ahmed Eldawy
19 | *
20 | */
21 | public class DDVersion extends DataDescriptor {
22 |
23 | /** Major version number */
24 | public int majorVersion;
25 |
26 | /** Minor version number */
27 | public int minorVersion;
28 |
29 | /** Release number */
30 | public int release;
31 |
32 | /**
33 | * A descriptive string for the latest version of the HDF library used to
34 | * write to the file
35 | */
36 | public String name;
37 |
38 | DDVersion(HDFFile hdfFile, int tagID, int refNo, int offset, int length,
39 | boolean extended) {
40 | super(hdfFile, tagID, refNo, offset, length, extended);
41 | }
42 |
43 | @Override
44 | protected void readFields(DataInput input) throws IOException {
45 | this.majorVersion = input.readInt();
46 | this.minorVersion = input.readInt();
47 | this.release = input.readInt();
48 | byte[] nameBytes = new byte[getLength() - 12];
49 | input.readFully(nameBytes);
50 | name = new String(nameBytes);
51 | }
52 |
53 | @Override
54 | public String toString() {
55 | try {
56 | lazyLoad();
57 | return String.format("Version %d.%d.%d '%s'", majorVersion, minorVersion, release, name);
58 | } catch (IOException e) {
59 | return "Error loading "+super.toString();
60 | }
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/indexing/RTreeGBPartitioner.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.indexing;
2 |
3 | import edu.umn.cs.spatialHadoop.core.Point;
4 | import edu.umn.cs.spatialHadoop.core.Rectangle;
5 | import org.apache.hadoop.conf.Configuration;
6 |
7 | /**
8 | * An implementation of the R-tree partitioner based on a gray box implementation of the R-tree linear-time split
9 | * algorithm. It seems a little bit weird to extend the {@link AbstractRTreeBBPartitioner} rather than the
10 | * {@link AbstractRTreeGBPartitioner} for this gray box implementation. However, since the auxiliary data structure is
11 | * not supported by the graybox R-tree partitioner, we found that it is closer to the BB implementation in this manner.
12 | */
@Partitioner.GlobalIndexerMetadata(disjoint = true, extension = "rtreegb",
    requireSample = true)
public class RTreeGBPartitioner extends AbstractRTreeBBPartitioner {

  /**The minimum fraction to use when applying the linear-time split algorithm*/
  protected float fractionMinSplitSize;

  @Override
  public RTreeGuttman createRTree(int m, int M) {
    // NOTE(review): this partitioner never materializes an R-tree;
    // construct() below calls RTreeGuttman.partitionPoints directly.
    // Returning null is safe only if no caller ever invokes createRTree
    // on this subclass -- verify against AbstractRTreeBBPartitioner.
    return null;
  }

  @Override
  public void setup(Configuration conf) {
    super.setup(conf);
    // Ratio between the minimum and maximum node capacities (m/M)
    this.mMRatio = conf.getFloat("mMRatio", 0.95f);
    // Passed through to the linear-time split; 0.0 by default
    this.fractionMinSplitSize = conf.getFloat("fractionMinSplitSize", 0.0f);
  }

  @Override
  public void construct(Rectangle mbr, Point[] points, int capacity) {
    // Split the sample points into parallel coordinate arrays as expected
    // by RTreeGuttman.partitionPoints
    double[] xs = new double[points.length];
    double[] ys = new double[points.length];
    for (int i = 0; i < points.length; i++) {
      xs[i] = points[i].x;
      ys[i] = points[i].y;
    }
    int M = capacity;
    // Minimum capacity is derived from the configured ratio (ceil keeps m >= 1
    // whenever M >= 1)
    int m = (int) Math.ceil(M * mMRatio);
    Rectangle[] partitions = RTreeGuttman.partitionPoints(xs, ys, m, M, fractionMinSplitSize);
    // Store partition MBRs in the parallel arrays used by the superclass
    x1s = new double[partitions.length];
    y1s = new double[partitions.length];
    x2s = new double[partitions.length];
    y2s = new double[partitions.length];
    for (int i = 0; i < partitions.length; i++) {
      x1s[i] = partitions[i].x1;
      y1s[i] = partitions[i].y1;
      x2s[i] = partitions[i].x2;
      y2s[i] = partitions[i].y2;
    }
  }
}
55 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/InputSubstream.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import java.io.IOException;
12 | import java.io.InputStream;
13 |
14 | /**
15 | * A wrapper around a stream that limits it to read a fixed number
16 | * of bytes.
17 | * @author Ahmed Eldawy
18 | *
19 | */
public class InputSubstream extends InputStream {

  /**The underlying stream that supplies the bytes*/
  private InputStream in;

  /**How many more bytes may be returned before reporting end-of-stream*/
  private long remainingBytes;

  /**
   * Wraps the given stream so that at most {@code length} bytes can be read
   * from it through this wrapper.
   * @param in the underlying stream
   * @param length the maximum number of bytes to expose
   */
  public InputSubstream(InputStream in, long length) {
    this.in = in;
    this.remainingBytes = length;
  }

  @Override
  public int read() throws IOException {
    if (remainingBytes <= 0)
      return -1;
    int b = in.read();
    // Fix: only count the byte when the underlying stream actually
    // returned one; previously the counter was decremented even at EOF
    if (b >= 0)
      remainingBytes--;
    return b;
  }

  /**
   * Bulk read override: forwards up to the remaining limit in one call
   * instead of falling back to the byte-at-a-time default of InputStream.
   */
  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    if (remainingBytes <= 0)
      return -1;
    int toRead = (int) Math.min(len, remainingBytes);
    int bytesRead = in.read(b, off, toRead);
    if (bytesRead > 0)
      remainingBytes -= bytesRead;
    return bytesRead;
  }

  @Override
  public int available() throws IOException {
    // Cap the answer at 1MB to keep callers from preallocating huge buffers
    return (int) Math.min(remainingBytes, 1024 * 1024);
  }

  @Override
  public void close() throws IOException {
    in.close();
  }

}
55 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/MemoryInputStream.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import java.io.ByteArrayInputStream;
12 | import java.io.IOException;
13 |
14 | import org.apache.hadoop.fs.PositionedReadable;
15 | import org.apache.hadoop.fs.Seekable;
16 |
17 | public class MemoryInputStream extends ByteArrayInputStream
18 | implements Seekable, PositionedReadable {
19 |
20 | int originalOffset;
21 |
22 | public MemoryInputStream(byte[] buf, int offset, int length) {
23 | super(buf, offset, length);
24 | originalOffset = offset;
25 | }
26 |
27 | public MemoryInputStream(byte[] buf) {
28 | super(buf);
29 | }
30 |
31 | public long getPos() {
32 | return pos - originalOffset;
33 | }
34 |
35 | @Override
36 | public void seek(long pos) throws IOException {
37 | this.mark = originalOffset;
38 | this.reset();
39 | this.skip(pos);
40 | }
41 |
42 | @Override
43 | public boolean seekToNewSource(long targetPos) throws IOException {
44 | return false;
45 | }
46 |
47 | @Override
48 | public int read(long position, byte[] buffer, int offset, int length)
49 | throws IOException {
50 | // TODO Auto-generated method stub
51 | return 0;
52 | }
53 |
54 | @Override
55 | public void readFully(long position, byte[] buffer, int offset, int length)
56 | throws IOException {
57 | System.arraycopy(buf, (int)(originalOffset+position), buffer, offset, length);
58 | }
59 |
60 | @Override
61 | public void readFully(long position, byte[] buffer) throws IOException {
62 | readFully(position, buffer, 0, buffer.length);
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/MemoryOutputStream.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import java.io.ByteArrayOutputStream;
12 |
/**
 * A ByteArrayOutputStream that writes directly into a caller-supplied buffer
 * and lets the caller query and reset the number of bytes written.
 */
public class MemoryOutputStream extends ByteArrayOutputStream {

  /**
   * Wraps the given buffer; subsequent writes start at its beginning.
   * @param buffer the buffer that receives all written bytes
   */
  public MemoryOutputStream(byte[] buffer) {
    this.buf = buffer;
    this.count = 0;
  }

  /** Number of valid bytes written so far. */
  public int getLength() {
    return count;
  }

  /** Discards everything written; the next write starts at the beginning. */
  public void clear() {
    reset();
  }

}
29 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/Text2.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import org.apache.hadoop.io.Text;
12 |
13 | /**
14 | * A modified version of Text which is optimized for appends.
15 | * @author Ahmed Eldawy
16 | *
17 | */
18 | public class Text2 extends Text implements TextSerializable {
19 |
20 | public Text2() {
21 | }
22 |
23 | public Text2(String string) {
24 | super(string);
25 | }
26 |
27 | public Text2(Text utf8) {
28 | super(utf8);
29 | }
30 |
31 | public Text2(byte[] utf8) {
32 | super(utf8);
33 | }
34 |
35 | @Override
36 | public Text toText(Text text) {
37 | text.append(getBytes(), 0, getLength());
38 | return text;
39 | }
40 |
41 | @Override
42 | public void fromText(Text text) {
43 | this.set(text);
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/TextSerializable.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import org.apache.hadoop.io.Text;
12 |
13 | /**
14 | * Implementing this interface allows objects to be converted easily
15 | * to and from a string.
16 | * @author Ahmed Eldawy
17 | *
18 | */
public interface TextSerializable {
  /**
   * Serializes this object as text, appending to whatever the given text
   * already contains.
   * @param text The text object to append to.
   * @return The same text that was passed as a parameter
   */
  public Text toText(Text text);

  /**
   * Restores the state of this object by parsing the given text.
   * @param text The text to parse
   */
  public void fromText(Text text);
}
33 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/io/TrackedOutputStream.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.io;
10 |
11 | import java.io.IOException;
12 | import java.io.OutputStream;
13 |
14 | /**
15 | * An output stream that keeps track of number of bytes written
16 | * @author Ahmed Eldawy
17 | *
18 | */
public class TrackedOutputStream extends OutputStream {

  /**The underlying output stream that actually receives the bytes*/
  private OutputStream rawOut;

  /**Number of bytes written to the output so far*/
  private long offset;

  public TrackedOutputStream(OutputStream raw) {
    this.rawOut = raw;
  }

  @Override
  public void write(int b) throws IOException {
    rawOut.write(b);
    this.offset++;
  }

  /**
   * Bulk write override: forwards the whole range in one call instead of
   * the byte-at-a-time default, while keeping the byte count accurate.
   */
  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    rawOut.write(b, off, len);
    this.offset += len;
  }

  @Override
  public void flush() throws IOException {
    rawOut.flush();
  }

  @Override
  public void close() throws IOException {
    rawOut.close();
  }

  @Override
  public int hashCode() {
    return rawOut.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    // Fix: the previous delegation rawOut.equals(obj) was not reflexive --
    // a tracker never equaled itself. Two trackers are equal when they wrap
    // equal underlying streams.
    if (this == obj)
      return true;
    if (!(obj instanceof TrackedOutputStream))
      return false;
    return rawOut.equals(((TrackedOutputStream) obj).rawOut);
  }

  /** Total number of bytes written through this stream so far. */
  public long getPos() {
    return offset;
  }
}
56 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/BlockFilter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import org.apache.hadoop.conf.Configuration;
12 |
13 | import edu.umn.cs.spatialHadoop.core.ResultCollector;
14 | import edu.umn.cs.spatialHadoop.core.ResultCollector2;
15 | import edu.umn.cs.spatialHadoop.indexing.GlobalIndex;
16 | import edu.umn.cs.spatialHadoop.indexing.Partition;
17 |
18 | /**
19 | * An interface for filtering blocks before running map tasks.
20 | * @author Ahmed Eldawy
21 | *
22 | */
public interface BlockFilter {

  /**
   * Configure the block filter the first time it is created.
   * @param conf the job configuration to read parameters from
   */
  public void configure(Configuration conf);

  /**
   * Selects the blocks that need to be processed by a MapReduce job.
   * @param gIndex the global index of the input file
   * @param output the collector that receives each selected partition
   */
  public void selectCells(GlobalIndex gIndex,
      ResultCollector output);

  /**
   * Selects block pairs that need to be processed together by a binary
   * MapReduce job. A binary MapReduce job is a job that deals with two input
   * files that need to be processed together (e.g., spatial join).
   * @param gIndex1 the global index of the first input file
   * @param gIndex2 the global index of the second input file
   * @param output the collector that receives each selected pair
   */
  public void selectCellPairs(GlobalIndex gIndex1,
      GlobalIndex gIndex2,
      ResultCollector2 output);
}
50 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/CombineBlockFilter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.util.Vector;
12 |
13 | import org.apache.hadoop.conf.Configuration;
14 |
15 | import edu.umn.cs.spatialHadoop.core.ResultCollector;
16 | import edu.umn.cs.spatialHadoop.indexing.GlobalIndex;
17 | import edu.umn.cs.spatialHadoop.indexing.Partition;
18 |
19 | /**
20 | * A block filter that combines multiple block filters with an AND clause.
21 | * @author Ahmed Eldawy
22 | *
23 | */
24 | public class CombineBlockFilter extends DefaultBlockFilter {
25 |
26 | /**A list of all underlying block filters*/
27 | private BlockFilter[] blockFilters;
28 |
29 | /**
30 | *
31 | */
32 | public CombineBlockFilter(BlockFilter bf1, BlockFilter bf2) {
33 | this.blockFilters = new BlockFilter[] {bf1, bf2};
34 | }
35 |
36 | @Override
37 | public void configure(Configuration conf) {
38 | for (BlockFilter bf : blockFilters)
39 | bf.configure(conf);
40 | }
41 |
42 | @Override
43 | public void selectCells(GlobalIndex gIndex,
44 | ResultCollector output) {
45 | final Vector selectedSoFar = new Vector();
46 | // First block filter is applied directly to the global index
47 | blockFilters[0].selectCells(gIndex, new ResultCollector() {
48 | @Override
49 | public void collect(Partition p) {
50 | selectedSoFar.add(p);
51 | }
52 | });
53 | // All remaining are served from the partitions selectedSoFar
54 | for (int i = 1; !selectedSoFar.isEmpty() && i < blockFilters.length; i++) {
55 | BlockFilter bf = blockFilters[i];
56 | gIndex = new GlobalIndex();
57 | gIndex.bulkLoad(selectedSoFar.toArray(new Partition[selectedSoFar.size()]));
58 | bf.selectCells(gIndex, new ResultCollector() {
59 | @Override
60 | public void collect(Partition p) {
61 | selectedSoFar.add(p);
62 | }
63 | });
64 | }
65 | // Match with whatever selected partitions
66 | for (Partition p : selectedSoFar)
67 | output.collect(p);
68 | }
69 |
70 | }
71 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/DefaultBlockFilter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import org.apache.hadoop.conf.Configuration;
12 |
13 | import edu.umn.cs.spatialHadoop.core.ResultCollector;
14 | import edu.umn.cs.spatialHadoop.core.ResultCollector2;
15 | import edu.umn.cs.spatialHadoop.indexing.GlobalIndex;
16 | import edu.umn.cs.spatialHadoop.indexing.Partition;
17 |
/**
 * A default implementation for BlockFilter that returns everything.
 * Every callback is a no-op, so subclasses only need to override the
 * hooks they actually care about.
 * @author eldawy
 *
 */
public class DefaultBlockFilter implements BlockFilter {

  @Override
  public void configure(Configuration conf) {
    // Do nothing; the default behavior needs no configuration
  }

  @Override
  public void selectCells(GlobalIndex gIndex,
      ResultCollector output) {
    // Do nothing. NOTE(review): selecting no cells appears to be treated
    // by callers as "no filtering" (return everything) — confirm against
    // the consumers of BlockFilter before relying on this.
  }

  @Override
  public void selectCellPairs(GlobalIndex gIndex1,
      GlobalIndex gIndex2,
      ResultCollector2 output) {
    // Do nothing; see the note on selectCells above
  }

}
44 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridOutputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.fs.FileSystem;
14 | import org.apache.hadoop.io.IntWritable;
15 | import org.apache.hadoop.mapred.FileOutputFormat;
16 | import org.apache.hadoop.mapred.JobConf;
17 | import org.apache.hadoop.mapred.RecordWriter;
18 | import org.apache.hadoop.util.Progressable;
19 |
20 | import edu.umn.cs.spatialHadoop.core.CellInfo;
21 | import edu.umn.cs.spatialHadoop.core.Shape;
22 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
23 |
24 | public class GridOutputFormat extends FileOutputFormat {
25 |
26 | @Override
27 | public RecordWriter getRecordWriter(FileSystem ignored,
28 | JobConf job,
29 | String name,
30 | Progressable progress)
31 | throws IOException {
32 | // Get grid info
33 | CellInfo[] cellsInfo = SpatialSite.getCells(job);
34 | GridRecordWriter writer = new GridRecordWriter(job, name, cellsInfo);
35 | return writer;
36 | }
37 |
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridOutputFormat2.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.fs.FileSystem;
14 | import org.apache.hadoop.io.NullWritable;
15 | import org.apache.hadoop.mapred.FileOutputFormat;
16 | import org.apache.hadoop.mapred.JobConf;
17 | import org.apache.hadoop.mapred.RecordWriter;
18 | import org.apache.hadoop.util.Progressable;
19 |
20 | import edu.umn.cs.spatialHadoop.core.CellInfo;
21 | import edu.umn.cs.spatialHadoop.core.Shape;
22 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
23 |
24 | public class GridOutputFormat2 extends FileOutputFormat {
25 |
26 | @Override
27 | public RecordWriter getRecordWriter(FileSystem ignored,
28 | JobConf job,
29 | String name,
30 | Progressable progress)
31 | throws IOException {
32 | // Get grid info
33 | CellInfo[] cellsInfo = SpatialSite.getCells(job);
34 | GridRecordWriter2 writer = new GridRecordWriter2(job, name, cellsInfo);
35 | return writer;
36 | }
37 |
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridOutputFormat3.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.fs.FileSystem;
14 | import org.apache.hadoop.mapred.FileOutputFormat;
15 | import org.apache.hadoop.mapred.JobConf;
16 | import org.apache.hadoop.mapred.RecordWriter;
17 | import org.apache.hadoop.util.Progressable;
18 |
19 | import edu.umn.cs.spatialHadoop.core.CellInfo;
20 | import edu.umn.cs.spatialHadoop.core.Rectangle;
21 | import edu.umn.cs.spatialHadoop.core.Shape;
22 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
23 |
24 | public class GridOutputFormat3 extends FileOutputFormat {
25 |
26 | @Override
27 | public RecordWriter getRecordWriter(FileSystem ignored,
28 | JobConf job,
29 | String name,
30 | Progressable progress)
31 | throws IOException {
32 | // Get grid info
33 | CellInfo[] cellsInfo = SpatialSite.getCells(job);
34 | GridRecordWriter3 writer = new GridRecordWriter3(job, name, cellsInfo);
35 | return writer;
36 | }
37 |
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridRecordWriter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.io.IntWritable;
14 | import org.apache.hadoop.mapred.JobConf;
15 | import org.apache.hadoop.mapred.RecordWriter;
16 | import org.apache.hadoop.mapred.Reporter;
17 |
18 | import edu.umn.cs.spatialHadoop.core.CellInfo;
19 | import edu.umn.cs.spatialHadoop.core.Shape;
20 |
21 | /**
22 | * A record writer that can be used in MapReduce programs to write an index
23 | * file where the key is the cell ID and the value is the shape to write to
24 | * that cell. A given shape is not implicitly replicated to any other cells
25 | * other than the one provided.
26 | *
27 | * @author Ahmed Eldawy
28 | *
29 | * @param
30 | */
31 | public class GridRecordWriter
32 | extends edu.umn.cs.spatialHadoop.core.GridRecordWriter implements RecordWriter {
33 |
34 | public GridRecordWriter(JobConf job, String name, CellInfo[] cells) throws IOException {
35 | super(null, job, name, cells);
36 | }
37 |
38 | @Override
39 | public void write(IntWritable key, S value) throws IOException {
40 | super.write(key.get(), value);
41 | }
42 |
43 | @Override
44 | public void close(Reporter reporter) throws IOException {
45 | super.close(reporter);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridRecordWriter2.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.io.NullWritable;
14 | import org.apache.hadoop.mapred.JobConf;
15 | import org.apache.hadoop.mapred.RecordWriter;
16 | import org.apache.hadoop.mapred.Reporter;
17 |
18 | import edu.umn.cs.spatialHadoop.core.CellInfo;
19 | import edu.umn.cs.spatialHadoop.core.Shape;
20 |
21 | /**
22 | * A record writer that can be used in MapReduce programs. It writes pairs
23 | * where the key is {@link NullWritable}(i.e., not provided) and the value
24 | * is a shape. The given shape is replicated to every cell it overlaps with.
25 | * @author Ahmed Eldawy
26 | *
27 | * @param
28 | */
29 | public class GridRecordWriter2
30 | extends edu.umn.cs.spatialHadoop.core.GridRecordWriter implements RecordWriter {
31 |
32 | public GridRecordWriter2(JobConf job, String name, CellInfo[] cells) throws IOException {
33 | super(null, job, name, cells);
34 | }
35 |
36 | @Override
37 | public void write(NullWritable key, S value) throws IOException {
38 | super.write(key, value);
39 | }
40 |
41 | @Override
42 | public void close(Reporter reporter) throws IOException {
43 | super.close(reporter);
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/GridRecordWriter3.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.mapred.JobConf;
14 | import org.apache.hadoop.mapred.RecordWriter;
15 | import org.apache.hadoop.mapred.Reporter;
16 |
17 | import edu.umn.cs.spatialHadoop.core.CellInfo;
18 | import edu.umn.cs.spatialHadoop.core.Rectangle;
19 | import edu.umn.cs.spatialHadoop.core.Shape;
20 |
21 | /**
22 | * A record writer to be used to write the output of MapReduce programs to
23 | * a spatial index. The key is a rectangle which indicates the MBR of the
24 | * partition and value is a shape. This record writer does not replicate the
25 | * given shape to partition (i.e., write it only to the given partition). If
26 | * the provided rectangle (key) does not match any of the existing partitions,
27 | * a new partition is created with the given boundaries.
28 | * @author Ahmed Eldawy
29 | *
30 | * @param
31 | */
32 | public class GridRecordWriter3
33 | extends edu.umn.cs.spatialHadoop.core.GridRecordWriter implements RecordWriter {
34 |
35 | public GridRecordWriter3(JobConf job, String name, CellInfo[] cells) throws IOException {
36 | super(null, job, name, cells);
37 | }
38 |
39 | @Override
40 | public void write(Rectangle key, S value) throws IOException {
41 |
42 | super.write(key, value);
43 | }
44 |
45 | @Override
46 | public void close(Reporter reporter) throws IOException {
47 | super.close(reporter);
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/PairWritable.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.DataInput;
12 | import java.io.DataOutput;
13 | import java.io.IOException;
14 |
15 | import org.apache.hadoop.io.Writable;
16 |
17 | /**A class that stores a pair of objects where both are writable*/
18 | public class PairWritable implements Writable {
19 | public T first;
20 | public T second;
21 |
22 | public PairWritable() {}
23 |
24 | public PairWritable(T first, T second) {
25 | this.first = first;
26 | this.second = second;
27 | }
28 |
29 | @Override
30 | public void write(DataOutput out) throws IOException {
31 | first.write(out);
32 | second.write(out);
33 | }
34 |
35 | @Override
36 | public void readFields(DataInput in) throws IOException {
37 | first.readFields(in);
38 | second.readFields(in);
39 | }
40 |
41 | @Override
42 | public String toString() {
43 | return "<"+first.toString()+", "+second.toString()+">";
44 | }
45 | }
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeArrayInputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 | import java.io.IOException;
11 |
12 | import org.apache.hadoop.io.ArrayWritable;
13 | import org.apache.hadoop.mapred.InputSplit;
14 | import org.apache.hadoop.mapred.JobConf;
15 | import org.apache.hadoop.mapred.RecordReader;
16 | import org.apache.hadoop.mapred.Reporter;
17 |
18 | import edu.umn.cs.spatialHadoop.core.Rectangle;
19 |
20 | /**
21 | * Reads a file stored as a list of RTrees
22 | * @author eldawy
23 | *
24 | */
25 | public class ShapeArrayInputFormat extends SpatialInputFormat {
26 |
27 | @Override
28 | public RecordReader getRecordReader(InputSplit split,
29 | JobConf job, Reporter reporter) throws IOException {
30 | if (reporter != null)
31 | reporter.setStatus(split.toString());
32 | this.rrClass = ShapeArrayRecordReader.class;
33 | return super.getRecordReader(split, job, reporter);
34 | }
35 | }
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeArrayRecordReader.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 | import java.io.InputStream;
13 |
14 | import org.apache.commons.logging.Log;
15 | import org.apache.commons.logging.LogFactory;
16 | import org.apache.hadoop.conf.Configuration;
17 | import org.apache.hadoop.io.ArrayWritable;
18 | import org.apache.hadoop.mapred.FileSplit;
19 | import org.apache.hadoop.mapred.Reporter;
20 | import org.apache.hadoop.mapred.lib.CombineFileSplit;
21 |
22 | import edu.umn.cs.spatialHadoop.OperationsParams;
23 | import edu.umn.cs.spatialHadoop.core.Rectangle;
24 | import edu.umn.cs.spatialHadoop.core.Shape;
25 |
26 |
27 | /**
28 | * Reads a file as a list of RTrees
29 | * @author Ahmed Eldawy
30 | *
31 | */
32 | public class ShapeArrayRecordReader extends SpatialRecordReader {
33 | public static final Log LOG = LogFactory.getLog(ShapeArrayRecordReader.class);
34 |
35 | /**Shape used to deserialize shapes from disk*/
36 | private Shape shape;
37 |
38 | public ShapeArrayRecordReader(CombineFileSplit split, Configuration conf,
39 | Reporter reporter, Integer index) throws IOException {
40 | super(split, conf, reporter, index);
41 | shape = OperationsParams.getShape(conf, "shape");
42 | }
43 |
44 | public ShapeArrayRecordReader(Configuration job, FileSplit split)
45 | throws IOException {
46 | super(job, split);
47 | shape = OperationsParams.getShape(job, "shape");
48 | }
49 |
50 | public ShapeArrayRecordReader(InputStream is, long offset, long endOffset)
51 | throws IOException {
52 | super(is, offset, endOffset);
53 | }
54 |
55 | @Override
56 | public boolean next(Rectangle key, ArrayWritable shapes) throws IOException {
57 | // Get cellInfo for the current position in file
58 | boolean element_read = nextShapes(shapes);
59 | key.set(cellMbr); // Set the cellInfo for the last block read
60 | return element_read;
61 | }
62 |
63 | @Override
64 | public Rectangle createKey() {
65 | return new Rectangle();
66 | }
67 |
68 | @Override
69 | public ArrayWritable createValue() {
70 | return new ArrayWritable(shape.getClass());
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeInputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.mapred.InputSplit;
14 | import org.apache.hadoop.mapred.JobConf;
15 | import org.apache.hadoop.mapred.RecordReader;
16 | import org.apache.hadoop.mapred.Reporter;
17 |
18 | import edu.umn.cs.spatialHadoop.core.Rectangle;
19 | import edu.umn.cs.spatialHadoop.core.Shape;
20 |
21 |
22 | /**
23 | * An input format used with spatial data. It filters generated splits before
24 | * creating record readers.
25 | * @author Ahmed Eldawy
26 | *
27 | * @param
28 | */
29 | public class ShapeInputFormat extends SpatialInputFormat {
30 |
31 | @Override
32 | public RecordReader getRecordReader(InputSplit split,
33 | JobConf job, Reporter reporter) throws IOException {
34 | if (reporter != null)
35 | reporter.setStatus(split.toString());
36 | this.rrClass = ShapeRecordReader.class;
37 | return super.getRecordReader(split, job, reporter);
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeIterInputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 | import java.io.IOException;
11 |
12 | import org.apache.hadoop.mapred.InputSplit;
13 | import org.apache.hadoop.mapred.JobConf;
14 | import org.apache.hadoop.mapred.RecordReader;
15 | import org.apache.hadoop.mapred.Reporter;
16 |
17 | import edu.umn.cs.spatialHadoop.core.Rectangle;
18 | import edu.umn.cs.spatialHadoop.core.Shape;
19 |
20 | /**
21 | * Reads a file stored as a list of RTrees
22 | * @author eldawy
23 | *
24 | */
25 | public class ShapeIterInputFormat extends SpatialInputFormat> {
26 |
27 | @Override
28 | public RecordReader> getRecordReader(InputSplit split,
29 | JobConf job, Reporter reporter) throws IOException {
30 | if (reporter != null)
31 | reporter.setStatus(split.toString());
32 | this.rrClass = ShapeIterRecordReader.class;
33 | return super.getRecordReader(split, job, reporter);
34 | }
35 | }
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeIterRecordReader.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 | import java.io.InputStream;
13 |
14 | import org.apache.commons.logging.Log;
15 | import org.apache.commons.logging.LogFactory;
16 | import org.apache.hadoop.conf.Configuration;
17 | import org.apache.hadoop.mapred.FileSplit;
18 | import org.apache.hadoop.mapred.Reporter;
19 | import org.apache.hadoop.mapred.lib.CombineFileSplit;
20 |
21 | import edu.umn.cs.spatialHadoop.OperationsParams;
22 | import edu.umn.cs.spatialHadoop.core.Rectangle;
23 | import edu.umn.cs.spatialHadoop.core.Shape;
24 | import edu.umn.cs.spatialHadoop.mapred.SpatialRecordReader.ShapeIterator;
25 |
26 |
27 | /**
28 | * Reads a file as a list of RTrees
29 | * @author Ahmed Eldawy
30 | *
31 | */
32 | public class ShapeIterRecordReader extends SpatialRecordReader {
33 | public static final Log LOG = LogFactory.getLog(ShapeIterRecordReader.class);
34 | private Shape shape;
35 |
36 | public ShapeIterRecordReader(CombineFileSplit split, Configuration conf,
37 | Reporter reporter, Integer index) throws IOException {
38 | super(split, conf, reporter, index);
39 | this.shape = OperationsParams.getShape(conf, "shape");
40 | }
41 |
42 | public ShapeIterRecordReader(Configuration conf, FileSplit split)
43 | throws IOException {
44 | super(conf, split);
45 | this.shape = OperationsParams.getShape(conf, "shape");
46 | }
47 |
48 | public ShapeIterRecordReader(InputStream is, long offset, long endOffset)
49 | throws IOException {
50 | super(is, offset, endOffset);
51 | }
52 |
53 | public void setShape(Shape shape) {
54 | this.shape = shape;
55 | }
56 |
57 | @Override
58 | public boolean next(Rectangle key, ShapeIterator shapeIter) throws IOException {
59 | // Get cellInfo for the current position in file
60 | boolean element_read = nextShapeIter(shapeIter);
61 | key.set(cellMbr); // Set the cellInfo for the last block read
62 | return element_read;
63 | }
64 |
65 | @Override
66 | public Rectangle createKey() {
67 | return new Rectangle();
68 | }
69 |
70 | @Override
71 | public ShapeIterator createValue() {
72 | ShapeIterator shapeIter = new ShapeIterator();
73 | shapeIter.setShape(shape);
74 | return shapeIter;
75 | }
76 |
77 | }
78 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeLineInputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.io.Text;
14 | import org.apache.hadoop.mapred.InputSplit;
15 | import org.apache.hadoop.mapred.JobConf;
16 | import org.apache.hadoop.mapred.RecordReader;
17 | import org.apache.hadoop.mapred.Reporter;
18 |
19 | import edu.umn.cs.spatialHadoop.core.Rectangle;
20 |
21 |
22 | /**
23 | * An input format used with spatial data. It filters generated splits before
24 | * creating record readers.
25 | * @author Ahmed Eldawy
26 | *
27 | */
28 | public class ShapeLineInputFormat extends SpatialInputFormat {
29 |
30 | @Override
31 | public RecordReader getRecordReader(InputSplit split,
32 | JobConf job, Reporter reporter) throws IOException {
33 | if (reporter != null)
34 | reporter.setStatus(split.toString());
35 | this.rrClass = ShapeLineRecordReader.class;
36 | return super.getRecordReader(split, job, reporter);
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeLineRecordReader.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 | import java.io.InputStream;
13 |
14 | import org.apache.hadoop.conf.Configuration;
15 | import org.apache.hadoop.io.Text;
16 | import org.apache.hadoop.mapred.FileSplit;
17 | import org.apache.hadoop.mapred.Reporter;
18 | import org.apache.hadoop.mapred.lib.CombineFileSplit;
19 |
20 | import edu.umn.cs.spatialHadoop.core.Rectangle;
21 | import edu.umn.cs.spatialHadoop.core.Shape;
22 |
23 | /**
24 | * A record reader for objects of class {@link Shape}
25 | * @author Ahmed Eldawy
26 | *
27 | */
28 | public class ShapeLineRecordReader
29 | extends SpatialRecordReader {
30 |
31 | public ShapeLineRecordReader(Configuration job, FileSplit split)
32 | throws IOException {
33 | super(job, split);
34 | }
35 |
36 | public ShapeLineRecordReader(CombineFileSplit split, Configuration conf,
37 | Reporter reporter, Integer index) throws IOException {
38 | super(split, conf, reporter, index);
39 | }
40 |
41 | public ShapeLineRecordReader(InputStream in, long offset, long endOffset)
42 | throws IOException {
43 | super(in, offset, endOffset);
44 | }
45 |
46 | @Override
47 | public boolean next(Rectangle key, Text shapeLine) throws IOException {
48 | boolean read_line = nextLine(shapeLine);
49 | key.set(cellMbr);
50 | return read_line;
51 | }
52 |
53 | @Override
54 | public Rectangle createKey() {
55 | return new Rectangle();
56 | }
57 |
58 | @Override
59 | public Text createValue() {
60 | return new Text();
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/mapred/ShapeRecordReader.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.mapred;
10 |
11 | import java.io.IOException;
12 | import java.io.InputStream;
13 |
14 | import org.apache.commons.logging.Log;
15 | import org.apache.commons.logging.LogFactory;
16 | import org.apache.hadoop.conf.Configuration;
17 | import org.apache.hadoop.mapred.FileSplit;
18 | import org.apache.hadoop.mapred.Reporter;
19 | import org.apache.hadoop.mapred.lib.CombineFileSplit;
20 |
21 | import edu.umn.cs.spatialHadoop.core.Rectangle;
22 | import edu.umn.cs.spatialHadoop.core.Shape;
23 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
24 |
25 | /**
26 | * A record reader for objects of class {@link Shape}
27 | * @author Ahmed Eldawy
28 | *
29 | */
30 | public class ShapeRecordReader
31 | extends SpatialRecordReader {
32 |
33 | @SuppressWarnings("unused")
34 | private static final Log LOG = LogFactory.getLog(ShapeRecordReader.class);
35 |
36 | /**Object used for deserialization*/
37 | private S stockShape;
38 |
39 | public ShapeRecordReader(Configuration job, FileSplit split)
40 | throws IOException {
41 | super(job, split);
42 | stockShape = (S) SpatialSite.createStockShape(job);
43 | }
44 |
45 | public ShapeRecordReader(CombineFileSplit split, Configuration conf,
46 | Reporter reporter, Integer index) throws IOException {
47 | super(split, conf, reporter, index);
48 | stockShape = (S) SpatialSite.createStockShape(conf);
49 | }
50 |
51 | public ShapeRecordReader(InputStream in, long offset, long endOffset)
52 | throws IOException {
53 | super(in, offset, endOffset);
54 | }
55 |
56 | @Override
57 | public boolean next(Rectangle key, S shape) throws IOException {
58 | boolean read_line = nextShape(shape);
59 | key.set(cellMbr);
60 | return read_line;
61 | }
62 |
63 | @Override
64 | public Rectangle createKey() {
65 | return new Rectangle();
66 | }
67 |
68 | @Override
69 | public S createValue() {
70 | return stockShape;
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/nasa/GeoProjector.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.nasa;
10 |
11 | import edu.umn.cs.spatialHadoop.core.Shape;
12 |
/**
 * Converts a shape from the latitude/longitude space to another projection.
 * Implementations mutate the given shape rather than returning a new one.
 * @author Ahmed Eldawy
 *
 */
public interface GeoProjector {
  /**
   * Converts the given shape (in-place) provided in latitude/longitude space
   * to the target projection of the implementing class.
   * @param shape the shape to project; modified in place
   */
  public void project(Shape shape);
}
25 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/nasa/MercatorProjector.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 |
10 | package edu.umn.cs.spatialHadoop.nasa;
11 |
12 | import edu.umn.cs.spatialHadoop.core.Point;
13 | import edu.umn.cs.spatialHadoop.core.Rectangle;
14 | import edu.umn.cs.spatialHadoop.core.Shape;
15 |
16 |
17 | /**
18 | * Projects a NASAPoint in HDF file from Sinusoidal projection to Mercator
19 | * projection.
20 | * @author Ahmed Eldawy
21 | *
22 | */
23 | public class MercatorProjector implements GeoProjector {
24 |
25 | @Override
26 | public void project(Shape shape) {
27 | if (shape instanceof Point) {
28 | Point pt = (Point) shape;
29 | // Use the Mercator projection to draw an image similar to Google Maps
30 | // http://stackoverflow.com/questions/14329691/covert-latitude-longitude-point-to-a-pixels-x-y-on-mercator-projection
31 | double latRad = pt.y * Math.PI / 180.0;
32 | double mercN = Math.log(Math.tan(Math.PI/4-latRad/2));
33 | pt.y = -180 * mercN / Math.PI;
34 | } else if (shape instanceof Rectangle) {
35 | Rectangle rect = (Rectangle) shape;
36 | double latRad = rect.y1 * Math.PI / 180.0;
37 | double mercN = Math.log(Math.tan(Math.PI/4-latRad/2));
38 | rect.y1 = -180 * mercN / Math.PI;
39 |
40 | latRad = rect.y2 * Math.PI / 180.0;
41 | mercN = Math.log(Math.tan(Math.PI/4-latRad/2));
42 | rect.y2 = -180 * mercN / Math.PI;
43 | } else {
44 | throw new RuntimeException("Cannot project shapes of type "+shape.getClass());
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/nasa/NASAShape.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.nasa;
10 |
11 | import edu.umn.cs.spatialHadoop.core.Shape;
12 |
/**
 * A shape coming from NASA datasets. It contains an extra value corresponding
 * to the physical reading of the underlying area.
 * @author Ahmed Eldawy
 *
 */
public interface NASAShape extends Shape {
  /**Sets the physical reading (e.g., a sensor value) associated with this shape*/
  public void setValue(int v);
  /**Returns the physical reading associated with this shape*/
  public int getValue();

  /**Sets the timestamp of the reading (epoch milliseconds presumably — confirm with implementations)*/
  public void setTimestamp(long t);
  /**Returns the timestamp of the reading*/
  public long getTimestamp();
}
26 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/operations/Main.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.operations;
10 |
11 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
12 | import org.apache.hadoop.conf.Configuration;
13 | import org.apache.hadoop.util.ProgramDriver;
14 |
15 | import edu.umn.cs.spatialHadoop.ReadFile;
16 | import edu.umn.cs.spatialHadoop.indexing.Indexer;
17 | import edu.umn.cs.spatialHadoop.nasa.AggregateQuadTree;
18 | import edu.umn.cs.spatialHadoop.nasa.HDFPlot;
19 | import edu.umn.cs.spatialHadoop.nasa.HDFToText;
20 | import edu.umn.cs.spatialHadoop.nasa.MultiHDFPlot;
21 | import edu.umn.cs.spatialHadoop.nasa.ShahedServer;
22 | import edu.umn.cs.spatialHadoop.visualization.GeometricPlot;
23 | import edu.umn.cs.spatialHadoop.visualization.HadoopvizServer;
24 | import edu.umn.cs.spatialHadoop.visualization.HeatMapPlot;
25 | import edu.umn.cs.spatialHadoop.visualization.LakesPlot;
26 | import edu.umn.cs.spatialHadoop.visualization.MagickPlot;
27 | import edu.umn.cs.spatialHadoop.delaunay.DelaunayTriangulation;
28 | import org.yaml.snakeyaml.Yaml;
29 |
30 | import java.util.List;
31 | import java.util.Map;
32 |
33 |
34 | /**
35 | * The main entry point to all queries.
36 | *
37 | * @author Ahmed Eldawy
38 | *
39 | */
40 | public class Main {
41 |
42 | static {
43 | // Load configuration from files
44 | Configuration.addDefaultResource("spatial-default.xml");
45 | Configuration.addDefaultResource("spatial-site.xml");
46 | }
47 |
48 | public static void main(String[] args) {
49 | int exitCode = -1;
50 | ProgramDriver pgd = new ProgramDriver();
51 | try {
52 | // Add classes from a configuration file
53 | Yaml yaml = new Yaml();
54 | List ops = yaml.load(SpatialSite.class.getResourceAsStream("/spatial-operations.yaml"));
55 | for (String op : ops) {
56 | Class> opClass = Class.forName(op);
57 | OperationMetadata opMetadata = opClass.getAnnotation(OperationMetadata.class);
58 | pgd.addClass(opMetadata.shortName(), opClass, opMetadata.description());
59 | }
60 |
61 | pgd.driver(args);
62 |
63 | // Success
64 | exitCode = 0;
65 | }
66 | catch(Throwable e){
67 | e.printStackTrace();
68 | }
69 |
70 | System.exit(exitCode);
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/operations/OperationMetadata.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.operations;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
/**
 * This annotation describes the metadata of an operation that can be accessed
 * from command line. Operations listed in spatial-operations.yaml must carry
 * this annotation so the driver can register them by short name.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface OperationMetadata {
  /**The short name used to access this operation from command line*/
  String shortName();

  /**A description of this operation*/
  String description();
}
21 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/operations/RangeFilter.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.operations;
2 |
3 | import org.apache.hadoop.conf.Configuration;
4 |
5 | import edu.umn.cs.spatialHadoop.OperationsParams;
6 | import edu.umn.cs.spatialHadoop.core.Rectangle;
7 | import edu.umn.cs.spatialHadoop.core.ResultCollector;
8 | import edu.umn.cs.spatialHadoop.core.Shape;
9 | import edu.umn.cs.spatialHadoop.indexing.GlobalIndex;
10 | import edu.umn.cs.spatialHadoop.indexing.Partition;
11 | import edu.umn.cs.spatialHadoop.mapred.DefaultBlockFilter;
12 |
13 | /**
14 | * A filter function that selects partitions overlapping with a query range.
15 | * @author Ahmed Eldawy
16 | *
17 | */
18 | public class RangeFilter extends DefaultBlockFilter {
19 | /**Configuration parameter for setting a search query range*/
20 | public static final String QueryRange = "RangeFilter.QueryRange";
21 |
22 | /**A shape that is used to filter input*/
23 | private Shape queryRange;
24 |
25 | public RangeFilter() {}
26 |
27 | public RangeFilter(Shape shape) {
28 | this.queryRange = shape.clone();
29 | }
30 |
31 | @Override
32 | public void configure(Configuration job) {
33 | // If not set in constructor, read queryRange from the job configuration
34 | if (this.queryRange == null)
35 | this.queryRange = OperationsParams.getShape(job, QueryRange);
36 | }
37 |
38 | @Override
39 | public void selectCells(GlobalIndex gIndex,
40 | ResultCollector output) {
41 | int numPartitions;
42 | if (gIndex.isReplicated()) {
43 | // Need to process all partitions to perform duplicate avoidance
44 | numPartitions = gIndex.rangeQuery(queryRange, output);
45 | RangeQuery.LOG.info("Selected "+numPartitions+" partitions overlapping "+queryRange);
46 | } else {
47 | Rectangle queryMBR = this.queryRange.getMBR();
48 | // Need to process only partitions on the perimeter of the query range
49 | // Partitions that are totally contained in query range should not be
50 | // processed and should be copied to output directly
51 | numPartitions = 0;
52 | for (Partition p : gIndex) {
53 | if (queryMBR.contains(p)) {
54 | // TODO partitions totally contained in query range should be copied
55 | // to output directly
56 |
57 | // XXX Until hard links are supported, R-tree blocks are processed
58 | // similar to R+-tree
59 | if (p.isIntersected(queryRange)) {
60 | output.collect(p);
61 | numPartitions++;
62 | }
63 | } else if (p.isIntersected(queryMBR) && p.isIntersected(queryRange)) {
64 | output.collect(p);
65 | numPartitions++;
66 | }
67 | }
68 | RangeQuery.LOG.info("Selected "+numPartitions+" partitions on the perimeter of "+queryMBR);
69 | }
70 | }
71 | }
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/osm/HasTag.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.osm;
10 |
11 | import java.io.IOException;
12 | import java.util.Map;
13 |
14 | import javax.xml.parsers.ParserConfigurationException;
15 |
16 | import org.apache.pig.EvalFunc;
17 | import org.apache.pig.data.Tuple;
18 |
19 | /**
20 | * Checks if the given map contains any combination of the given keys and values
21 | * @author Ahmed Eldawy
22 | *
23 | */
24 | public class HasTag extends EvalFunc {
25 |
26 | public HasTag() throws ParserConfigurationException {
27 | }
28 |
29 | @Override
30 | public Boolean exec(Tuple input) throws IOException {
31 | if (input == null || input.size() == 0)
32 | return null;
33 |
34 | if (input.size() < 2)
35 | throw new IOException("HasTag takes at least two parameters");
36 |
37 | Map tags = (Map) input.get(0);
38 | String keys = (String)input.get(1);
39 | String values = input.size() > 2 ? (String)input.get(2) : null;
40 |
41 | return hasTag(tags, keys, values);
42 | }
43 |
44 | public static boolean hasTag(Map tags, String keys, String values) {
45 | if (values == null) {
46 | for (Map.Entry entry : tags.entrySet())
47 | if (keys.contains(entry.getKey()))
48 | return true;
49 | } else {
50 | for (Map.Entry entry : tags.entrySet())
51 | if (keys.contains(entry.getKey()) && values.contains(entry.getValue()))
52 | return true;
53 | }
54 | return false;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/osm/MapToJson.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.osm;
10 |
11 | import java.io.IOException;
12 | import java.util.Map;
13 |
14 | import javax.xml.parsers.ParserConfigurationException;
15 |
16 | import org.apache.pig.EvalFunc;
17 | import org.apache.pig.data.Tuple;
18 |
19 | /**
20 | * Converts a Key-value hash map to JSON format.
21 | * @author Ahmed Eldawy
22 | *
23 | */
24 | public class MapToJson extends EvalFunc {
25 |
26 | public MapToJson() throws ParserConfigurationException {
27 | }
28 |
29 | @Override
30 | public String exec(Tuple input) throws IOException {
31 | if (input == null || input.size() != 1)
32 | throw new IOException("Invalid number of arguments "+input.size());
33 |
34 | if (!(input.get(0) instanceof Map))
35 | throw new IOException("Invalid argument type "+input.get(0).getClass());
36 |
37 | Map tags = (Map) input.get(0);
38 | StringBuffer result = new StringBuffer();
39 | for (Map.Entry entry : tags.entrySet()) {
40 | result.append("{");
41 | result.append('"'+entry.getKey()+'"');
42 | result.append('=');
43 | result.append('"'+entry.getValue()+'"');
44 | result.append("}");
45 | }
46 | return result.toString();
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/osm/OSMPoint.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.osm;
10 |
11 | import java.io.DataInput;
12 | import java.io.DataOutput;
13 | import java.io.IOException;
14 | import java.util.HashMap;
15 | import java.util.Map;
16 |
17 | import org.apache.hadoop.io.Text;
18 |
19 | import edu.umn.cs.spatialHadoop.core.Point;
20 | import edu.umn.cs.spatialHadoop.io.TextSerializerHelper;
21 |
22 |
23 | public class OSMPoint extends Point {
24 | public long id;
25 | public Map tags = new HashMap();
26 |
27 | @Override
28 | public void fromText(Text text) {
29 | id = TextSerializerHelper.consumeLong(text, '\t');
30 | x = TextSerializerHelper.consumeDouble(text, '\t');
31 | y = TextSerializerHelper.consumeDouble(text, '\t');
32 | if (text.getLength() > 0)
33 | TextSerializerHelper.consumeMap(text, tags);
34 | }
35 |
36 | @Override
37 | public Text toText(Text text) {
38 | TextSerializerHelper.serializeLong(id, text, '\t');
39 | TextSerializerHelper.serializeDouble(x, text, '\t');
40 | TextSerializerHelper.serializeDouble(y, text, tags.isEmpty() ? '\0' : '\t');
41 | TextSerializerHelper.serializeMap(text, tags);
42 | return text;
43 | }
44 |
45 | @Override
46 | public void write(DataOutput out) throws IOException {
47 | out.writeLong(id);
48 | super.write(out);
49 | }
50 |
51 | @Override
52 | public void readFields(DataInput in) throws IOException {
53 | this.id = in.readLong();
54 | super.readFields(in);
55 | }
56 |
57 | @Override
58 | public Point clone() {
59 | OSMPoint c = new OSMPoint();
60 | c.id = id;
61 | c.x = x;
62 | c.y = y;
63 | c.tags = new HashMap(tags);
64 | return c;
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/util/MemoryReporter.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.util;
10 |
11 | import org.apache.commons.logging.Log;
12 | import org.apache.commons.logging.LogFactory;
13 |
14 | /**
15 | * Runs a background daemon thread that keeps reporting the memory usage of
16 | * the JVM
17 | * @author Ahmed Eldawy
18 | *
19 | */
20 | public class MemoryReporter implements Runnable {
21 | static final Log LOG = LogFactory.getLog(MemoryReporter.class);
22 |
23 | private String humanReadable(double size) {
24 | final String[] units = {"", "KB", "MB", "GB", "TB", "PB"};
25 | int unit = 0;
26 | while (unit < units.length && size > 1024) {
27 | size /= 1024;
28 | unit++;
29 | }
30 | return String.format("%.2f %s", size, units[unit]);
31 | }
32 |
33 | @Override
34 | public void run() {
35 | Runtime runtime = Runtime.getRuntime();
36 | while (true) {
37 | LOG.info(String.format("Free memory %s / Total memory %s",
38 | humanReadable(runtime.freeMemory()),
39 | humanReadable(runtime.totalMemory())));
40 | try {
41 | Thread.sleep(1000*60);
42 | } catch (InterruptedException e) {
43 | }
44 | }
45 | }
46 |
47 | public static Thread startReporting() {
48 | Thread thread = new Thread(new MemoryReporter(), "MemReporter");
49 | thread.setDaemon(true);
50 | thread.start();
51 | return thread;
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/util/ResultCollectorSynchronizer.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.util;
10 |
11 | import edu.umn.cs.spatialHadoop.core.ResultCollector;
12 |
13 | /**
14 | * Builds a wrapper around an existing ResultCollector which
15 | * synchronizes all calls to the wrapped ResultCollector.
16 | * @author Ahmed Eldawy
17 | *
18 | */
19 | public class ResultCollectorSynchronizer implements ResultCollector {
20 |
21 | private ResultCollector wrapped;
22 |
23 | public ResultCollectorSynchronizer(ResultCollector wrapped) {
24 | this.wrapped = wrapped;
25 | }
26 |
27 | @Override
28 | public synchronized void collect(T t) {
29 | wrapped.collect(t);
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/util/WritableByteArray.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.util;
10 |
11 | import java.io.DataInput;
12 | import java.io.DataOutput;
13 | import java.io.IOException;
14 |
15 | import org.apache.hadoop.io.Writable;
16 |
17 | /**
18 | * @author Ahmed Eldawy
19 | *
20 | */
21 | public class WritableByteArray implements Writable {
22 | /**The buffer that holds the data*/
23 | private byte[] buffer;
24 | /**Number of correct bytes in the buffer*/
25 | private int length;
26 |
27 | public WritableByteArray() {
28 | }
29 |
30 | public WritableByteArray(byte[] b) {
31 | this.buffer = b;
32 | this.length = b.length;
33 | }
34 |
35 | @Override
36 | public void write(DataOutput out) throws IOException {
37 | out.writeInt(length);
38 | out.write(buffer, 0, length);
39 | }
40 |
41 | @Override
42 | public void readFields(DataInput in) throws IOException {
43 | this.length = in.readInt();
44 | if (this.buffer.length < this.length)
45 | this.buffer = new byte[length];
46 | in.readFully(buffer, 0, this.length);
47 | }
48 |
49 | public void set(byte[] b, int start, int end) {
50 | if (buffer.length < end - start)
51 | buffer = new byte[(end - start) * 2];
52 | if (b != buffer || start != 0)
53 | System.arraycopy(b, start, buffer, 0, end - start);
54 | this.length = end - start;
55 | }
56 |
57 | public int getLength() {
58 | return length;
59 | }
60 |
61 | public byte[] getBuffer() {
62 | return buffer;
63 | }
64 |
65 | public void write(byte[] b, int start, int end) {
66 | this.set(b, start, end);
67 | }
68 |
69 | public byte[] getData() {
70 | return buffer;
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/visualization/BinaryOutputFormat.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.visualization;
10 |
11 | import java.io.IOException;
12 |
13 | import org.apache.hadoop.conf.Configuration;
14 | import org.apache.hadoop.fs.FSDataOutputStream;
15 | import org.apache.hadoop.fs.FileSystem;
16 | import org.apache.hadoop.fs.Path;
17 | import org.apache.hadoop.io.Writable;
18 | import org.apache.hadoop.mapreduce.RecordWriter;
19 | import org.apache.hadoop.mapreduce.TaskAttemptContext;
20 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
21 |
22 | /**
23 | * Writes canvases as images to the output file
24 | * @author Ahmed Eldawy
25 | *
26 | */
27 | public class BinaryOutputFormat extends FileOutputFormat {
28 |
29 | /**
30 | * Writes canvases to a file
31 | * @author Ahmed Eldawy
32 | *
33 | */
34 | class BinaryRecordWriter extends RecordWriter {
35 | /**Plotter used to merge intermediate canvases*/
36 | private FSDataOutputStream out;
37 |
38 | public BinaryRecordWriter(FSDataOutputStream out) throws IOException {
39 | this.out = out;
40 | }
41 |
42 |
43 | @Override
44 | public void write(Writable key, Writable value) throws IOException {
45 | key.write(out);
46 | value.write(out);
47 | }
48 |
49 | @Override
50 | public void close(TaskAttemptContext context) throws IOException,
51 | InterruptedException {
52 | out.close();
53 | }
54 | }
55 |
56 | @Override
57 | public RecordWriter getRecordWriter(
58 | TaskAttemptContext job) throws IOException, InterruptedException {
59 | Configuration conf = job.getConfiguration();
60 | Path file = getDefaultWorkFile(job, "");
61 | FileSystem fs = file.getFileSystem(conf);
62 | FSDataOutputStream fileOut = fs.create(file, false);
63 | return new BinaryRecordWriter(fileOut);
64 | }
65 |
66 |
67 | }
68 |
--------------------------------------------------------------------------------
/src/main/java/edu/umn/cs/spatialHadoop/visualization/Canvas.java:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 | package edu.umn.cs.spatialHadoop.visualization;
10 |
11 | import java.awt.Point;
12 | import java.io.DataInput;
13 | import java.io.DataOutput;
14 | import java.io.IOException;
15 |
16 | import org.apache.hadoop.io.Writable;
17 |
18 | import edu.umn.cs.spatialHadoop.core.Rectangle;
19 |
/**
 * An abstract base class for any canvas: a fixed-size raster that covers a
 * rectangular region (MBR) of the input space and can be serialized as a
 * Hadoop Writable.
 */
public abstract class Canvas implements Writable {
  /**The MBR of the this layer in input coordinates*/
  protected Rectangle inputMBR;

  /**Width of this layer in pixels*/
  protected int width;

  /**Height of this layer in pixels*/
  protected int height;

  /**Default constructor; required for deserialization via {@link #readFields}*/
  public Canvas() {}

  /**
   * Creates a canvas of the given pixel size covering the given input region.
   * @param inputMBR the region of the input space covered by this canvas
   * @param width width of the canvas in pixels
   * @param height height of the canvas in pixels
   */
  public Canvas(Rectangle inputMBR, int width, int height) {
    super();
    this.inputMBR = inputMBR;
    this.width = width;
    this.height = height;
  }

  // Serialization order is MBR, width, height; readFields must match exactly
  @Override
  public void write(DataOutput out) throws IOException {
    inputMBR.getMBR().write(out);
    out.writeInt(width);
    out.writeInt(height);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    // Lazily allocate when deserializing into a default-constructed instance
    if (inputMBR == null)
      inputMBR = new Rectangle();
    inputMBR.readFields(in);
    width = in.readInt();
    height = in.readInt();
  }

  public Rectangle getInputMBR() {
    return inputMBR;
  }

  public int getWidth() {
    return width;
  }

  public int getHeight() {
    return height;
  }

  /**
   * Project a point from input space to image space.
   * Linearly maps the input MBR onto the pixel grid; because of Math.floor,
   * a point exactly on the far edge of the MBR maps to coordinate width
   * (resp. height), i.e., one past the last pixel — callers must clamp if
   * they need an in-range pixel.
   * @param x the x-coordinate in input space
   * @param y the y-coordinate in input space
   * @return the corresponding position in image (pixel) space
   */
  public Point projectToImageSpace(double x, double y) {
    // Calculate the offset of the intermediate layer in the final canvas based on its MBR
    Rectangle finalMBR = this.getInputMBR();
    int imageX = (int) Math.floor((x - finalMBR.x1) * this.getWidth() / finalMBR.getWidth());
    int imageY = (int) Math.floor((y - finalMBR.y1) * this.getHeight() / finalMBR.getHeight());
    return new Point(imageX, imageY);
  }
}
--------------------------------------------------------------------------------
/src/main/package/bin/shadoop:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##########################################################################
4 | # Copyright (c) 2015 by Regents of the University of Minnesota.
5 | # All rights reserved. This program and the accompanying materials
6 | # are made available under the terms of the Apache License, Version 2.0 which
7 | # accompanies this distribution and is available at
8 | # http://www.opensource.org/licenses/apache2.0.php.
9 | #
10 | ##########################################################################
11 |
12 |
# Resolve the absolute directory containing this script
bin=`dirname "$0"`
bin=`cd "$bin" > /dev/null; pwd`

# Call Hadoop with the operations.Main as the main class.
# "$@" (quoted) preserves arguments that contain spaces or glob characters;
# unquoted $@ would re-split them into separate words.
. "$bin"/hadoop edu.umn.cs.spatialHadoop.operations.Main "$@"
18 |
--------------------------------------------------------------------------------
/src/main/package/etc/hadoop/spatial-site.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/src/main/package/etc/hadoop/spatial-site.xml.template:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Set root logger level to DEBUG and its only appender to A1.
2 | log4j.rootLogger=INFO, A1
3 |
4 | # A1 is set to be a ConsoleAppender.
5 | log4j.appender.A1=org.apache.log4j.ConsoleAppender
6 |
7 | # A1 uses PatternLayout.
8 | log4j.appender.A1.layout=org.apache.log4j.PatternLayout
9 | log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
10 |
--------------------------------------------------------------------------------
/src/main/resources/spatial-default.yaml:
--------------------------------------------------------------------------------
1 | # Default configuration for SpatialHadoop
2 |
3 | # Short names for common shapes
4 | Shapes:
5 | point: edu.umn.cs.spatialHadoop.core.Point
6 | rect: edu.umn.cs.spatialHadoop.core.Rectangle
7 | wkt: edu.umn.cs.spatialHadoop.core.OGCJTSShape
8 | tiger: edu.umn.cs.spatialHadoop.TigerShape
9 | osm: edu.umn.cs.spatialHadoop.osm.OSMPolygon
10 | osmpoly: edu.umn.cs.spatialHadoop.osm.OSMPolygon
11 | osmpoint: edu.umn.cs.spatialHadoop.osm.OSMPoint
12 | ogc: edu.umn.cs.spatialHadoop.core.OGCJTSShape
13 | nasapoint: edu.umn.cs.spatialHadoop.nasa.NASAPoint
14 | nasarect: edu.umn.cs.spatialHadoop.nasa.NASARectangle
15 | csv: edu.umn.cs.spatialHadoop.core.CSVOGC
16 |
17 | # Short names for common global indexes
18 | GlobalIndexes:
19 | - edu.umn.cs.spatialHadoop.indexing.GridPartitioner
20 | - edu.umn.cs.spatialHadoop.indexing.STRPartitioner
21 | - edu.umn.cs.spatialHadoop.indexing.RTreeGBPartitioner
22 | - edu.umn.cs.spatialHadoop.indexing.AbstractRTreeGBPartitioner$RStarTreeGBPartitioner
23 | - edu.umn.cs.spatialHadoop.indexing.AbstractRTreeGBPartitioner$RRStarTreeGBPartitioner
24 | - edu.umn.cs.spatialHadoop.indexing.AbstractRTreeBBPartitioner$RTreeGuttmanBBPartitioner
25 | - edu.umn.cs.spatialHadoop.indexing.AbstractRTreeBBPartitioner$RStarTreeBBPartitioner
26 | - edu.umn.cs.spatialHadoop.indexing.AbstractRTreeBBPartitioner$RRStarTreeBBPartitioner
27 | - edu.umn.cs.spatialHadoop.indexing.KdTreePartitioner
28 | - edu.umn.cs.spatialHadoop.indexing.ZCurvePartitioner
29 | - edu.umn.cs.spatialHadoop.indexing.HilbertCurvePartitioner
30 | - edu.umn.cs.spatialHadoop.indexing.QuadTreePartitioner
31 |
32 | # Short names for common local indexes
33 | LocalIndexes:
34 | - edu.umn.cs.spatialHadoop.indexing.RRStarLocalIndex
35 |
36 | # Short names for spatial indexes that combine a global index, a local index, and a disjoint flag
37 | SpatialIndexes:
38 | - short-name: grid
39 | gindex: grid
40 | disjoint: true
41 |
42 | - short-name: str
43 | gindex: str
44 |
45 | - short-name: str+
46 | gindex: str
47 | disjoint: true
48 |
49 | - short-name: rtree
50 | gindex: rstar
51 | lindex: rrstar
52 |
53 | - short-name: r+tree
54 | gindex: rstar
55 | lindex: rrstar
56 | disjoint: true
57 |
58 | - short-name: quadtree
59 | gindex: quadtree
60 | disjoint: true
61 |
62 | - short-name: kdtree
63 | gindex: kdtree
64 | disjoint: true
65 |
66 | - short-name: zcurve
67 | gindex: zcurve
68 |
69 | - short-name: hilbert
70 | gindex: hilbert
71 |
--------------------------------------------------------------------------------
/src/main/resources/spatial-operations.yaml:
--------------------------------------------------------------------------------
1 | # List of operations accessible from the command line
2 |
3 | # Fully-qualified class names of the operations exposed on the command line
4 | - edu.umn.cs.spatialHadoop.indexing.Indexer
5 | - edu.umn.cs.spatialHadoop.indexing.IndexInsert
6 | - edu.umn.cs.spatialHadoop.operations.RangeQuery
7 | - edu.umn.cs.spatialHadoop.operations.KNN
8 | - edu.umn.cs.spatialHadoop.operations.SJMR
9 | - edu.umn.cs.spatialHadoop.operations.DistributedJoin
10 | - edu.umn.cs.spatialHadoop.operations.FileMBR
11 | - edu.umn.cs.spatialHadoop.operations.Sampler
12 | - edu.umn.cs.spatialHadoop.operations.RandomSpatialGenerator
13 | - edu.umn.cs.spatialHadoop.operations.Union
14 | - edu.umn.cs.spatialHadoop.operations.UltimateUnion
15 | - edu.umn.cs.spatialHadoop.operations.Skyline
16 | - edu.umn.cs.spatialHadoop.operations.ConvexHull
17 | - edu.umn.cs.spatialHadoop.operations.FarthestPair
18 | - edu.umn.cs.spatialHadoop.operations.ClosestPair
19 | - edu.umn.cs.spatialHadoop.operations.DistributedCopy
20 | - edu.umn.cs.spatialHadoop.ReadFile
21 | - edu.umn.cs.spatialHadoop.delaunay.DelaunayTriangulation
22 | - edu.umn.cs.spatialHadoop.nasa.MultiHDFPlot
23 | - edu.umn.cs.spatialHadoop.nasa.HDFPlot
24 | - edu.umn.cs.spatialHadoop.visualization.GeometricPlot
25 | - edu.umn.cs.spatialHadoop.visualization.HeatMapPlot
26 | - edu.umn.cs.spatialHadoop.nasa.ShahedServer
27 | - edu.umn.cs.spatialHadoop.visualization.HadoopvizServer
28 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/hdfs.old/head.jsp:
--------------------------------------------------------------------------------
1 | <%@ page
2 | contentType="text/html; charset=UTF-8"
3 | import="org.apache.hadoop.fs.*"
4 | import="org.apache.hadoop.io.LongWritable"
5 | import="org.apache.hadoop.io.Text"
6 | import="edu.umn.cs.spatialHadoop.core.SpatialSite"
7 | import="java.io.BufferedReader"
8 | import="org.apache.hadoop.http.HtmlQuoting"
9 | import="org.apache.hadoop.hdfs.server.namenode.JspHelper"
10 | import="org.apache.hadoop.conf.Configuration"
11 | import="org.apache.hadoop.mapred.LineRecordReader"
12 | import="org.apache.hadoop.mapred.FileSplit"
13 | %>
14 |
15 | <%! private static final long serialVersionUID = 1L;%>
16 | <%! static JspHelper jspHelper = new JspHelper(); %>
17 |
18 | <%
19 | Configuration conf = (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
20 | String paramFile = request.getParameter("file");
21 | String paramLines = request.getParameter("lines");
22 | int numLines = paramLines == null? 10 : Integer.parseInt(paramLines);
23 | if (paramFile != null) {
24 | Path filePath = new Path(HtmlQuoting.unquoteHtmlChars(paramFile));
25 | FileSystem fs = filePath.getFileSystem(conf);
26 | if (fs.getFileStatus(filePath).isDir()) {
27 | // Directory selected, head one of the data files (any non hidden file)
28 | FileStatus[] dataFiles = fs.listStatus(filePath, SpatialSite.NonHiddenFileFilter);
29 | if (dataFiles.length == 0) {
30 | response.sendError(404, "No data files");
31 | filePath = null;
32 | } else {
33 | filePath = dataFiles[0].getPath();
34 | }
35 | }
36 | if (filePath != null) {
37 | LineRecordReader reader = new LineRecordReader(conf, new FileSplit(filePath, 0, 4096 * numLines, new String[0]));
38 | Text line = new Text();
39 | LongWritable offset = new LongWritable();
40 | while (numLines-- > 0 && reader.next(offset, line)) {
41 | out.println(line);
42 | }
43 | reader.close();
44 | }
45 | }
46 | %>
--------------------------------------------------------------------------------
/src/main/resources/webapps/hdfs.old/preview-script.jsp:
--------------------------------------------------------------------------------
1 | <%@ page
2 | contentType="text/html; charset=UTF-8"
3 | import="org.apache.hadoop.fs.*"
4 | import="org.apache.hadoop.io.LongWritable"
5 | import="org.apache.hadoop.io.Text"
6 | import="java.io.File"
7 | import="java.io.FileInputStream"
8 | import="java.io.InputStream"
9 | import="java.io.FilenameFilter"
10 | import="java.io.PrintStream"
11 | import="edu.umn.cs.spatialHadoop.core.SpatialSite"
12 | import="java.io.BufferedReader"
13 | import="org.apache.hadoop.http.HtmlQuoting"
14 | import="org.apache.hadoop.hdfs.server.namenode.JspHelper"
15 | import="edu.umn.cs.spatialHadoop.util.JspSpatialHelper"
16 | import="org.apache.hadoop.conf.Configuration"
17 | import="org.apache.hadoop.mapred.LineRecordReader"
18 | import="org.apache.hadoop.mapred.FileSplit"
19 | %>
20 |
21 | <%! private static final long serialVersionUID = 1L;%>
22 | <%! static JspHelper jspHelper = new JspHelper(); %>
23 |
24 | <%
25 | Configuration conf = (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
26 | String pigeonTempDir = conf.get("pigeon.tmp", ".");
27 | String strScriptId = request.getParameter("id");
28 | String part = request.getParameter("part");
29 | if (strScriptId == null) {
30 | response.sendError(500, "Parameter 'id' missing");
31 | } else {
32 | int scriptId = Integer.parseInt(strScriptId);
33 | File scriptDir = new File(pigeonTempDir, String.format("pigeon_%04d", scriptId));
34 | if (!scriptDir.isDirectory()) {
35 | response.sendError(404, "Script #"+scriptId+" does not exist in path '"+scriptDir+"'");
36 | } else {
37 | if (part.equals("body")) {
38 | // Retrieve script body (Pig script)
39 | File scriptBodyFile = new File(scriptDir, "script.pig");
40 | byte[] scriptBodyBytes = new byte[(int)scriptBodyFile.length()];
41 | InputStream in = new FileInputStream(scriptBodyFile);
42 | in.read(scriptBodyBytes);
43 | in.close();
44 |
45 | out.print(new String(scriptBodyBytes));
46 | } else if (part.equals("log")) {
47 | // Retrieve script progress from log
48 | String[] logFile = scriptDir.list(new FilenameFilter() {
49 | public boolean accept(File dir, String name) {
50 | return name.toLowerCase().endsWith("log");
51 | }
52 | });
53 | if (logFile.length > 0) {
54 | File scriptLogFile = new File(scriptDir, logFile[0]);
55 | byte[] scriptLogBytes = new byte[(int)scriptLogFile.length()];
56 | InputStream in = new FileInputStream(scriptLogFile);
57 | in.read(scriptLogBytes);
58 | in.close();
59 |
60 | out.print(new String(scriptLogBytes));
61 | } else {
62 | response.sendError(404, "Log file not found");
63 | }
64 | }
65 | }
66 | }
67 |
68 | %>
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/css/hadoopviz.css:
--------------------------------------------------------------------------------
1 | #current-path {width: 80%;}
2 |
3 | #global-index svg {}
4 | #global-index svg rect {fill: none; stroke: black; stroke-width: 0.5px;}
5 |
6 | #footer { width: 100%; border-top: 1px blue solid}
7 |
8 | .ext {color: darkred;}
9 |
10 | .ext.txt:before, .ext.log:before
11 | { content: "\f0f6"}
12 |
13 | .ext.png:before, .ext.gif:before, .ext.jpg:before, .ext.svg:before
14 | { content: "\f1c5"}
15 |
16 | .ext.bz2:before, .ext.gz:before, .ext.zip:before, .ext.jar:before
17 | { content: "\f1c6"}
18 |
19 | .ext.xml:before, .ext.md:before, .ext.ini:before
20 | { content: "\f1c9"}
21 |
22 | .ext.html:before
23 | { content: "\f0ac"}
24 |
25 | .ext.exe:before
26 | { content: "\f085"}
27 |
28 | .ext.csv:before
29 | { content: "\f0ce"}
30 |
31 | .ext.sh:before, .ext.cmd:before, .ext.bat:before
32 | { content: "\f120"}
33 |
34 | .ext.pdf:before
35 | { content: "\f1c1"}
36 |
37 | .ext.doc:before, .ext.docx:before
38 | { content: "\f1c2"}
39 |
40 | .ext.xls:before, .ext.xlsx:before
41 | { content: "\f1c3"}
42 |
43 | .ext:before
44 | { content: "\f016"}
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/css/responstable.css:
--------------------------------------------------------------------------------
1 | /*
2 |
3 | RESPONSTABLE 2.0 by jordyvanraaij
4 | Designed mobile first!
5 |
6 | If you like this solution, you might also want to check out the 1.0 version:
7 | https://gist.github.com/jordyvanraaij/9069194
8 |
9 | */
10 | .responstable {
11 | margin: 1em 0;
12 | width: 100%;
13 | overflow: hidden;
14 | background: #FFF;
15 | color: #024457;
16 | border-radius: 10px;
17 | border: 1px solid #167F92;
18 | }
19 | .responstable tr {
20 | border: 1px solid #D9E4E6;
21 | }
22 | .responstable tr:nth-child(odd) {
23 | background-color: #EAF3F3;
24 | }
25 | .responstable th {
26 | display: none;
27 | border: 1px solid #FFF;
28 | background-color: #167F92;
29 | color: #FFF;
30 | padding: 1em;
31 | }
32 | .responstable th:first-child {
33 | display: table-cell;
34 | text-align: center;
35 | }
36 | .responstable th:nth-child(2) {
37 | display: table-cell;
38 | }
39 | .responstable th:nth-child(2) span {
40 | display: none;
41 | }
42 | .responstable th:nth-child(2):after {
43 | content: attr(data-th);
44 | }
45 | @media (min-width: 480px) {
46 | .responstable th:nth-child(2) span {
47 | display: block;
48 | }
49 | .responstable th:nth-child(2):after {
50 | display: none;
51 | }
52 | }
53 | .responstable td {
54 | display: block;
55 | word-wrap: break-word;
56 | max-width: 7em;
57 | }
58 | .responstable td:first-child {
59 | display: table-cell;
60 | text-align: center;
61 | border-right: 1px solid #D9E4E6;
62 | }
63 | @media (min-width: 480px) {
64 | .responstable td {
65 | border: 1px solid #D9E4E6;
66 | }
67 | }
68 | .responstable th, .responstable td {
69 | text-align: left;
70 | margin: .5em 1em;
71 | }
72 | @media (min-width: 480px) {
73 | .responstable th, .responstable td {
74 | display: table-cell;
75 | padding: 1em;
76 | }
77 | }
78 |
79 | body {
80 | padding: 0 2em;
81 | font-family: Arial, sans-serif;
82 | color: #024457;
83 | background: #f2f2f2;
84 | }
85 |
86 | h1 {
87 | font-family: Verdana;
88 | font-weight: normal;
89 | color: #024457;
90 | }
91 | h1 span {
92 | color: #167F92;
93 | }
94 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/fonts/FontAwesome.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/hadoopviz/fonts/FontAwesome.otf
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.eot
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.ttf
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.woff
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/hadoopviz/fonts/fontawesome-webfont.woff2
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/hadoopviz/js/display.js:
--------------------------------------------------------------------------------
1 | function display() {
2 | var outputPath = $('input[name="output"]:checked').val();
3 | var requestURL = "cgi-bin/fetch_result.cgi?path=" + outputPath;
4 | var requestInfoURL = "cgi-bin/get_output_info.cgi?path=" + outputPath;
5 | jQuery.ajax(requestInfoURL, {success: function(response) {
6 | document.getElementById("statistics").innerHTML = " Statistics
Input Size: "
7 | + response.inputSize + ".
" +
8 | "Intermediate Size: " + response.intermediateSize + ".
" +
9 | "Intermediate Groups: " + response.intermediateGroup+ ".
" +
10 | "Job ID: " + response.jobID+ ".
" +
11 | "Job URL: " + response.jobURL+ ".
" +
12 | ;
13 | }});
14 | jQuery.ajax(requestURL, {success: function(response) {
15 | document.getElementById("result").innerHTML = response;
16 | }});
17 | }
18 |
19 | function refresh() {
20 | var requestURL = "cgi-bin/generate_output_list.cgi";
21 | jQuery.ajax(requestURL, {success: function(response) {
22 | var index;
23 | var text = "";
24 | for (index = 0; index < response.length; index++) {
25 | text += "" + response[index] + "
";
26 | }
27 | document.getElementById("output").innerHTML = text;
28 | }});
29 | }
30 |
31 | $(function () {
32 | var requestURL = "cgi-bin/generate_output_list.cgi";
33 | jQuery.ajax(requestURL, {success: function(response) {
34 | var index;
35 | var text = "";
36 | for (index = 0; index < response.length; index++) {
37 | text += "" + response[index] + "
";
38 | }
39 | document.getElementById("output").innerHTML = text;
40 | }});
41 | })
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/pigeon/pigeon.js:
--------------------------------------------------------------------------------
1 | $(document).ready(function() {
2 | function headSelected() {
3 | var fselector = $("#file-selector");
4 | var selected = fselector.find(":selected");
5 | if (selected.size() == 0) {
6 | $('#preview-head').html('not available');
7 | } else {
8 | $.ajax({
9 | type: "GET",
10 | url: "/head.jsp",
11 | data: {
12 | file: selected.val(),
13 | lines: 10,
14 | },
15 | success: function(response) {
16 | $('#preview-head').html(response.trim());
17 | }, error: function(xhr, error) {
18 | $('#preview-head').html('not available');
19 | //alert(xhr.responseText);
20 | }
21 | });
22 | }
23 | }
24 |
25 | $("#file-selector").change(headSelected);
26 | $("#file-selector").dblclick(function() {
27 | var fselector = $("#file-selector");
28 | var selected = fselector.find(":selected");
29 | if (selected.size() > 0) {
30 | window.location.href = "pigeon.jsp?dir="+selected.val();
31 | }
32 | });
33 |
34 | // For initial case
35 | if (!run_script)
36 | headSelected();
37 |
38 | function previewScript() {
39 | var scriptSelector = $("#pigeon-scripts");
40 | var selected = scriptSelector.find(":selected");
41 | if (selected.size() == 0) {
42 | $('#preview-head').html('not available');
43 | } else {
44 | $('#script-name').val(selected.text());
45 |
46 | // Retrieve script body
47 | $.ajax({
48 | type: "GET",
49 | url: "/preview-script.jsp",
50 | data: {
51 | id: selected.val(),
52 | part: "body"
53 | },
54 | success: function(response) {
55 | $('#script-body').val(response.trim());
56 | }, error: function(xhr, error) {
57 | $('#script-body').val('not available');
58 | alert(xhr.responseText);
59 | }
60 | });
61 |
62 | // Retrieve script log
63 | $.ajax({
64 | type: "GET",
65 | url: "/preview-script.jsp",
66 | data: {
67 | id: selected.val(),
68 | part: "log"
69 | },
70 | success: function(response) {
71 | $('#preview-head').html(response.trim());
72 | }, error: function(xhr, error) {
73 | $('#preview-head').html(xhr.responseText);
74 | }
75 | });
76 | }
77 |
78 | }
79 |
80 | $('#pigeon-scripts').change(previewScript);
81 | });
82 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/pigeon/pigeon_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/pigeon/pigeon_logo.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/d3/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/d3/.DS_Store
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/d3/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2012, Michael Bostock
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | * The name Michael Bostock may not be used to endorse or promote products
15 | derived from this software without specific prior written permission.
16 |
17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 | DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
21 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
22 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
25 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
26 | EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/d3/d3.silder.css:
--------------------------------------------------------------------------------
1 | .d3-slider {
2 | position: relative;
3 | font-family: Verdana,Arial,sans-serif;
4 | font-size: 1.1em;
5 | border: 1px solid #aaaaaa;
6 | z-index: 2;
7 | }
8 |
9 | .d3-slider-horizontal {
10 | height: .8em;
11 | }
12 |
13 | .d3-slider-range {
14 | background:#2980b9;
15 | left:0px;
16 | right:0px;
17 | height: 0.8em;
18 | position: absolute;
19 | }
20 |
21 | .d3-slider-range-vertical {
22 | background:#2980b9;
23 | left:0px;
24 | right:0px;
25 | position: absolute;
26 | top:0;
27 | }
28 |
29 | .d3-slider-vertical {
30 | width: .8em;
31 | height: 100px;
32 | }
33 |
34 | .d3-slider-handle {
35 | position: absolute;
36 | width: 1.2em;
37 | height: 1.2em;
38 | border: 1px solid #d3d3d3;
39 | border-radius: 4px;
40 | background: #eee;
41 | background: linear-gradient(to bottom, #eee 0%, #ddd 100%);
42 | z-index: 3;
43 | }
44 |
45 | .d3-slider-handle:hover {
46 | border: 1px solid #999999;
47 | }
48 |
49 | .d3-slider-horizontal .d3-slider-handle {
50 | top: -.3em;
51 | margin-left: -.6em;
52 | }
53 |
54 | .d3-slider-axis {
55 | position: relative;
56 | z-index: 1;
57 | }
58 |
59 | .d3-slider-axis-bottom {
60 | top: .8em;
61 | }
62 |
63 | .d3-slider-axis-right {
64 | left: .8em;
65 | }
66 |
67 | .d3-slider-axis path {
68 | stroke-width: 0;
69 | fill: none;
70 | }
71 |
72 | .d3-slider-axis line {
73 | fill: none;
74 | stroke: #aaa;
75 | shape-rendering: crispEdges;
76 | }
77 |
78 | .d3-slider-axis text {
79 | font-size: 11px;
80 | }
81 |
82 | .d3-slider-vertical .d3-slider-handle {
83 | left: -.25em;
84 | margin-left: 0;
85 | margin-bottom: -.6em;
86 | }
87 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/data/.DS_Store
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/data-alt.tsv:
--------------------------------------------------------------------------------
1 | date close
2 | 10-May-12 99.55
3 | 8-May-12 76.86
4 | 6-May-12 67.62
5 | 4-May-12 64.48
6 | 2-May-12 60.98
7 | 1-May-12 58.13
8 | 30-Apr-12 53.98
9 | 27-Apr-12 67.00
10 | 26-Apr-12 89.70
11 | 25-Apr-12 99.00
12 | 24-Apr-12 90.28
13 | 23-Apr-12 106.70
14 | 20-Apr-12 94.98
15 | 19-Apr-12 85.44
16 | 18-Apr-12 73.34
17 | 17-Apr-12 53.70
18 | 16-Apr-12 50.13
19 | 13-Apr-12 65.23
20 | 12-Apr-12 62.77
21 | 11-Apr-12 66.20
22 | 10-Apr-12 68.44
23 | 9-Apr-12 66.23
24 | 5-Apr-12 63.68
25 | 4-Apr-12 64.31
26 | 3-Apr-12 69.32
27 | 2-Apr-12 61.63
28 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/data.csv:
--------------------------------------------------------------------------------
1 | date,close
2 | 1-May-12,58.13
3 | 30-Apr-12,53.98
4 | 27-Apr-12,67.00
5 | 26-Apr-12,89.70
6 | 25-Apr-12,99.00
7 | 24-Apr-12,130.28
8 | 23-Apr-12,166.70
9 | 20-Apr-12,234.98
10 | 19-Apr-12,345.44
11 | 18-Apr-12,443.34
12 | 17-Apr-12,543.70
13 | 16-Apr-12,580.13
14 | 13-Apr-12,605.23
15 | 12-Apr-12,622.77
16 | 11-Apr-12,626.20
17 | 10-Apr-12,628.44
18 | 9-Apr-12,636.23
19 | 5-Apr-12,633.68
20 | 4-Apr-12,624.31
21 | 3-Apr-12,629.32
22 | 2-Apr-12,618.63
23 | 30-Mar-12,599.55
24 | 29-Mar-12,609.86
25 | 28-Mar-12,617.62
26 | 27-Mar-12,614.48
27 | 26-Mar-12,606.98
28 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/data.tsv:
--------------------------------------------------------------------------------
1 | date close
2 | 1-May-12 58.13
3 | 30-Apr-12 53.98
4 | 27-Apr-12 67.00
5 | 26-Apr-12 89.70
6 | 25-Apr-12 99.00
7 | 24-Apr-12 130.28
8 | 23-Apr-12 166.70
9 | 20-Apr-12 234.98
10 | 19-Apr-12 345.44
11 | 18-Apr-12 443.34
12 | 17-Apr-12 543.70
13 | 16-Apr-12 580.13
14 | 13-Apr-12 605.23
15 | 12-Apr-12 622.77
16 | 11-Apr-12 626.20
17 | 10-Apr-12 628.44
18 | 9-Apr-12 636.23
19 | 5-Apr-12 633.68
20 | 4-Apr-12 624.31
21 | 3-Apr-12 629.32
22 | 2-Apr-12 618.63
23 | 30-Mar-12 599.55
24 | 29-Mar-12 609.86
25 | 28-Mar-12 617.62
26 | 27-Mar-12 614.48
27 | 26-Mar-12 606.98
28 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/data2.csv:
--------------------------------------------------------------------------------
1 | 1-May-12,58.13,34.12
2 | 30-Apr-12,53.98,45.56
3 | 27-Apr-12,67.00,67.89
4 | 26-Apr-12,89.70,78.54
5 | 25-Apr-12,99.00,89.23
6 | 24-Apr-12,130.28,99.23
7 | 23-Apr-12,166.70,101.34
8 | 20-Apr-12,234.98,122.34
9 | 19-Apr-12,345.44,134.56
10 | 18-Apr-12,443.34,160.45
11 | 17-Apr-12,543.70,180.34
12 | 16-Apr-12,580.13,210.23
13 | 13-Apr-12,605.23,223.45
14 | 12-Apr-12,622.77,201.56
15 | 11-Apr-12,626.20,212.67
16 | 10-Apr-12,628.44,310.45
17 | 9-Apr-12,636.23,350.45
18 | 5-Apr-12,633.68,410.23
19 | 4-Apr-12,624.31,430.56
20 | 3-Apr-12,629.32,460.34
21 | 2-Apr-12,618.63,510.34
22 | 30-Mar-12,599.55,534.23
23 | 29-Mar-12,609.86,578.23
24 | 28-Mar-12,617.62,590.12
25 | 27-Mar-12,614.48,560.34
26 | 26-Mar-12,606.98,580.12
27 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/data2.tsv:
--------------------------------------------------------------------------------
1 | date close open
2 | 1-May-12 58.13 3.41
3 | 30-Apr-12 53.98 4.55
4 | 27-Apr-12 67.00 6.78
5 | 26-Apr-12 89.70 7.85
6 | 25-Apr-12 99.00 8.92
7 | 24-Apr-12 130.28 9.92
8 | 23-Apr-12 166.70 10.13
9 | 20-Apr-12 234.98 12.23
10 | 19-Apr-12 345.44 13.45
11 | 18-Apr-12 443.34 16.04
12 | 17-Apr-12 543.70 18.03
13 | 16-Apr-12 580.13 21.02
14 | 13-Apr-12 605.23 22.34
15 | 12-Apr-12 622.77 20.15
16 | 11-Apr-12 626.20 21.26
17 | 10-Apr-12 628.44 31.04
18 | 9-Apr-12 636.23 35.04
19 | 5-Apr-12 633.68 41.02
20 | 4-Apr-12 624.31 43.05
21 | 3-Apr-12 629.32 46.03
22 | 2-Apr-12 618.63 51.03
23 | 30-Mar-12 599.55 53.42
24 | 29-Mar-12 609.86 57.82
25 | 28-Mar-12 617.62 59.01
26 | 27-Mar-12 614.48 56.03
27 | 26-Mar-12 606.98 58.01
28 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/sankey-formatted.json:
--------------------------------------------------------------------------------
1 | {
2 | "nodes":[
3 | {"node":0,"name":"node0"},
4 | {"node":1,"name":"node1"},
5 | {"node":2,"name":"node2"},
6 | {"node":3,"name":"node3"},
7 | {"node":4,"name":"node4"}
8 | ],
9 | "links":[
10 | {"source":0,"target":2,"value":2},
11 | {"source":1,"target":2,"value":2},
12 | {"source":1,"target":3,"value":2},
13 | {"source":0,"target":4,"value":2},
14 | {"source":2,"target":3,"value":2},
15 | {"source":2,"target":4,"value":2},
16 | {"source":3,"target":4,"value":4}
17 | ]}
18 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/data/sankey.csv:
--------------------------------------------------------------------------------
1 | source,target,value
2 | Barry,Elvis,2
3 | Frodo,Elvis,2
4 | Frodo,Sarah,2
5 | Barry,Alice,2
6 | Elvis,Sarah,2
7 | Elvis,Alice,2
8 | Sarah,Alice,4
9 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/images/wait.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/images/wait.gif
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SHAHED
6 |
7 |
8 |
9 |
10 |
15 |
16 |
17 |
18 |
19 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_diagonals-thick_18_b81900_40x40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_diagonals-thick_18_b81900_40x40.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_diagonals-thick_20_666666_40x40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_diagonals-thick_20_666666_40x40.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_flat_10_000000_40x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_flat_10_000000_40x100.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_100_f6f6f6_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_100_f6f6f6_1x400.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_100_fdf5ce_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_100_fdf5ce_1x400.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_65_ffffff_1x400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_glass_65_ffffff_1x400.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_gloss-wave_35_f6a828_500x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_gloss-wave_35_f6a828_500x100.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_222222_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_222222_256x240.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_228ef1_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_228ef1_256x240.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ef8c08_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ef8c08_256x240.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ffd27a_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ffd27a_256x240.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ffffff_256x240.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/shahedfrontend/jquery-ui/images/ui-icons_ffffff_256x240.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/shahedfrontend/respe.html:
--------------------------------------------------------------------------------
1 | {"results":{"points":[{"lat":24.000000, "lon":33.000000, "timestamp":"11-02-2013", "value":"15513"},{"lat":24.000000, "lon":33.000000, "timestamp":"22-08-2013", "value":"16295"},{"lat":24.000000, "lon":33.000000, "timestamp":"26-03-2013", "value":"16309"},{"lat":24.000000, "lon":33.000000, "timestamp":"12-02-2013", "value":"15488"},{"lat":24.000000, "lon":33.000000, "timestamp":"13-02-2013", "value":"15295"},{"lat":24.000000, "lon":33.000000, "timestamp":"07-07-2013", "value":"16357"},{"lat":24.000000, "lon":33.000000, "timestamp":"14-02-2013", "value":"15778"},{"lat":24.000000, "lon":33.000000, "timestamp":"01-01-2013", "value":"15217"},{"lat":24.000000, "lon":33.000000, "timestamp":"19-05-2013", "value":"15960"},{"lat":24.000000, "lon":33.000000, "timestamp":"15-11-2013", "value":"15736"},{"lat":24.000000, "lon":33.000000, "timestamp":"03-10-2013", "value":"16203"},{"lat":24.000000, "lon":33.000000, "timestamp":"15-02-2013", "value":"15535"},{"lat":24.000000, "lon":33.000000, "timestamp":"18-11-2013", "value":"15454"},{"lat":24.000000, "lon":33.000000, "timestamp":"21-05-2013", "value":"16335"},{"lat":24.000000, "lon":33.000000, "timestamp":"09-07-2013", "value":"16077"},{"lat":24.000000, "lon":33.000000, "timestamp":"17-02-2013", "value":"15562"},{"lat":24.000000, "lon":33.000000, "timestamp":"23-08-2013", "value":"16219"},{"lat":24.000000, "lon":33.000000, "timestamp":"02-01-2013", "value":"15233"},{"lat":24.000000, "lon":33.000000, "timestamp":"22-05-2013", "value":"16578"},{"lat":24.000000, "lon":33.000000, "timestamp":"29-03-2013", "value":"16249"},{"lat":24.000000, "lon":33.000000, "timestamp":"18-02-2013", "value":"15551"},{"lat":24.000000, "lon":33.000000, "timestamp":"19-11-2013", "value":"15537"},{"lat":24.000000, "lon":33.000000, "timestamp":"20-11-2013", "value":"15546"},{"lat":24.000000, "lon":33.000000, "timestamp":"30-03-2013", "value":"16321"},{"lat":24.000000, "lon":33.000000, "timestamp":"06-10-2013", "value":"15994"},{"lat":24.000000, 
"lon":33.000000, "timestamp":"07-10-2013", "value":"16017"},{"lat":24.000000, "lon":33.000000, "timestamp":"20-02-2013", "value":"15719"},{"lat":24.000000, "lon":33.000000, "timestamp":"04-01-2013", "value":"15298"},{"lat":24.000000, "lon":33.000000, "timestamp":"24-08-2013", "value":"16103"},{"lat":24.000000, "lon":33.000000, "timestamp":"26-08-2013", "value":"16325"},{"lat":24.000000, "lon":33.000000, "timestamp":"31-03-2013", "value":"16072"},{"lat":24.000000, "lon":33.000000, "timestamp":"21-11-2013", "value":"15625"},{"lat":24.000000, "lon":33.000000, "timestamp":"10-10-2013", "value":"16125"},{"lat":24.000000, "lon":33.000000, "timestamp":"24-11-2013", "value":"15924"},{"lat":24.000000, "lon":33.000000, "timestamp":"23-05-2013", "value":"16499"},],"result-size":0},"stats":{"totaltime":11577,"num-of-temporal-partitions":282,"num-of-trees":0}}
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/add.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/add.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/areawater.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/areawater.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/cells.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/cells.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/delete.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/delete.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/frontend.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/frontend.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/frontend_progress.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/frontend_progress.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/frontend_sjoin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/frontend_sjoin.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/roads_rivers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/roads_rivers.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/shadoop_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aseldawy/spatialhadoop2/225ddf51118313390d3aaf663032d4ccf0b3f1a1/src/main/resources/webapps/static/visualizer/shadoop_logo.png
--------------------------------------------------------------------------------
/src/main/resources/webapps/static/visualizer/visualizer.css:
--------------------------------------------------------------------------------
1 | .toolbar div { float: left; width: 32px; height: 32px; border: solid 1px black; }
2 | .operations div { float: left; height: 32px; border: solid 1px black; }
3 |
4 | .dialog {
5 | position: absolute;
6 | left: 300px;
7 | top: 150px;
8 | z-index: 100;
9 | background-color: lightgrey;
10 | border: solid 2px black;
11 | display: none;
12 | }
13 | .dialog .title {
14 | width: 100%;
15 | height: 32px;
16 | background-color: lightblue;
17 | text-align: center;
18 | border-bottom: solid 1px black;
19 | }
20 | .dialog .label {
21 | width: 120px;
22 | float: left;
23 | text-align: center;
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/resources/zoom_view.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Image map types
6 |
13 |
14 |
41 |
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/src/main/scripts/pig/edge_node_join.pig:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 |
-- Load the road-network nodes: one record per node with its coordinates.
nodes = LOAD '/node.csv' USING PigStorage(',')
  AS (node_id:long, lat:double, lon:double);

-- Load the road edges; each edge references its two endpoint nodes by id.
edges = LOAD '/road_edges.csv' USING PigStorage(',')
  AS (edge_id:long, node_id1:long, node_id2:long, way_id:long, tags);

-- Resolve the first endpoint of every edge to its coordinates.
edges_join1 = JOIN edges BY node_id1,
  nodes BY node_id;

-- Resolve the second endpoint by joining against nodes a second time.
edges_join2 = JOIN edges_join1 BY node_id2,
  nodes BY node_id;

-- Project the fully-resolved edge: id, both endpoints with their (lat, lon),
-- the originating way id, and the raw tags column.
edges_join = FOREACH edges_join2
  GENERATE edge_id AS edge_id,
  node_id1 AS node_id1, edges_join1::nodes::lat AS lat1, edges_join1::nodes::lon AS lon1,
  node_id2 AS node_id2, nodes::lat AS lat2, nodes::lon AS lon2,
  way_id AS way_id, tags AS tags;

STORE edges_join INTO 'edges_join.csv' USING PigStorage(',');
--------------------------------------------------------------------------------
/src/main/scripts/pig/splitter.pig:
--------------------------------------------------------------------------------
1 | /***********************************************************************
2 | * Copyright (c) 2015 by Regents of the University of Minnesota.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the Apache License, Version 2.0 which
5 | * accompanies this distribution and is available at
6 | * http://www.opensource.org/licenses/apache2.0.php.
7 | *
8 | *************************************************************************/
9 |
-- Splits the OSM all_ways extract into thematic layers (roads, rivers,
-- lakes, cities, parks) by filtering on well-known OSM tags.
REGISTER osmx.jar;
-- Fixed: this REGISTER statement was missing its terminating semicolon,
-- which made the Pig parser fail on the following statement.
REGISTER pigeon.jar;
REGISTER esri-geometry-api-1.0.jar;

IMPORT 'pigeon_import.pig';

-- All OSM ways with their tags as a map; geometry is left untyped.
all_ways = LOAD '/all_ways.tsv.gz' AS (id:long, geometry, tags:map [chararray]);

-- Anything tagged with 'highway' is considered a road.
roads = FILTER all_ways BY tags#'highway' IS NOT NULL;
STORE roads INTO 'roads.tsv';

rivers = FILTER all_ways BY tags#'waterway' == 'river';
STORE rivers INTO 'rivers.tsv';

lakes = FILTER all_ways BY tags#'natural' == 'water';
STORE lakes INTO 'lakes.tsv';

-- Administrative boundaries at admin_level >= 4 approximate cities/regions.
cities = FILTER all_ways BY tags#'boundary' == 'administrative'
  AND (INT)tags#'admin_level' >= 4;
STORE cities INTO 'cities.tsv';

parks = FILTER all_ways BY tags#'leisure' == 'park';
STORE parks INTO 'parks.tsv';
33 |
--------------------------------------------------------------------------------
/src/main/scripts/ruby/exp_dynindex.rb:
--------------------------------------------------------------------------------
# Experiment driver: incrementally inserts batches of sorted OSM points into a
# dynamic R*-tree index and snapshots the index metadata after each batch.

input_path = "/user/tvu032/sorted_osm_32mb/sorted_all_nodes_4G"
index_path = "dynindex"

# List the batch files under the input directory; column 7 of `hdfs dfs -ls`
# output is the file path (header/summary lines yield nil and are removed).
batches = `hdfs dfs -ls #{input_path}`.each_line.map { |line| line.split[7]}
batches.delete(nil)
# Process the batches in numeric order of their trailing _<n> suffix.
batches = batches.sort_by {|f| f.split('_')[-1].to_i }

# Start from a clean index directory.
`hdfs dfs -rm -r #{index_path}`

for batch in batches
  batch_num = batch.split('_')[-1].to_i
  # Insert this batch into the R*-tree global index (osmpoint shape).
  output = `hadoop jar spatialhadoop-2.4.3-SNAPSHOT-uber.jar insert shape:osmpoint gindex:rstar #{batch} #{index_path}`
  puts "batch: #{batch}"
  puts output
  # Snapshot the index layout (WKT + master file) after this batch.
  `hdfs dfs -get dynindex/_rstar.wkt dynindex_#{batch_num}.wkt`
  `hdfs dfs -get dynindex/_master.rstar dynindex_#{batch_num}.rstar`
end
--------------------------------------------------------------------------------
/src/main/scripts/ruby/index_measure.rb:
--------------------------------------------------------------------------------
# Measure the quality of the index given by the input partitions.
# Input (stdin): CSV lines of the form "id,x1,y1,x2,y2,...,num_records,num_bytes,filename".
# Output: the expected number of bytes a uniformly-placed range query of size
# query_width x query_height has to process.

partitions = $stdin.each_line.map do |line|
  parts = line.split ','
  {
    :id => parts[0].to_i,
    :mbr => parts[1..4].map(&:to_f),   # [x1, y1, x2, y2]
    :num_records => parts[-3].to_i,
    :num_bytes => parts[-2].to_i,
    :filename => parts[-1]
  }
end

query_width = 0.1
query_height = 0.1

# Expected number of bytes to process for a query of the given size,
# accumulated over all partitions below.
expected_bytes = 0.0

all_mbrs = partitions.map{|p| p[:mbr]}
overall_mbr = [ all_mbrs.map{|mbr| mbr[0]}.min,
                all_mbrs.map{|mbr| mbr[1]}.min,
                all_mbrs.map{|mbr| mbr[2]}.max,
                all_mbrs.map{|mbr| mbr[3]}.max]

# Fixed: area is width * height; the original subtracted the two extents,
# which is not an area and could even be negative.
overall_mbr_area = (overall_mbr[2] - overall_mbr[0]) * (overall_mbr[3] - overall_mbr[1])

for partition in partitions
  # Probability that a uniformly-placed query window overlaps this partition:
  # the query may overlap the partition's MBR extended by the query size
  # (Minkowski sum), normalized by the overall MBR area.
  partition_mbr = partition[:mbr]
  mbr_width = partition_mbr[2] - partition_mbr[0]
  mbr_height = partition_mbr[3] - partition_mbr[1]
  area_overlap = (mbr_width + query_width) * (mbr_height + query_height)
  probability_selection = area_overlap.to_f / overall_mbr_area
  # Fixed: accumulate over all partitions; the original assigned (=) each
  # iteration, so only the last partition contributed to the result.
  expected_bytes += probability_selection * partition[:num_bytes]
end

puts "Overall quality = #{expected_bytes}"
--------------------------------------------------------------------------------
/src/main/scripts/ruby/install-shadoop.rb:
--------------------------------------------------------------------------------
#!/usr/bin/ruby
# Installs SpatialHadoop on a Hadoop node. The download URL can be passed as
# the first argument; otherwise it is chosen automatically based on the
# Hadoop version detected from the jar layout under /home/hadoop.
$download_url = ARGV[0]

unless $download_url
  if Dir.glob('/home/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2*.jar').any?
    # Hadoop 2.x jar layout detected
    $hadoop_version = 2
    $download_url = 'http://spatialhadoop.cs.umn.edu/downloads/spatialhadoop-2.3-2.tar.gz'
  elsif Dir.glob('/home/hadoop/hadoop-core-1*.jar').any?
    # Hadoop 1.x jar layout detected
    $hadoop_version = 1
    $download_url = 'http://spatialhadoop.cs.umn.edu/downloads/spatialhadoop-2.3.tar.gz'
  else
    raise "Unsupported Hadoop version"
  end
end

# Download and extract the tarball directly into the Hadoop home directory.
system("wget -qO- #{$download_url} | tar -xvz -C /home/hadoop/")
17 |
--------------------------------------------------------------------------------
/src/main/scripts/ruby/vd.rasem:
--------------------------------------------------------------------------------
# Rasem drawing script: renders line segments read from stdin (one per row,
# "x1,y1,x2,y2,...") onto a 3600x1800 SVG canvas.
set_width 3600
set_height 1800
$stdin.each_line do |line|
  parts = line.split ','
  # Skip segments with NaN endpoints (e.g. unbounded edges).
  if parts[0] != 'NaN' && parts[2] != 'NaN'
    # Scale coordinates by 10 to fit the canvas.
    x1, y1, x2, y2 = parts[0,4].map{|d| d.to_f * 10}
    line(x1, y1, x2, y2)
  end
end
10 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/OperationsParamsTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop;

import edu.umn.cs.spatialHadoop.osm.OSMEdge;
import edu.umn.cs.spatialHadoop.osm.OSMPoint;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit test for {@link OperationsParams} class, in particular its automatic
 * shape-type detection from sample input lines.
 */
public class OperationsParamsTest extends TestCase {

  /**
   * Create the test case
   *
   * @param testName
   *          name of the test case
   */
  public OperationsParamsTest(String testName) {
    super(testName);
  }

  /**
   * @return the suite of tests being tested
   */
  public static Test suite() {
    return new TestSuite(OperationsParamsTest.class);
  }

  /** Lines of two comma-separated numbers should be detected as "point". */
  public void testDetectPoint() {
    String detectedShape = OperationsParams.detectShape(new String[] {
        "1,5", "0.5,7.8", "-13.54,33E+15"
    });
    assertEquals("point", detectedShape);
  }

  /** Lines of four comma-separated numbers should be detected as "rectangle". */
  public void testDetectRectangle() {
    String detectedShape = OperationsParams.detectShape(new String[] {
        "1,5,-10,100", "0.5,7.8,15,300", "-13.54,33E+15,7,8"
    });
    assertEquals("rectangle", detectedShape);
  }

  /** OSM edge records (ids, endpoints, tag map) map to the OSMEdge class name. */
  public void testDetectOSMEdge() {
    String detectedShape = OperationsParams.detectShape(new String[] {
        "4200,200516,-1.8230973,52.5541131,1030999477,-1.8230368,52.5540756,420,{\"highway\"=\"residential\"}{\"is_in\"=\"Sutton Coldfield\"}",
        "4206,200521,-1.8226204,52.5537103,200522,-1.8223988,52.5534041,420,{\"highway\"=\"residential\"}"
    });
    assertEquals(OSMEdge.class.getName(), detectedShape);
  }

  /** Tab-separated id + WKT polygon + tags should be detected as "osm". */
  public void testDetectOSMPolygon() {
    String detectedShape = OperationsParams.detectShape(new String[] {
        "22987\tPOLYGON ((9.1825602 48.7758568, 9.1823907 48.7757439, 9.1825602 48.7758568))\t[type#multipolygon,building#yes,name#Altes Waisenhaus]",
        "167066\tPOLYGON ((18.066006 59.3206665, 18.0663116 59.3205506, 18.0661665 59.3205876, 18.066006 59.3206665))\t[type#multipolygon,building#yes]",
        "423037\tPOLYGON ((-40.3031302 -20.2814281, -40.3032133 -20.2813125, -40.3031302 -20.2814281))\t[type#multipolygon,building#yes]"
    });
    assertEquals("osm", detectedShape);
  }

  /** Tab-separated id + lat + lon + tags map to the OSMPoint class name. */
  public void testDetectOSMPoint() {
    String detectedShape = OperationsParams.detectShape(new String[] {
        "270\t16.6764183\t68.4073792\t[]",
        "19\t-0.20698\t51.9458753\t[ref#SG4 90,collection_times#Mo-Fr 16:30; Sa 09:45,box_type#lamp_box,amenity#post_box]",
        "151\t10.7757166\t59.9515306\t[]"
    });
    assertEquals(OSMPoint.class.getName(), detectedShape);
  }

}
72 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/TestHelper.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop;

import edu.umn.cs.spatialHadoop.core.Point;
import edu.umn.cs.spatialHadoop.core.Rectangle;
import edu.umn.cs.spatialHadoop.core.Shape;
import edu.umn.cs.spatialHadoop.operations.RandomSpatialGenerator;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;

/**
 * Helper routines shared by the unit tests.
 */
public class TestHelper {

  /**
   * Generates a local file of randomly generated shapes for use in tests.
   *
   * @param filename destination path of the generated file
   * @param shapeClass type of shapes to generate
   * @param mbr the area in which shapes are generated
   * @param size desired size of the generated file in bytes
   * @param params additional parameters passed to the generator (mutated)
   * @throws InterruptedException if the generation job is interrupted
   * @throws IOException if the file cannot be generated or moved
   * @throws ClassNotFoundException if the generation job cannot load a class
   */
  public static void generateFile(String filename, Class<? extends Shape> shapeClass,
      Rectangle mbr, long size, OperationsParams params) throws InterruptedException, IOException, ClassNotFoundException {
    // Fixed: honor the requested shape class. The original always registered
    // Point.class and silently ignored the shapeClass parameter.
    params.setClass("shape", shapeClass, Shape.class);
    OperationsParams.setShape(params, "mbr", mbr);
    params.setBoolean("local", true);
    params.setBoolean("overwrite", true);
    params.setLong("size", size);
    Path randomFile = new Path("TestHelper.RandomFile");
    RandomSpatialGenerator.generateFile(randomFile, params);
    // The generator may produce either a directory with one data file or a
    // plain file. Skip hidden ('.') and meta ('_') entries when searching.
    File[] generatedFiles = new File(randomFile.getName()).listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        char firstChar = name.charAt(0);
        return firstChar != '.' && firstChar != '_';
      }
    });
    // Fixed: listFiles returns null when the path is not a directory;
    // guard against an NPE before checking the length.
    if (generatedFiles != null && generatedFiles.length == 1) {
      generatedFiles[0].renameTo(new File(filename));
    } else {
      new File(randomFile.getName()).renameTo(new File(filename));
    }
    randomFile.getFileSystem(params).deleteOnExit(randomFile);
  }
}
39 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/TigerShapeTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop;

import org.apache.hadoop.io.Text;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit test for {@link TigerShape} class.
 */
public class TigerShapeTest extends TestCase {

  /**
   * Create the test case
   *
   * @param testName
   *          name of the test case
   */
  public TigerShapeTest(String testName) {
    super(testName);
  }

  /**
   * @return the suite of tests being tested
   */
  public static Test suite() {
    return new TestSuite(TigerShapeTest.class);
  }

  /**
   * Parsing a tab-separated "WKT\tid" line should yield a geometry whose
   * area matches the unit square encoded in the WKT.
   */
  public void testParseTabSeparatedWKT() {
    Text text = new Text("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))\t234");
    TigerShape shape = new TigerShape();
    shape.fromText(text);
    assertNotNull(shape.geom);
    // Unit square => area 1.0 (compare with a tolerance for floating point)
    assertTrue(Math.abs(shape.geom.getArea() - 1.0) < 1E-5);
  }

}
40 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/core/GridInfoTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop.core;

import edu.umn.cs.spatialHadoop.operations.Head;
import edu.umn.cs.spatialHadoop.util.FSUtil;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * Unit test for the {@link GridInfo} class.
 * (The original javadoc referenced {@link Head}, which appears to be a
 * copy/paste leftover — this class only exercises GridInfo.)
 */
public class GridInfoTest extends TestCase {

  /**
   * Create the test case
   *
   * @param testName
   *          name of the test case
   */
  public GridInfoTest(String testName) {
    super(testName);
  }

  /**
   * @return the suite of tests being tested
   */
  public static Test suite() {
    return new TestSuite(GridInfoTest.class);
  }

  /**
   * Verifies which grid cells a query rectangle overlaps on a 16x16 grid
   * of unit-sized cells.
   */
  public void testGetOverlappingCells() {
    GridInfo gridInfo = new GridInfo(0, 0, 16, 16, 16, 16);
    // A rectangle spanning (0.5,0.5)-(1.5,1.5) crosses the boundary at 1.0
    // and should overlap a 2x2 block of cells starting at (0,0).
    java.awt.Rectangle overlappingCells = gridInfo.getOverlappingCells(new Rectangle(0.5, 0.5, 1.5, 1.5));
    assertEquals(0, overlappingCells.x);
    assertEquals(0, overlappingCells.y);
    assertEquals(2, overlappingCells.width);
    assertEquals(2, overlappingCells.height);

    // A rectangle fully inside cell (0,0) should overlap exactly one cell.
    overlappingCells = gridInfo.getOverlappingCells(new Rectangle(0.5, 0.5, 1.0, 1.0));
    assertEquals(0, overlappingCells.x);
    assertEquals(0, overlappingCells.y);
    assertEquals(1, overlappingCells.width);
    assertEquals(1, overlappingCells.height);

  }

}
55 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/indexing/AbstractRTreeBBPartitionerTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop.indexing;

import edu.umn.cs.spatialHadoop.BaseTest;
import edu.umn.cs.spatialHadoop.core.Point;
import edu.umn.cs.spatialHadoop.core.Rectangle;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

/**
 * Unit tests for {@link AbstractRTreeBBPartitioner} (bounding-box based
 * R-tree partitioners).
 */
public class AbstractRTreeBBPartitionerTest extends BaseTest {

  /**
   * Constructs a partitioner over the sample points and verifies that it
   * produces more than two partitions and that every partition is hit by
   * at least one point.
   */
  public void testConstruct() throws IOException {
    double[][] coords = readFile("src/test/resources/test.points");
    Point[] points = new Point[coords[0].length];
    for (int i = 0; i < points.length; i++) {
      points[i] = new Point(coords[0][i], coords[1][i]);
    }
    AbstractRTreeBBPartitioner p = new AbstractRTreeBBPartitioner.RTreeGuttmanBBPartitioner();
    p.setup(new Configuration());
    p.construct(null, points, 4);
    assertTrue("Too few partitions", p.getPartitionCount() > 2);
    // Fixed: use parameterized types instead of raw Set/HashSet.
    Set<Integer> partitions = new HashSet<Integer>();
    for (Point pt : points) {
      partitions.add(p.overlapPartition(pt));
    }
    // Every partition must be selected by at least one point.
    assertEquals(p.getPartitionCount(), partitions.size());
  }

  /**
   * When a point lies in several partitions, the one with the minimal area
   * should be chosen.
   */
  public void testOverlapPartitionShouldChooseMinimalArea() {
    Rectangle[] partitions = { new Rectangle(0,0,4,4),
        new Rectangle(1,1,3,3)};
    AbstractRTreeBBPartitioner p = new AbstractRTreeBBPartitioner.RTreeGuttmanBBPartitioner();
    initializeBBPartitioner(p, partitions);
    assertEquals(0, p.overlapPartition(new Point(0.5, 0.5)));
    assertEquals(1, p.overlapPartition(new Point(2, 2)));
  }

  /** Populates the partitioner's coordinate arrays from the given MBRs. */
  private void initializeBBPartitioner(AbstractRTreeBBPartitioner p, Rectangle[] partitions) {
    p.x1s = new double[partitions.length];
    p.y1s = new double[partitions.length];
    p.x2s = new double[partitions.length];
    p.y2s = new double[partitions.length];
    for (int i = 0; i < partitions.length; i++) {
      p.x1s[i] = partitions[i].x1;
      p.x2s[i] = partitions[i].x2;
      p.y1s[i] = partitions[i].y1;
      p.y2s[i] = partitions[i].y2;
    }
  }
}
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/indexing/PartitionerTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop.indexing;

import edu.umn.cs.spatialHadoop.BaseTest;
import edu.umn.cs.spatialHadoop.core.CellInfo;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;

import java.io.IOException;
import java.io.PrintStream;

/**
 * Unit test for the {@link Partitioner} utility methods.
 */
public class PartitionerTest extends BaseTest {

  /**
   * Writes a minimal master file with a single partition entry and verifies
   * that {@link Partitioner#generateMasterWKT} produces the companion WKT file.
   */
  public void testGenerateMasterWKT() {
    try {
      Path masterPath = new Path(scratchPath, "_master.rstar");
      FileSystem fs = masterPath.getFileSystem(new Configuration());
      PrintStream ps = new PrintStream(fs.create(masterPath, true));
      Partition p = new Partition("data000", new CellInfo(1, 0,5,100,105));
      ps.println(p.toText(new Text()));
      ps.close();

      Partitioner.generateMasterWKT(fs, masterPath);
      // The WKT file name is derived from the master file's extension.
      Path wktPath = new Path(scratchPath, "_rstar.wkt");
      assertTrue("WKT file not found!", fs.exists(wktPath));

    } catch (IOException e) {
      e.printStackTrace();
      fail("Error in test!");
    }

  }
}
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/indexing/RRStarLocalInexTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop.indexing;

import edu.umn.cs.spatialHadoop.BaseTest;
import edu.umn.cs.spatialHadoop.core.Point;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.IOException;

/**
 * Unit test for the {@link RRStarLocalIndex} class.
 * NOTE(review): the class name misspells "Index" as "Inex"; renaming would
 * require renaming the source file as well, so it is left as-is here.
 */
public class RRStarLocalInexTest extends BaseTest {

  /**
   * Create the test case
   *
   * @param testName
   *          name of the test case
   */
  public RRStarLocalInexTest(String testName) {
    super(testName);
  }

  /**
   * @return the suite of tests being tested
   */
  public static Test suite() {
    return new TestSuite(RRStarLocalInexTest.class);
  }

  /**
   * Builds a local index from a heap file, reads it back with a fresh
   * instance, and verifies both a range search and a full scan.
   */
  public void testIndexWriteRead() {
    Path indexFile = new Path(scratchPath, "tempout");
    try {
      RRStarLocalIndex lindex = new RRStarLocalIndex();
      File heapFile = new File("src/test/resources/test.points");
      Configuration conf = new Configuration();
      lindex.setup(conf);
      lindex.buildLocalIndex(heapFile, indexFile, new Point());

      // Read the index back with a new instance to exercise deserialization.
      lindex = new RRStarLocalIndex();
      lindex.setup(conf);
      FileSystem fs = indexFile.getFileSystem(conf);
      FSDataInputStream in = fs.open(indexFile);
      long len = fs.getFileStatus(indexFile).getLen();
      lindex.read(in, 0, len, new Point());
      // Fixed: restored the wildcard generic type that was garbled to
      // "Iterable extends Point>" (non-compiling) in the checked-in file.
      Iterable<? extends Point> results = lindex.search(0, 0, 5, 5);
      int count = 0;
      for (Point p : results)
        count++;
      assertEquals(2, count);

      results = lindex.scanAll();
      count = 0;
      for (Point p : results)
        count++;
      assertEquals(11, count);
    } catch (Exception e) {
      e.printStackTrace();
      fail("Error writing or reading the index");
    }
  }

}
71 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/indexing/STRPartitionerTest.java:
--------------------------------------------------------------------------------
package edu.umn.cs.spatialHadoop.indexing;

import edu.umn.cs.spatialHadoop.core.Point;
import edu.umn.cs.spatialHadoop.core.Rectangle;
import edu.umn.cs.spatialHadoop.io.MemoryInputStream;
import edu.umn.cs.spatialHadoop.util.IntArray;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.hadoop.fs.FSDataInputStream;

import java.io.*;
import java.nio.ByteBuffer;

/**
 * Unit test for the STRPartitioner class
 */
public class STRPartitionerTest extends TestCase {

  /**
   * Create the test case
   *
   * @param testName
   *          name of the test case
   */
  public STRPartitionerTest(String testName) {
    super(testName);
  }

  /**
   * @return the suite of tests being tested
   */
  public static Test suite() {
    return new TestSuite(STRPartitionerTest.class);
  }

  /**
   * With fewer points than the requested capacity, construction should yield
   * a single partition that contains everything.
   */
  public void testSmallFiles() {
    STRPartitioner str = new STRPartitioner();
    str.construct(null, new Point[] {new Point(1, 1), new Point(2, 2)}, 10);
    assertEquals(1, str.getPartitionCount());
    // Any overlapping query must resolve to the single partition (id 0).
    int i = str.overlapPartition(new Rectangle(0, 0, 3, 3));
    assertEquals(0, i);
    assertNotNull(str.getPartition(0));
    assertNotNull(str.getPartitionAt(0));
  }

}
48 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/operations/KNNTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.operations;
2 |
3 | import edu.umn.cs.spatialHadoop.BaseTest;
4 | import edu.umn.cs.spatialHadoop.OperationsParams;
5 | import edu.umn.cs.spatialHadoop.indexing.Indexer;
6 | import junit.framework.Test;
7 | import junit.framework.TestCase;
8 | import junit.framework.TestSuite;
9 | import org.apache.hadoop.fs.Path;
10 | import org.apache.hadoop.mapreduce.Job;
11 |
12 | import java.io.IOException;
13 |
14 | /**
15 | * Unit test for {@link LocalSampler} class.
16 | */
17 | public class KNNTest extends BaseTest {
18 |
19 | /**
20 | * Create the test case
21 | *
22 | * @param testName
23 | * name of the test case
24 | */
25 | public KNNTest(String testName) {
26 | super(testName);
27 | }
28 |
29 | /**
30 | * @return the suite of tests being tested
31 | */
32 | public static Test suite() {
33 | return new TestSuite(KNNTest.class);
34 | }
35 |
36 | public void testKNNWithLocalIndex() {
37 | try {
38 | Path inputFile = new Path("src/test/resources/test.points");
39 | Path outPath = new Path(scratchPath, "tempout");
40 | OperationsParams params = new OperationsParams();
41 | params.set("shape", "point");
42 | params.set("sindex", "rtree");
43 | params.setBoolean("local", false);
44 | outPath.getFileSystem(params).delete(outPath, true);
45 | Indexer.index(inputFile, outPath, params);
46 |
47 | // Now run a knn local query on the index
48 | params.set("point", "0,0");
49 | params.setInt("k", 3);
50 | params.setBoolean("local", true);
51 | KNN.knn(outPath, null, params);
52 | } catch (Exception e) {
53 | e.printStackTrace();
54 | fail("Error while indexing");
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/operations/SJMRTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.operations;
2 |
3 | import edu.umn.cs.spatialHadoop.BaseTest;
4 | import edu.umn.cs.spatialHadoop.OperationsParams;
5 | import edu.umn.cs.spatialHadoop.core.Rectangle;
6 | import edu.umn.cs.spatialHadoop.core.Shape;
7 | import edu.umn.cs.spatialHadoop.core.SpatialSite;
8 | import edu.umn.cs.spatialHadoop.indexing.Partitioner;
9 | import edu.umn.cs.spatialHadoop.indexing.STRPartitioner;
10 | import edu.umn.cs.spatialHadoop.util.FileUtil;
11 | import junit.framework.TestCase;
12 | import org.apache.hadoop.fs.FileSystem;
13 | import org.apache.hadoop.fs.Path;
14 |
15 | import java.io.IOException;
16 |
17 | public class SJMRTest extends BaseTest {
18 |
19 | public void testSjmr() throws IOException, InterruptedException {
20 | Path inFile = new Path("src/test/resources/test.rect");
21 | Path inFile1 = new Path(scratchPath, "file1");
22 | Path inFile2 = new Path(scratchPath, "file2");
23 | Path outFile = new Path(scratchPath, "sjmrout");
24 |
25 | OperationsParams params = new OperationsParams();
26 | FileSystem fs = inFile.getFileSystem(params);
27 | fs.copyToLocalFile(inFile, inFile1);
28 | fs.copyToLocalFile(inFile, inFile2);
29 | params.setClass("shape", Rectangle.class, Shape.class);
30 | SJMR.sjmr(new Path[]{inFile1, inFile2}, outFile, params);
31 | String[] results = readTextFile(outFile.toString());
32 | assertEquals(14, results.length);
33 | }
34 |
35 | public void testSjmrWithSTRPartitioner() throws IOException, InterruptedException {
36 | Path inFile = new Path("src/test/resources/test.rect");
37 | Path inFile1 = new Path(scratchPath, "file1");
38 | Path inFile2 = new Path(scratchPath, "file2");
39 | Path outFile = new Path(scratchPath, "sjmrout");
40 |
41 | OperationsParams params = new OperationsParams();
42 | params.setClass("partitioner", STRPartitioner.class, Partitioner.class);
43 | params.setFloat(SpatialSite.SAMPLE_RATIO, 1.0f);
44 | FileSystem fs = inFile.getFileSystem(params);
45 | fs.copyToLocalFile(inFile, inFile1);
46 | fs.copyToLocalFile(inFile, inFile2);
47 | params.setClass("shape", Rectangle.class, Shape.class);
48 | SJMR.sjmr(new Path[]{inFile1, inFile2}, outFile, params);
49 | String[] results = readTextFile(outFile.toString());
50 | assertEquals(14, results.length);
51 | }
52 | }
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/operations/SamplerTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.operations;
2 |
3 | import edu.umn.cs.spatialHadoop.OperationsParams;
4 | import edu.umn.cs.spatialHadoop.core.Point;
5 | import edu.umn.cs.spatialHadoop.core.Rectangle;
6 | import edu.umn.cs.spatialHadoop.core.Shape;
7 | import junit.framework.TestCase;
8 | import org.apache.hadoop.fs.FileSystem;
9 | import org.apache.hadoop.fs.Path;
10 |
11 | public class SamplerTest extends TestCase {
12 |
13 | /**A scratch area used to do all the tests which gets wiped at the end*/
14 | protected Path scratchPath = new Path("testindex");
15 |
16 | @Override
17 | protected void tearDown() throws Exception {
18 | OperationsParams params = new OperationsParams();
19 | FileSystem fs = scratchPath.getFileSystem(params);
20 | fs.delete(scratchPath, true);
21 | }
22 |
23 | @Override
24 | protected void setUp() throws Exception {
25 | super.setUp();
26 | OperationsParams params = new OperationsParams();
27 | FileSystem fs = scratchPath.getFileSystem(params);
28 | if (fs.exists(scratchPath))
29 | fs.delete(scratchPath, true);
30 | if (!fs.exists(scratchPath))
31 | fs.mkdirs(scratchPath);
32 | }
33 |
34 | public void testTakeSample() {
35 | Path input = new Path("src/test/resources/test.rect");
36 | OperationsParams params = new OperationsParams();
37 | params.setClass("shape", Rectangle.class, Shape.class);
38 | params.setClass("outshape", Point.class, Shape.class);
39 | params.setFloat("ratio", 1.0f); // Read all records
40 | try {
41 | String[] lines = Sampler.takeSample(new Path[]{input}, params);
42 | assertEquals(14, lines.length);
43 | // Make sure that they are points
44 | assertEquals(2, lines[0].split(",").length);
45 | } catch (Exception e) {
46 | e.printStackTrace();
47 | fail("Error in test");
48 | }
49 | }
50 |
51 | public void testSampleConvert() {
52 | Path input = new Path("src/test/resources/test.rect");
53 | Path output = new Path(scratchPath, "sampled");
54 | OperationsParams params = new OperationsParams();
55 | params.setClass("shape", Rectangle.class, Shape.class);
56 | params.setClass("outshape", Point.class, Shape.class);
57 | params.setFloat("ratio", 1.0f); // Read all records
58 | try {
59 | Sampler.sampleMapReduce(new Path[]{input}, output, params);
60 | String[] lines = Head.head(output.getFileSystem(params), output, 20);
61 | assertEquals(14, lines.length);
62 | // Make sure that they are points
63 | assertEquals(2, lines[0].split(",").length);
64 | } catch (Exception e) {
65 | e.printStackTrace();
66 | fail("Error in test");
67 | }
68 | }
69 | }
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/util/BitArrayTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.util;
2 |
3 | import edu.umn.cs.spatialHadoop.operations.Head;
4 | import junit.framework.Test;
5 | import junit.framework.TestCase;
6 | import junit.framework.TestSuite;
7 |
8 | import java.io.*;
9 |
10 | /**
11 | * Unit test for the utility class {@link Head}.
12 | */
13 | public class BitArrayTest extends TestCase {
14 |
15 | /**
16 | * Create the test case
17 | *
18 | * @param testName
19 | * name of the test case
20 | */
21 | public BitArrayTest(String testName) {
22 | super(testName);
23 | }
24 |
25 | /**
26 | * @return the suite of tests being tested
27 | */
28 | public static Test suite() {
29 | return new TestSuite(BitArrayTest.class);
30 | }
31 |
32 | public void testRandomInsert() {
33 | BitArray bitArray = new BitArray(20);
34 | bitArray.set(5, true);
35 | assertTrue(bitArray.get(5));
36 | bitArray.resize(10000);
37 | bitArray.set(10000, true);
38 | assertTrue(bitArray.get(5));
39 | assertTrue(bitArray.get(10000));
40 | }
41 |
42 | public void testCountOnes() {
43 | BitArray bitArray = new BitArray(100);
44 | for (int i = 0; i < 100; i++) {
45 | bitArray.set(i, true);
46 | assertEquals(i+1, bitArray.countOnes());
47 | }
48 | }
49 |
50 | public void testReadWrite() {
51 | BitArray bitArray = new BitArray(20);
52 | bitArray.set(5, true);
53 | bitArray.set(10, true);
54 | ByteArrayOutputStream baos = new ByteArrayOutputStream();
55 | DataOutputStream dos = new DataOutputStream(baos);
56 | try {
57 | bitArray.write(dos);
58 | dos.close();
59 | } catch (IOException e) {
60 | fail("Error in write");
61 | }
62 | byte[] buffer = baos.toByteArray();
63 |
64 | DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
65 | bitArray = new BitArray();
66 | try {
67 | bitArray.readFields(in);
68 | in.close();
69 | } catch (IOException e) {
70 | fail("Error in read");
71 | }
72 | assertTrue(bitArray.get(5));
73 | assertTrue(bitArray.get(10));
74 | assertEquals(20, bitArray.size);
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/util/IntArrayTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.util;
2 |
3 | import edu.umn.cs.spatialHadoop.operations.Head;
4 | import junit.framework.Test;
5 | import junit.framework.TestCase;
6 | import junit.framework.TestSuite;
7 | import org.apache.hadoop.conf.Configuration;
8 | import org.apache.hadoop.fs.FileStatus;
9 | import org.apache.hadoop.fs.FileSystem;
10 | import org.apache.hadoop.fs.LocalFileSystem;
11 | import org.apache.hadoop.fs.Path;
12 |
13 | import java.io.IOException;
14 | import java.util.Arrays;
15 | import java.util.Comparator;
16 |
17 | /**
18 | * Unit test for the utility class {@link Head}.
19 | */
20 | public class IntArrayTest extends TestCase {
21 |
22 | /**
23 | * Create the test case
24 | *
25 | * @param testName
26 | * name of the test case
27 | */
28 | public IntArrayTest(String testName) {
29 | super(testName);
30 | }
31 |
32 | /**
33 | * @return the suite of tests being tested
34 | */
35 | public static Test suite() {
36 | return new TestSuite(IntArrayTest.class);
37 | }
38 |
39 | public void testRandomInsert() {
40 | IntArray array = new IntArray();
41 | array.add(5);
42 | array.insert(0, 3);
43 | assertEquals(2, array.size());
44 | assertEquals(3, array.get(0));
45 | assertEquals(5, array.get(1));
46 | }
47 |
48 | public void testSetFunction() {
49 | IntArray array = new IntArray();
50 | array.add(5);
51 | assertEquals(5, array.get(0));
52 | array.set(0, 15);
53 | assertEquals(15, array.get(0));
54 | assertEquals(1, array.size);
55 | }
56 |
57 | public void testInsertionSort() {
58 | IntArray array = new IntArray();
59 | array.add(5);
60 | array.add(3);
61 | array.add(1);
62 | array.add(10);
63 | array.insertionSort(new Comparator() {
64 | @Override
65 | public int compare(Integer o1, Integer o2) {
66 | return o1 - o2;
67 | }
68 | });
69 |
70 | assertTrue("Array not sorted", Arrays.equals(new int[] {1,3,5,10}, array.toArray()));
71 | }
72 |
73 | public void testInsertionSortTwoElements() {
74 | IntArray array = new IntArray();
75 | array.add(5);
76 | array.add(3);
77 | array.insertionSort(new Comparator() {
78 | @Override
79 | public int compare(Integer o1, Integer o2) {
80 | return o1 - o2;
81 | }
82 | });
83 |
84 | assertTrue("Array not sorted", Arrays.equals(new int[] {3,5}, array.toArray()));
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/util/SampleIterableTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.util;
2 |
3 | import edu.umn.cs.spatialHadoop.OperationsParams;
4 | import junit.framework.TestCase;
5 | import org.apache.hadoop.fs.FileSystem;
6 | import org.apache.hadoop.fs.Path;
7 | import org.apache.hadoop.io.Text;
8 | import org.apache.hadoop.mapreduce.lib.input.FileSplit;
9 |
10 | import java.io.InputStream;
11 | import java.util.Iterator;
12 |
13 |
14 | public class SampleIterableTest extends TestCase {
15 |
16 | public SampleIterableTest(String testName) {
17 | super(testName);
18 | }
19 |
20 | public void testFullSampleTextFile() {
21 | Path fileToSample = new Path("src/test/resources/test.points");
22 | OperationsParams params = new OperationsParams();
23 | try {
24 | FileSystem fs = fileToSample.getFileSystem(params);
25 | InputStream in = fs.open(fileToSample);
26 | SampleIterable siter = new SampleIterable(in, 0, fs.getFileStatus(fileToSample).getLen(), 1.0f, 0);
27 | int count = 0;
28 | for (Text t : siter)
29 | ++count;
30 | assertEquals(11, count);
31 | assertEquals(1.0f, siter.getProgress());
32 | } catch (Exception e) {
33 | e.printStackTrace();
34 | fail("Error in test");
35 | }
36 | }
37 |
38 | public void testUnderlyingIterator() {
39 | Path fileToSample = new Path("src/test/resources/test.points");
40 | OperationsParams params = new OperationsParams();
41 | try {
42 | FileSystem fs = fileToSample.getFileSystem(params);
43 | InputStream in = fs.open(fileToSample);
44 | SampleIterable siter = new SampleIterable(in, 0, fs.getFileStatus(fileToSample).getLen(), 1.0f, 0);
45 | Iterator i = siter.iterator();
46 | i.hasNext();
47 | i.hasNext();
48 | i.hasNext();
49 | int count = 0;
50 | while (i.hasNext()) {
51 | i.next();
52 | ++count;
53 | }
54 | assertEquals(11, count);
55 | } catch (Exception e) {
56 | e.printStackTrace();
57 | fail("Error in test");
58 | }
59 | }
60 | }
--------------------------------------------------------------------------------
/src/test/java/edu/umn/cs/spatialHadoop/visualization/TileIndexTest.java:
--------------------------------------------------------------------------------
1 | package edu.umn.cs.spatialHadoop.visualization;
2 |
3 | import edu.umn.cs.spatialHadoop.core.Rectangle;
4 | import junit.framework.Test;
5 | import junit.framework.TestCase;
6 | import junit.framework.TestSuite;
7 |
8 | /**
9 | * Unit test for simple App.
10 | */
11 | public class TileIndexTest extends TestCase {
12 |
13 | /**
14 | * Create the test case
15 | *
16 | * @param testName
17 | * name of the test case
18 | */
19 | public TileIndexTest(String testName) {
20 | super(testName);
21 | }
22 |
23 | /**
24 | * @return the suite of tests being tested
25 | */
26 | public static Test suite() {
27 | return new TestSuite(TileIndexTest.class);
28 | }
29 |
30 | public void testGetMBR() {
31 | Rectangle spaceMBR = new Rectangle(0, 0, 1024, 1024);
32 | int x = 0, y = 0, z = 0;
33 | Rectangle tileMBR = TileIndex.getMBR(spaceMBR, z, x, y);
34 | Rectangle expectedMBR = spaceMBR;
35 | assertTrue("Expected MBR of ("+z+","+x+","+y+") to be "+expectedMBR+" but found to be "+tileMBR,
36 | expectedMBR.equals(tileMBR));
37 |
38 | z = 1;
39 | expectedMBR = new Rectangle(spaceMBR.x1, spaceMBR.y1,
40 | spaceMBR.getWidth() / 2, spaceMBR.getHeight() / 2);
41 | tileMBR = TileIndex.getMBR(spaceMBR, z, x, y);
42 | assertTrue("Expected MBR of ("+z+","+x+","+y+") to be "+expectedMBR+" but found to be "+tileMBR,
43 | expectedMBR.equals(tileMBR));
44 |
45 | z = 10;
46 | x = 100;
47 | y = 100;
48 | expectedMBR = new Rectangle(100, 100, 101, 101);
49 | tileMBR = TileIndex.getMBR(spaceMBR, z, x, y);
50 | assertTrue("Expected MBR of ("+z+","+x+","+y+") to be "+expectedMBR+" but found to be "+tileMBR,
51 | expectedMBR.equals(tileMBR));
52 | }
53 |
54 | public void testWithNonZeroOriginal() {
55 | Rectangle spaceMBR = new Rectangle(1024, 1024, 2048, 2048);
56 | long ti = TileIndex.encode(0, 0 ,0);
57 | Rectangle tileMBR = TileIndex.getMBR(spaceMBR, ti);
58 | Rectangle expectedMBR = spaceMBR;
59 | assertTrue("Expected MBR of "+ti+" to be "+expectedMBR+" but found to be "+tileMBR,
60 | expectedMBR.equals(tileMBR));
61 | }
62 |
63 | public void testEncodeAndDecode() {
64 | long ti = TileIndex.encode(6, 5, 7);
65 | assertEquals(0x60000500007L, ti);
66 | TileIndex tileIndex = null;
67 | tileIndex = TileIndex.decode(ti, tileIndex);
68 | assertEquals(6, tileIndex.z);
69 | assertEquals(5, tileIndex.x);
70 | assertEquals(7, tileIndex.y);
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/DT.rb:
--------------------------------------------------------------------------------
require 'rasem'

# Renders a Delaunay triangulation test case as an SVG image.
# Usage: ruby DT.rb <input_file>
# Reads <input_file>.points, <input_file>.triangles and, if present,
# <input_file>.unsafe_sites, and writes <input_file>.svg

if ARGV.length == 0
  raise "Must provide input file"
end

input_file = ARGV[0]

points_file = "#{input_file}.points"
triangles_file = "#{input_file}.triangles"
unsafe_sites_file = "#{input_file}.unsafe_sites"

# Each point is an [x, y] pair of floats, one per line
points = File.read(points_file).each_line.map do |line|
  line.split(",").map(&:to_f)
end

# Each triangle is three tab-separated corner points
triangles = File.read(triangles_file).each_line.map do |line|
  line.split("\t").map{|part| part.split(",").map(&:to_f)}
end

# File.exist? replaces File.exists?, which was deprecated and removed in Ruby 3.2
unsafe_sites = File.read(unsafe_sites_file).each_line.map {|l| l.to_i == 1} if File.exist?(unsafe_sites_file)

all_xs = points.map{|p| p[0]}
all_ys = points.map{|p| p[1]}

point_size = 4

width = 100
height = 100

img = Rasem::SVGImage.new(:width=>(width+2*point_size), :height=>(height+2*point_size)) do
  @mbr = [all_xs.min, all_ys.min, all_xs.max, all_ys.max]
  @scale = 100 / [(@mbr[2] - @mbr[0]) , (@mbr[3] - @mbr[1])].max
  @point_size = 12
  # Maps a data-space point into image coordinates
  def project_point(pt)
    [@point_size+(pt[0] - @mbr[0]) * @scale, @point_size+(pt[1] - @mbr[1])*@scale]
  end

  points.each_with_index do |point, index|
    projected_point = project_point(point)
    if unsafe_sites
      if unsafe_sites[index]
        # An unsafe site, draw as a circle
        circle projected_point[0], projected_point[1], point_size/2, :fill => :none
      else
        # A safe site, draw as a square
        rectangle projected_point[0] - point_size/2, projected_point[1] - point_size/2,
                  point_size, point_size, :fill => :none
      end
    else
      circle projected_point[0], projected_point[1], point_size / 2
    end
    text(projected_point[0], projected_point[1]+@point_size) { raw index.to_s }
  end

  for triangle in triangles do
    polygon *(triangle.map{|pt| project_point(pt)}.flatten), :fill => :none
  end
end

File.open("#{input_file}.svg", "w") {|f| img.write(f)}
62 |
63 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt1.points:
--------------------------------------------------------------------------------
1 | 50,50
2 | 60,10
3 | 70,80
4 | 80,50
5 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt1.triangles:
--------------------------------------------------------------------------------
1 | 50,50 60,10 80,50
2 | 50,50 80,50 70,80
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt10.points:
--------------------------------------------------------------------------------
1 | 080,050
2 | 100,050
3 | 100,070
4 | 100,080
5 | 100,110
6 | 110,030
7 | 150,020
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt10.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt10.triangles:
--------------------------------------------------------------------------------
1 | 080,050 100,050 110,030
2 | 080,050 100,070 100,050
3 | 080,050 100,080 100,070
4 | 080,050 100,110 100,080
5 | 100,050 150,020 110,030
6 | 100,070 150,020 100,050
7 | 100,080 150,020 100,070
8 | 100,110 150,020 100,080
9 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt11.points:
--------------------------------------------------------------------------------
1 | 200,700
2 | 200,950
3 | 250,850
4 | 300,850
5 | 350,900
6 | 550,450
7 | 550,900
8 | 700,650
9 | 700,800
10 | 750,900
11 | 900,750
12 | 900,850
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt11.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt12.points:
--------------------------------------------------------------------------------
1 | 180,130
2 | 180,140
3 | 190,110
4 | 210,290
5 | 220,290
6 | 240,290
7 | 260,290
8 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt12.triangles:
--------------------------------------------------------------------------------
1 | 180,130 180,140 190,110
2 | 180,140 210,290 220,290
3 | 180,140 220,290 240,290
4 | 180,140 240,290 260,290
5 | 180,140 260,290 190,110
6 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt13.points:
--------------------------------------------------------------------------------
1 | 141.751295,43.204941
2 | 141.751331,43.204880
3 | 141.751331,43.204969
4 | 141.751362,43.204993
5 | 141.757141,43.199280
6 | 141.759886,43.204065
7 | 141.760021,43.202379
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt13.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt14.points:
--------------------------------------------------------------------------------
1 | 0,0
2 | 0,1
3 | 0,2
4 | 0,3
5 | 0,4
6 | 0,5
7 | 0,6
8 | 0,7
9 | 0,8
10 | 0,9
11 | 1,0
12 | 1,1
13 | 1,2
14 | 1,3
15 | 1,4
16 | 1,5
17 | 1,6
18 | 1,7
19 | 1,8
20 | 1,9
21 | 2,0
22 | 2,1
23 | 2,2
24 | 2,3
25 | 2,4
26 | 2,5
27 | 2,6
28 | 2,7
29 | 2,8
30 | 2,9
31 | 3,0
32 | 3,1
33 | 3,2
34 | 3,3
35 | 3,4
36 | 3,5
37 | 3,6
38 | 3,7
39 | 3,8
40 | 3,9
41 | 4,0
42 | 4,1
43 | 4,2
44 | 4,3
45 | 4,4
46 | 4,5
47 | 4,6
48 | 4,7
49 | 4,8
50 | 4,9
51 | 5,0
52 | 5,1
53 | 5,2
54 | 5,3
55 | 5,4
56 | 5,5
57 | 5,6
58 | 5,7
59 | 5,8
60 | 5,9
61 | 6,0
62 | 6,1
63 | 6,2
64 | 6,3
65 | 6,4
66 | 6,5
67 | 6,6
68 | 6,7
69 | 6,8
70 | 6,9
71 | 7,0
72 | 7,1
73 | 7,2
74 | 7,3
75 | 7,4
76 | 7,5
77 | 7,6
78 | 7,7
79 | 7,8
80 | 7,9
81 | 8,0
82 | 8,1
83 | 8,2
84 | 8,3
85 | 8,4
86 | 8,5
87 | 8,6
88 | 8,7
89 | 8,8
90 | 8,9
91 | 9,0
92 | 9,1
93 | 9,2
94 | 9,3
95 | 9,4
96 | 9,5
97 | 9,6
98 | 9,7
99 | 9,8
100 | 9,9
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt15.points:
--------------------------------------------------------------------------------
1 | 315853.98,4094996.9
2 | 315854.05,4094996.97
3 | 315854.42,4094996.78
4 | 315854.48,4094996.82
5 | 315854.52,4094997.02
6 | 315854.1,4094997.39
7 | 315854.22,4094997.13
8 | 315854.25,4094997.11
9 | 315854.35,4094997.14
10 | 315854.54,4094997.03
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt15.triangles:
--------------------------------------------------------------------------------
1 | 315854.05,4094996.97 315854.22,4094997.13 315854.1,4094997.39
2 | 315853.98,4094996.9 315854.05,4094996.97 315854.1,4094997.39
3 | 315854.05,4094996.97 315853.98,4094996.9 315854.42,4094996.78
4 | 315854.35,4094997.14 315854.22,4094997.13 315854.25,4094997.11
5 | 315854.1,4094997.39 315854.22,4094997.13 315854.35,4094997.14
6 | 315854.22,4094997.13 315854.05,4094996.97 315854.25,4094997.11
7 | 315854.1,4094997.39 315854.35,4094997.14 315854.54,4094997.03
8 | 315854.25,4094997.11 315854.05,4094996.97 315854.42,4094996.78
9 | 315854.48,4094996.82 315854.52,4094997.02 315854.42,4094996.78
10 | 315854.42,4094996.78 315854.52,4094997.02 315854.25,4094997.11
11 | 315854.52,4094997.02 315854.35,4094997.14 315854.25,4094997.11
12 | 315854.54,4094997.03 315854.52,4094997.02 315854.48,4094996.82
13 | 315854.54,4094997.03 315854.35,4094997.14 315854.52,4094997.02
14 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt16.points:
--------------------------------------------------------------------------------
1 | 315852.67,4094997.53
2 | 315852.94,4094997.43
3 | 315852.77,4094997.66
4 | 315852.85,4094997.59
5 | 315852.88,4094997.71
6 | 315852.9,4094997.73
7 | 315852.97,4094997.73
8 | 315852.62,4094997.76
9 | 315852.96,4094998.1
10 | 315852.99,4094997.9
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt17.points:
--------------------------------------------------------------------------------
1 | 315853.62,4094994.34
2 | 315853.68,4094994.46
3 | 315853.54,4094994.63
4 | 315853.55,4094994.63
5 | 315853.65,4094994.57
6 | 315853.45,4094994.75
7 | 315853.62,4094994.64
8 | 315853.49,4094994.95
9 | 315853.65,4094994.86
10 | 315853.66,4094994.94
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt2.points:
--------------------------------------------------------------------------------
1 | 11,89
2 | 18,69
3 | 22,25
4 | 27,75
5 | 34,13
6 | 49,95
7 | 58,14
8 | 65,50
9 | 75,25
10 | 95,54
11 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt2.triangles:
--------------------------------------------------------------------------------
1 | 65,50 34,13 58,14
2 | 22,25 18,69 11,89
3 | 18,69 27,75 11,89
4 | 27,75 49,95 11,89
5 | 65,50 22,25 34,13
6 | 27,75 65,50 49,95
7 | 49,95 65,50 95,54
8 | 75,25 95,54 65,50
9 | 75,25 65,50 58,14
10 | 27,75 18,69 65,50
11 | 65,50 18,69 22,25
12 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt3.points:
--------------------------------------------------------------------------------
1 | 13,691
2 | 29,447
3 | 37,387
4 | 49,245
5 | 67,29
6 | 74,336
7 | 77,677
8 | 78,941
9 | 84,875
10 | 95,844
11 | 98,232
12 | 105,258
13 | 112,62
14 | 117,669
15 | 125,653
16 | 135,220
17 | 141,710
18 | 143,747
19 | 143,888
20 | 155,645
21 | 198,691
22 | 207,616
23 | 223,380
24 | 241,320
25 | 267,773
26 | 271,534
27 | 286,641
28 | 290,151
29 | 294,859
30 | 297,662
31 | 298,21
32 | 319,653
33 | 319,877
34 | 325,862
35 | 331,297
36 | 358,118
37 | 358,478
38 | 360,948
39 | 380,834
40 | 387,788
41 | 408,517
42 | 416,646
43 | 428,753
44 | 440,75
45 | 441,6
46 | 451,718
47 | 457,551
48 | 473,262
49 | 477,481
50 | 491,761
51 | 503,882
52 | 508,29
53 | 515,53
54 | 517,911
55 | 524,947
56 | 536,565
57 | 539,791
58 | 555,730
59 | 577,778
60 | 589,712
61 | 602,862
62 | 605,371
63 | 608,737
64 | 616,958
65 | 633,790
66 | 652,260
67 | 667,67
68 | 668,514
69 | 671,223
70 | 688,963
71 | 692,223
72 | 702,81
73 | 715,275
74 | 719,854
75 | 726,580
76 | 730,725
77 | 730,865
78 | 734,778
79 | 738,72
80 | 745,245
81 | 784,563
82 | 789,243
83 | 792,450
84 | 797,557
85 | 835,541
86 | 860,373
87 | 872,315
88 | 901,241
89 | 905,152
90 | 913,357
91 | 916,723
92 | 916,871
93 | 938,635
94 | 953,602
95 | 957,222
96 | 960,375
97 | 978,901
98 | 984,762
99 | 986,4
100 | 986,593
101 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt3.unsafe_sites:
--------------------------------------------------------------------------------
1 | 1
2 | 1
3 | 1
4 | 1
5 | 1
6 | 1
7 | 1
8 | 1
9 | 1
10 | 1
11 | 1
12 | 0
13 | 1
14 | 0
15 | 1
16 | 0
17 | 0
18 | 1
19 | 1
20 | 1
21 | 0
22 | 1
23 | 0
24 | 0
25 | 0
26 | 0
27 | 0
28 | 1
29 | 1
30 | 0
31 | 1
32 | 0
33 | 1
34 | 0
35 | 0
36 | 1
37 | 0
38 | 1
39 | 0
40 | 0
41 | 0
42 | 0
43 | 0
44 | 1
45 | 1
46 | 0
47 | 0
48 | 0
49 | 0
50 | 0
51 | 1
52 | 1
53 | 1
54 | 1
55 | 1
56 | 0
57 | 0
58 | 0
59 | 0
60 | 0
61 | 0
62 | 0
63 | 0
64 | 1
65 | 0
66 | 0
67 | 1
68 | 0
69 | 0
70 | 1
71 | 0
72 | 1
73 | 0
74 | 0
75 | 0
76 | 0
77 | 1
78 | 0
79 | 1
80 | 0
81 | 0
82 | 0
83 | 0
84 | 0
85 | 1
86 | 1
87 | 0
88 | 1
89 | 1
90 | 1
91 | 1
92 | 1
93 | 1
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 | 1
101 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt4.points:
--------------------------------------------------------------------------------
1 | 0,750
2 | 50,650
3 | 100,0
4 | 100,200
5 | 150,100
6 | 150,450
7 | 200,450
8 | 200,750
9 | 250,100
10 | 250,700
11 | 350,250
12 | 350,600
13 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt4.triangles:
--------------------------------------------------------------------------------
1 | 0.0,750.0 200.0,750.0 50.0,650.0
2 | 0.0,750.0 50.0,650.0 100.0,200.0
3 | 0.0,750.0 100.0,200.0 100.0,0.0
4 | 50.0,650.0 200.0,750.0 250.0,700.0
5 | 50.0,650.0 250.0,700.0 150.0,450.0
6 | 50.0,650.0 150.0,450.0 100.0,200.0
7 | 100.0,0.0 100.0,200.0 150.0,100.0
8 | 100.0,0.0 150.0,100.0 250.0,100.0
9 | 100.0,200.0 150.0,450.0 200.0,450.0
10 | 100.0,200.0 200.0,450.0 350.0,250.0
11 | 100.0,200.0 350.0,250.0 250.0,100.0
12 | 100.0,200.0 250.0,100.0 150.0,100.0
13 | 150.0,450.0 250.0,700.0 200.0,450.0
14 | 200.0,450.0 250.0,700.0 350.0,600.0
15 | 200.0,450.0 350.0,600.0 350.0,250.0
16 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt5.points:
--------------------------------------------------------------------------------
1 | 25,17
2 | 26,17
3 | 26,21
4 | 27,19
5 | 28,19
6 | 29,19
7 | 30,17
8 | 30,23
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt5.triangles:
--------------------------------------------------------------------------------
1 | 25,17 26,21 27,19
2 | 27,19 26,21 28,19
3 | 30,17 28,19 29,19
4 | 30,17 29,19 30,23
5 | 26,21 30,23 28,19
6 | 28,19 30,23 29,19
7 | 25,17 26,17 27,19
8 | 26,17 27,19 28,19
9 | 26,17 28,19 30,17
10 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt6.points:
--------------------------------------------------------------------------------
1 | 830,840
2 | 830,870
3 | 840,850
4 | 850,850
5 | 870,860
6 | 880,820
7 | 881,840
8 | 880,860
9 | 880,870
10 | 910,840
11 | 910,860
12 | 930,820
13 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt6.triangles:
--------------------------------------------------------------------------------
1 | 830,840 830,870 840,850
2 | 830,840 840,850 850,850
3 | 830,840 850,850 880,820
4 | 830,870 880,870 870,860
5 | 830,870 870,860 850,850
6 | 830,870 850,850 840,850
7 | 850,850 870,860 881,840
8 | 850,850 881,840 880,820
9 | 870,860 880,870 880,860
10 | 870,860 880,860 881,840
11 | 880,820 881,840 910,840
12 | 880,820 910,840 930,820
13 | 881,840 880,860 910,860
14 | 881,840 910,860 910,840
15 | 880,860 880,870 910,860
16 | 910,840 910,860 930,820
17 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt7.points:
--------------------------------------------------------------------------------
1 | 710,440
2 | 710,450
3 | 720,440
4 | 730,420
5 | 740,430
6 | 750,430
7 | 760,460
8 | 770,430
9 | 780,490
10 | 790,440
11 | 800,490
12 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt7.triangles:
--------------------------------------------------------------------------------
1 | 730,420 710,440 720,440
2 | 730,420 720,440 740,430
3 | 730,420 740,430 750,430
4 | 730,420 750,430 770,430
5 | 740,430 720,440 760,460
6 | 740,430 760,460 750,430
7 | 710,440 710,450 720,440
8 | 710,450 760,460 720,440
9 | 750,430 760,460 770,430
10 | 770,430 760,460 790,440
11 | 790,440 760,460 780,490
12 | 790,440 780,490 800,490
13 | 710,450 780,490 760,460
14 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt8.points:
--------------------------------------------------------------------------------
1 | 15,90
2 | 23,46
3 | 31,78
4 | 33,86
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt8.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt8.triangles:
--------------------------------------------------------------------------------
1 | 15,90 23,46 31,78
2 | 15,90 31,78 33,86
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt9.points:
--------------------------------------------------------------------------------
1 | 24,40
2 | 26,38
3 | 27,42
4 | 28,38
5 | 28,41
6 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt9.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/src/test/resources/Delaunay/test_dt9.triangles:
--------------------------------------------------------------------------------
1 | 24,40 27,42 26,38
2 | 26,38 27,42 28,41
3 | 26,38 28,41 28,38
4 |
--------------------------------------------------------------------------------
/src/test/resources/polys.osm:
--------------------------------------------------------------------------------
1 | 1 POLYGON((1 1, 2 1, 2 2, 1 2, 1 1))
2 | 2 EMPTY
3 |
--------------------------------------------------------------------------------
/src/test/resources/test.points:
--------------------------------------------------------------------------------
1 | 1,3
2 | 3,8
3 | 4,3
4 | 4,11
5 | 5,8
6 | 5,12
7 | 6,6
8 | 9,8
9 | 12,2
10 | 12,6
11 | 12,10
--------------------------------------------------------------------------------
/src/test/resources/test.rect:
--------------------------------------------------------------------------------
1 | 913,16,924,51
2 | 953,104,1000.0,116
3 | 200,728,210,767
4 | 557,137,619,166
5 | 387,717,468,788
6 | 557,668,584,725
7 | 277,145,324,246
8 | 784,981,830,1000.0
9 | 544,571,627,620
10 | 617,76,697,101
11 | 309,364,368,454
12 | 133,905,192,909
13 | 954,160,1000.0,239
14 | 585,760,655,767
15 |
--------------------------------------------------------------------------------
/src/test/resources/test2.points:
--------------------------------------------------------------------------------
1 | 1,3
2 | 3,8
3 | 4,3
4 | 4,11
5 | 5,8
6 | 5,12
7 | 6,6
8 | 9,8
9 | 12,2
10 | 12,6
11 | 12,10
12 | 11,3
13 | 13,8
14 | 14,3
15 | 14,11
16 | 15,8
17 | 15,12
18 | 16,6
19 | 19,8
20 | 22,2
21 | 22,6
22 | 22,10
--------------------------------------------------------------------------------
/src/test/resources/test3.points:
--------------------------------------------------------------------------------
1 | 1,2
2 | 2,8
3 | 3,4
4 | 4,11
5 | 6,9
6 | 5,12
7 | 7,5
8 | 8,6
9 | 13,3
10 | 12,7
--------------------------------------------------------------------------------
/src/test/resources/test4.points:
--------------------------------------------------------------------------------
1 | 101.7075756,3.2407152
2 | -4.3652792,55.8420373
3 | -94.72574289,38.95817385
4 | -51.0156588,-29.6313354
5 | -74.1029542,4.7358407
6 | 129.64155741,61.9750933
7 | 98.6366008,3.5731113
8 | -82.3542909,32.5865409
9 | -76.59426,39.3669394
10 | 131.92519757,43.12760003
11 | -117.8988082,33.79758228
12 | -72.6583878,41.69924422
13 | -0.45151666,49.3351327
14 | 136.8315923,35.2227054
--------------------------------------------------------------------------------