├── share
│   ├── proj.db
│   ├── GL27
│   ├── CH
│   ├── ITRF2000
│   ├── proj.ini
│   ├── ITRF2014
│   ├── other.extra
│   ├── ITRF2008
│   ├── ITRF2020
│   ├── nad.lst
│   ├── world
│   └── triangulation.schema.json
├── internal
│   ├── las
│   │   ├── testdata
│   │   │   ├── las-12-pf1.las
│   │   │   └── las-12-pf2.las
│   │   ├── golas
│   │   │   ├── testdata
│   │   │   │   ├── 1.1_0.las
│   │   │   │   ├── 1.1_1.las
│   │   │   │   ├── 1.2_0.las
│   │   │   │   ├── 1.2_1.las
│   │   │   │   ├── 1.2_2.las
│   │   │   │   ├── 1.2_3.las
│   │   │   │   ├── simple.las
│   │   │   │   ├── epsg_4326.las
│   │   │   │   ├── extrabytes.las
│   │   │   │   ├── las-12-pf1.las
│   │   │   │   ├── las-12-pf2.las
│   │   │   │   ├── las-12-pf3.las
│   │   │   │   ├── las-13-pf1.las
│   │   │   │   ├── las-13-pf4.las
│   │   │   │   ├── las-13-pf5.las
│   │   │   │   ├── las-14-pf2.las
│   │   │   │   ├── las-14-pf4.las
│   │   │   │   ├── lots_of_vlr.las
│   │   │   │   ├── test_utm16.las
│   │   │   │   ├── test_utm17.las
│   │   │   │   ├── 1.2-with-color.las
│   │   │   │   ├── bad_vlr_count.las
│   │   │   │   ├── las-14-pf5-sf.las
│   │   │   │   ├── las-14-pf7-sf.las
│   │   │   │   ├── synthetic_test.las
│   │   │   │   ├── test_epsg_4047.las
│   │   │   │   ├── 1.2-empty-geotiff-vlrs.las
│   │   │   │   └── 1.2-with-color-clipped.las
│   │   │   ├── util.go
│   │   │   ├── geotiff.go
│   │   │   ├── io.go
│   │   │   └── point.go
│   │   ├── mocks.go
│   │   ├── reader_test.go
│   │   └── reader.go
│   ├── writer
│   │   ├── testdata
│   │   │   ├── content.glb
│   │   │   └── content.pnts
│   │   ├── work_unit.go
│   │   ├── tileset_structs.go
│   │   ├── mocks.go
│   │   ├── producer.go
│   │   ├── producer_test.go
│   │   ├── writer.go
│   │   ├── gltf.go
│   │   ├── writer_test.go
│   │   ├── consumer.go
│   │   ├── pnts.go
│   │   └── consumer_test.go
│   ├── utils
│   │   ├── test
│   │   │   └── defaults.go
│   │   ├── binary.go
│   │   ├── binary_test.go
│   │   ├── io_test.go
│   │   ├── misc_test.go
│   │   ├── io.go
│   │   └── misc.go
│   ├── conv
│   │   └── coor
│   │       ├── conv.go
│   │       └── proj
│   │           ├── proj.go
│   │           └── proj_test.go
│   ├── tree
│   │   ├── grid
│   │   │   └── bboxbuilder_test.go
│   │   ├── mocks.go
│   │   └── tree.go
│   └── geom
│       ├── point_test.go
│       ├── bbox.go
│       ├── utils.go
│       ├── point.go
│       ├── utils_test.go
│       └── bbox_test.go
├── tiler
│   ├── model
│   │   ├── point_test.go
│   │   ├── point.go
│   │   ├── vector.go
│   │   ├── transform.go
│   │   ├── vector_test.go
│   │   └── transform_test.go
│   ├── mutator
│   │   ├── zoffset.go
│   │   ├── zoffset_test.go
│   │   ├── pipeline.go
│   │   ├── subsampler.go
│   │   ├── mutator.go
│   │   ├── subsampler_test.go
│   │   └── pipeline_test.go
│   ├── options_test.go
│   ├── mocks.go
│   ├── options.go
│   ├── tiler_test.go
│   └── tiler.go
├── .gitignore
├── go.mod
├── version
│   └── version.go
├── go.sum
├── Dockerfile
├── cmd
│   └── main_test.go
└── DEVELOPMENT.md

/share/proj.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/share/proj.db
--------------------------------------------------------------------------------
/internal/las/testdata/las-12-pf1.las:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/testdata/las-12-pf1.las
--------------------------------------------------------------------------------
/internal/las/testdata/las-12-pf2.las:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/testdata/las-12-pf2.las
--------------------------------------------------------------------------------
/internal/writer/testdata/content.glb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/writer/testdata/content.glb
--------------------------------------------------------------------------------
/internal/las/golas/testdata/1.1_0.las:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.1_0.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.1_1.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.1_1.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2_0.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2_0.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2_1.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2_1.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2_2.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2_2.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2_3.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2_3.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/simple.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/simple.las -------------------------------------------------------------------------------- /internal/writer/testdata/content.pnts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/writer/testdata/content.pnts -------------------------------------------------------------------------------- /internal/las/golas/testdata/epsg_4326.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/epsg_4326.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/extrabytes.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/extrabytes.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-12-pf1.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-12-pf1.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-12-pf2.las: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-12-pf2.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-12-pf3.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-12-pf3.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-13-pf1.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-13-pf1.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-13-pf4.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-13-pf4.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-13-pf5.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-13-pf5.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-14-pf2.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-14-pf2.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-14-pf4.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-14-pf4.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/lots_of_vlr.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/lots_of_vlr.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/test_utm16.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/test_utm16.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/test_utm17.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/test_utm17.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2-with-color.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2-with-color.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/bad_vlr_count.las: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/bad_vlr_count.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-14-pf5-sf.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-14-pf5-sf.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/las-14-pf7-sf.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/las-14-pf7-sf.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/synthetic_test.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/synthetic_test.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/test_epsg_4047.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/test_epsg_4047.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2-empty-geotiff-vlrs.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2-empty-geotiff-vlrs.las -------------------------------------------------------------------------------- /internal/las/golas/testdata/1.2-with-color-clipped.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mfbonfigli/gocesiumtiler/HEAD/internal/las/golas/testdata/1.2-with-color-clipped.las -------------------------------------------------------------------------------- /tiler/model/point_test.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import "testing" 4 | 5 | func TestPointVector(t *testing.T) { 6 | p := Point{ 7 | X: 123.5, 8 | Y: 121.5, 9 | Z: -4986.5, 10 | } 11 | expected := Vector{ 12 | X: 123.5, 13 | Y: 121.5, 14 | Z: -4986.5, 15 | } 16 | if actual := p.Vector(); actual != expected { 17 | t.Errorf("expected vector %v, got %v", expected, actual) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and plugins 2 | *.exe 3 | *.exe~ 4 | *.dll 5 | *.so 6 | *.dylib 7 | 8 | # Test binary, built with `go test -c` 9 | *.test 10 | 11 | # Output of the go coverage tool, specifically when used with LiteIDE 12 | *.out 13 | 14 | # Dependency directories (remove the comment below to include it) 15 | # vendor/ 16 | .idea/workspace.xml 17 | .idea/encodings.xml 18 | output/ 19 | build/ 20 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/mfbonfigli/gocesiumtiler/v2 2 | 3 | go 1.22 4 | 5 | toolchain go1.22.2 6 | 7 | require ( 8 | github.com/qmuntal/gltf v0.25.0 9 | 
github.com/twpayne/go-proj/v10 v10.4.0 10 | github.com/urfave/cli/v2 v2.27.2 11 | ) 12 | 13 | require ( 14 | github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect 15 | github.com/russross/blackfriday/v2 v2.1.0 // indirect 16 | github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect 17 | ) 18 | -------------------------------------------------------------------------------- /internal/utils/test/defaults.go: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import ( 4 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor" 5 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor/proj" 6 | ) 7 | 8 | // GetTestCoordinateConverter returns the function to use to convert coordinates in tests 9 | func GetTestCoordinateConverterFactory() coor.ConverterFactory { 10 | return func() (coor.Converter, error) { 11 | return proj.NewProjCoordinateConverter() 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /internal/writer/work_unit.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 5 | ) 6 | 7 | // WorkUnit contains the minimal data needed to produce a single 3d tile, i.e. 8 | // a binary content.pnts file, a tileset.json file 9 | type WorkUnit struct { 10 | // Node contains the data for the current tile 11 | Node tree.Node 12 | // BasePath is the path of the folder where to write the content.pnts and tileset.json files for this workunit 13 | BasePath string 14 | } 15 | -------------------------------------------------------------------------------- /tiler/mutator/zoffset.go: -------------------------------------------------------------------------------- 1 | package mutator 2 | 3 | import "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 4 | 5 | // ZOffset is a mutator that shifts the points vertically for the given offset 6 | type ZOffset struct { 7 | Offset float32 8 | } 9 | 10 | func NewZOffset(offset float32) *ZOffset { 11 | return &ZOffset{ 12 | Offset: offset, 13 | } 14 | } 15 | 16 | func (z *ZOffset) Mutate(pt model.Point, localToGlobal model.Transform) (model.Point, bool) { 17 | pt.Z += z.Offset 18 | return pt, true 19 | } 20 | -------------------------------------------------------------------------------- /internal/conv/coor/conv.go: -------------------------------------------------------------------------------- 1 | package coor 2 | 3 | import ( 4 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 5 | ) 6 | 7 | type Converter interface { 8 | Transform(sourceCRS string, targetCRS string, coord model.Vector) (model.Vector, error) 9 | ToWGS84Cartesian(sourceCRS string, coord model.Vector) (model.Vector, error) 10 | Cleanup() 11 | } 12 | 13 | // ConverterFactory returns a new CoordinateConverter that should only be used in the same goroutine 14 | // to avoid race conditions 15 | type ConverterFactory func() (Converter, error) 16 | -------------------------------------------------------------------------------- /version/version.go: -------------------------------------------------------------------------------- 1 | package version 2 | 3 | type TilesetVersion string 4 | 5 | const ( 6 | TilesetVersion_1_0 TilesetVersion = "1.0" 7 | TilesetVersion_1_1 TilesetVersion = "1.1" 8 | ) 9 | 10 | func (v TilesetVersion) String() string { 11 | return string(v) 12 | } 13 | 14 | func Parse(s string) (TilesetVersion, bool) { 15 | switch s { 16 | 
	case string(TilesetVersion_1_0):
17 | 		return TilesetVersion_1_0, true
18 | 	case string(TilesetVersion_1_1):
19 | 		return TilesetVersion_1_1, true
20 | 	default:
21 | 		return "", false
22 | 	}
23 | }
24 | 
--------------------------------------------------------------------------------
/tiler/mutator/zoffset_test.go:
--------------------------------------------------------------------------------
1 | package mutator
2 | 
3 | import (
4 | 	"testing"
5 | 
6 | 	"github.com/mfbonfigli/gocesiumtiler/v2/internal/geom"
7 | 	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/model"
8 | )
9 | 
10 | func TestZOffset(t *testing.T) {
11 | 	actual, keep := NewZOffset(2).Mutate(geom.NewPoint(1, 2, 3, 1, 2, 3, 4, 5), model.Transform{})
12 | 	expected := geom.NewPoint(1, 2, 5, 1, 2, 3, 4, 5)
13 | 	if actual != expected {
14 | 		t.Errorf("expected %v, got %v", expected, actual)
15 | 	}
16 | 	if !keep {
17 | 		t.Errorf("expected keep to be true but is false")
18 | 	}
19 | }
20 | 
--------------------------------------------------------------------------------
/tiler/model/point.go:
--------------------------------------------------------------------------------
1 | package model
2 | 
3 | // Point models a point cloud point expressed in local, single precision, coordinates
4 | type Point struct {
5 | 	X              float32
6 | 	Y              float32
7 | 	Z              float32
8 | 	R              uint8
9 | 	G              uint8
10 | 	B              uint8
11 | 	Intensity      uint8
12 | 	Classification uint8
13 | }
14 | 
15 | // Vector returns a Vector representation of the position of the point in the local coordinate space
16 | func (p Point) Vector() Vector {
17 | 	return Vector{
18 | 		X: float64(p.X),
19 | 		Y: float64(p.Y),
20 | 		Z: float64(p.Z),
21 | 	}
22 | }
23 | 
--------------------------------------------------------------------------------
/tiler/mutator/pipeline.go:
--------------------------------------------------------------------------------
1 | package mutator
2 | 
3 | import (
4 | 	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/model"
5 | )
6 | 
7 | // Pipeline is a mutator that applies all registered mutators sequentially
8 | // and returns the result as output
9 | type Pipeline struct {
10 | 	mutators []Mutator
11 | }
12 | 
13 | func NewPipeline(m ...Mutator) *Pipeline {
14 | 	return &Pipeline{
15 | 		mutators: m,
16 | 	}
17 | }
18 | 
19 | func (p *Pipeline) Mutate(pt model.Point, localToGlobal model.Transform) (model.Point, bool) {
20 | 	for _, m := range p.mutators {
21 | 		keep := true
22 | 		pt, keep = m.Mutate(pt, localToGlobal)
23 | 		if !keep {
24 | 			return pt, false
25 | 		}
26 | 	}
27 | 	return pt, true
28 | }
29 | 
--------------------------------------------------------------------------------
/internal/utils/binary.go:
--------------------------------------------------------------------------------
1 | package utils
2 | 
3 | import (
4 | 	"encoding/binary"
5 | 	"io"
6 | 	"math"
7 | )
8 | 
9 | // WriteIntAs4ByteNumber writes the 4 byte array corresponding to the given int value to the given writer
10 | func WriteIntAs4ByteNumber(i int, w io.Writer) error {
11 | 	b := make([]uint8, 4)
12 | 	b[0] = uint8(i)
13 | 	b[1] = uint8(i >> 8)
14 | 	b[2] = uint8(i >> 16)
15 | 	b[3] = uint8(i >> 24)
16 | 	_, err := w.Write(b)
17 | 	return err
18 | }
19 | 
20 | // WriteFloat32LittleEndian writes a float32 number as a float32
21 | // in little endian notation to the given writer
22 | func WriteFloat32LittleEndian(n float32, w io.Writer) error {
23 | 	bytes := make([]byte, 4)
24 | 	binary.LittleEndian.PutUint32(bytes, math.Float32bits(n))
25 | 	_, err := w.Write(bytes)
26 | 	return err
27 | }
28 | 
--------------------------------------------------------------------------------
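Note on the two helpers above: both emit little endian byte sequences, WriteIntAs4ByteNumber by shifting manually and WriteFloat32LittleEndian via encoding/binary. The snippet below is a minimal usage sketch, not a file from the repository; because internal/utils is an internal package it is only callable from code inside this module, and the package name, function name, input values and expected-output comments are illustrative only.

package example

import (
	"bytes"
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/utils"
)

func Demo() error {
	var buf bytes.Buffer
	// 42 -> 2a 00 00 00 (4-byte little endian integer)
	if err := utils.WriteIntAs4ByteNumber(42, &buf); err != nil {
		return err
	}
	// 1.5 -> 00 00 c0 3f (IEEE 754 float32, little endian)
	if err := utils.WriteFloat32LittleEndian(1.5, &buf); err != nil {
		return err
	}
	fmt.Printf("% x\n", buf.Bytes()) // prints: 2a 00 00 00 00 00 c0 3f
	return nil
}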
/internal/las/mocks.go: -------------------------------------------------------------------------------- 1 | package las 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 7 | ) 8 | 9 | type MockLasReader struct { 10 | Cur int 11 | Pts []geom.Point64 12 | CRS string 13 | CloseCalled bool 14 | } 15 | 16 | // NumberOfPoints returns the number of points stored in the LAS file 17 | func (m *MockLasReader) NumberOfPoints() int { 18 | return len(m.Pts) 19 | } 20 | func (m *MockLasReader) GetCRS() string { 21 | return m.CRS 22 | } 23 | func (m *MockLasReader) GetNext() (geom.Point64, error) { 24 | if m.Cur < len(m.Pts) { 25 | m.Cur++ 26 | return m.Pts[m.Cur-1], nil 27 | } 28 | return geom.Point64{}, fmt.Errorf("point not available") 29 | } 30 | func (m *MockLasReader) Close() { 31 | m.CloseCalled = true 32 | } 33 | -------------------------------------------------------------------------------- /share/GL27: -------------------------------------------------------------------------------- 1 | # SCCSID @(#)GL27 1.1 93/08/25 GIE REL 2 | # Great Lakes Grids 3 | +lastupdate=1993-08-25 4 | # Lake Erie, Ontario and St. Lawrence River. 5 | proj=omerc ellps=clrk66 k_0=0.9999 6 | lonc=78d00'W lat_0=44d00'N alpha=55d40' 7 | x_0=-3950000 y_0=-3430000 8 | no_defs <> 9 | # Lake Huron 10 | proj=omerc ellps=clrk66 k_0=0.9999 11 | lonc=82d00'W lat_0=43d00'N alpha=350d37' 12 | x_0=1200000 y_0=-3500000 13 | no_defs <> 14 | # Lake Michigan 15 | proj=omerc ellps=clrk66 k_0=0.9999 16 | lonc=87d00'W lat_0=44d00'N alpha=15d00' 17 | x_0=-1000000 y_0=-4300000 18 | no_defs <> 19 | # Lake Superior, Lake of the Woods 20 | proj=omerc ellps=clrk66 k_0=0.9999 21 | lonc=88d50'0.256"W lat_0=47d12'21.554"N alpha=285d41'42.593" 22 | x_0=9000000 y_0=-1600000 23 | no_defs <> 24 | -------------------------------------------------------------------------------- /internal/utils/binary_test.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "reflect" 7 | "testing" 8 | ) 9 | 10 | func TestWriteIntAs4ByteNumber(t *testing.T) { 11 | var b bytes.Buffer 12 | w := bufio.NewWriter(&b) 13 | 14 | err := WriteIntAs4ByteNumber(123456789, w) 15 | if err != nil { 16 | t.Fatalf("unexpected err %v", err) 17 | } 18 | w.Flush() 19 | if !reflect.DeepEqual(b.Bytes(), []byte{21, 205, 91, 7}) { 20 | t.Errorf("%v", b.Bytes()) 21 | } 22 | } 23 | 24 | func TestWriteFloat32LittleEndian(t *testing.T) { 25 | var b bytes.Buffer 26 | w := bufio.NewWriter(&b) 27 | 28 | err := WriteFloat32LittleEndian(123456789, w) 29 | if err != nil { 30 | t.Fatalf("unexpected err %v", err) 31 | } 32 | w.Flush() 33 | if !reflect.DeepEqual(b.Bytes(), []byte{163, 121, 235, 76}) { 34 | t.Errorf("%v", b) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /tiler/mutator/subsampler.go: -------------------------------------------------------------------------------- 1 | package mutator 2 | 3 | import ( 4 | "math/rand" 5 | "sync/atomic" 6 | 7 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 8 | ) 9 | 10 | type Subsampler struct { 11 | Percentage float64 12 | first *atomic.Bool 13 | } 14 | 15 | func NewSubsampler(percentage float64) *Subsampler { 16 | first := atomic.Bool{} 17 | first.Store(true) 18 | return &Subsampler{ 19 | Percentage: percentage, 20 | first: &first, 21 | } 22 | } 23 | 24 | func (s *Subsampler) Mutate(pt model.Point, localToGlobal model.Transform) (model.Point, bool) { 25 | if 
s.first.Load() { 26 | // always take the first point to ensure the point cloud has at least one point 27 | s.first.Swap(false) 28 | return pt, true 29 | } 30 | if rand.Float64() < s.Percentage { 31 | return pt, true 32 | } 33 | return pt, false 34 | } 35 | -------------------------------------------------------------------------------- /tiler/mutator/mutator.go: -------------------------------------------------------------------------------- 1 | package mutator 2 | 3 | import ( 4 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 5 | ) 6 | 7 | // Mutator defines a generic interface to manipulate coordinates or attributes of points. 8 | type Mutator interface { 9 | // Mutate transforms or discards the points in input. 10 | // 11 | // The function receives in input the point, with coordinates expressed in 12 | // the local CRS with Z-up, and a transform object that can be used to 13 | // forward transform from the local CRS to the global EPSG 4978 CRS and inverse transform 14 | // from the global CRS to the local CRS. 15 | // 16 | // The function returns the manipulated point and true if the point is to be used 17 | // or false if the point should be discarded from the final point cloud 18 | Mutate(pt model.Point, localToGlobal model.Transform) (model.Point, bool) 19 | } 20 | -------------------------------------------------------------------------------- /tiler/model/vector.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import "math" 4 | 5 | // Vector represents a Vector in a 3D space with double precision components 6 | type Vector struct { 7 | X float64 8 | Y float64 9 | Z float64 10 | } 11 | 12 | // Unit returns the unit vector with same direction as the vector 13 | func (v Vector) Unit() Vector { 14 | n := v.Norm() 15 | return Vector{ 16 | X: v.X / n, 17 | Y: v.Y / n, 18 | Z: v.Z / n, 19 | } 20 | } 21 | 22 | // Norm return the euclidean norm of the vector 23 | func (v Vector) Norm() float64 { 24 | return math.Sqrt(math.Pow(v.X, 2) + math.Pow(v.Y, 2) + math.Pow(v.Z, 2)) 25 | } 26 | 27 | // Cross returns the result of the cross product with the vector passed as input 28 | func (v Vector) Cross(w Vector) Vector { 29 | return Vector{ 30 | X: v.Y*w.Z - v.Z*w.Y, 31 | Y: v.Z*w.X - v.X*w.Z, 32 | Z: v.X*w.Y - v.Y*w.X, 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /internal/utils/io_test.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "os" 5 | "path/filepath" 6 | "reflect" 7 | "testing" 8 | ) 9 | 10 | func TestFindLasFilesInFolder(t *testing.T) { 11 | tmp, err := os.MkdirTemp(os.TempDir(), "tst") 12 | if err != nil { 13 | t.Fatalf("unexpected error %v", err) 14 | } 15 | t.Cleanup(func() { 16 | os.RemoveAll(tmp) 17 | }) 18 | 19 | TouchFile(filepath.Join(tmp, "test0.las")) 20 | TouchFile(filepath.Join(tmp, "test0.xyz")) 21 | TouchFile(filepath.Join(tmp, "test1.LAS")) 22 | TouchFile(filepath.Join(tmp, "test2.LAS")) 23 | 24 | files, err := FindLasFilesInFolder(tmp) 25 | if err != nil { 26 | t.Errorf("unexpected error: %v", err) 27 | } 28 | expected := []string{ 29 | filepath.Join(tmp, "test0.las"), 30 | filepath.Join(tmp, "test1.LAS"), 31 | filepath.Join(tmp, "test2.LAS"), 32 | } 33 | if !reflect.DeepEqual(expected, files) { 34 | t.Errorf("expected %v got %v", expected, files) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /internal/utils/misc_test.go: 
-------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 7 | ) 8 | 9 | func TestCompareWithTolerance(t *testing.T) { 10 | diff, err := CompareWithTolerance(1, 2, 3) 11 | if err != nil { 12 | t.Errorf("unexpected err %v", err) 13 | } 14 | if diff != 1 { 15 | t.Errorf("expected diff %f, got %f", 1.0, diff) 16 | } 17 | 18 | diff, err = CompareWithTolerance(1, 2, 0.5) 19 | if err == nil { 20 | t.Errorf("expected error but got none") 21 | } 22 | if diff != 1 { 23 | t.Errorf("expected diff %f, got %f", 1.0, diff) 24 | } 25 | } 26 | 27 | func TestCompareCoord(t *testing.T) { 28 | actual := model.Vector{X: 1, Y: 1, Z: 1} 29 | reference := model.Vector{X: 2, Y: 3, Z: 4} 30 | err := CompareCoord(actual, reference, 5) 31 | if err != nil { 32 | t.Errorf("unexpected err %v", err) 33 | } 34 | err = CompareCoord(actual, reference, 1) 35 | if err == nil { 36 | t.Errorf("expected error but got none") 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tiler/mutator/subsampler_test.go: -------------------------------------------------------------------------------- 1 | package mutator 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 7 | ) 8 | 9 | func TestSubsample(t *testing.T) { 10 | s := NewSubsampler(0.1) 11 | pt := model.Point{X: 1, Y: 2, Z: 3} 12 | out, keep := s.Mutate(pt, model.Transform{}) 13 | if !keep { 14 | // first point should always be kept 15 | t.Error("expected first Mutated point to be kept but was not") 16 | } 17 | if out != pt { 18 | t.Errorf("expected point %v, got %v", pt, out) 19 | } 20 | 21 | samples := 100000 22 | kept := 0 23 | for i := 0; i < samples; i++ { 24 | out, keep := s.Mutate(pt, model.Transform{}) 25 | if keep { 26 | kept++ 27 | if out != pt { 28 | t.Errorf("expected point %v, got %v", pt, out) 29 | } 30 | } 31 | } 32 | // approximately 10000 pts should have been kept (0.1 or 10%) 33 | if kept < 9000 || kept > 11000 { 34 | t.Errorf("expected approx. 
%d of samples to be kept but %d were kept", samples/10, kept) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /internal/writer/tileset_structs.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import "github.com/mfbonfigli/gocesiumtiler/v2/version" 4 | 5 | type Asset struct { 6 | Version version.TilesetVersion `json:"version"` 7 | } 8 | 9 | type Content struct { 10 | Url string `json:"uri"` 11 | } 12 | 13 | type BoundingVolume struct { 14 | Box [12]float64 `json:"box"` 15 | } 16 | 17 | type Child struct { 18 | Content Content `json:"content"` 19 | BoundingVolume BoundingVolume `json:"boundingVolume"` 20 | GeometricError float64 `json:"geometricError"` 21 | Refine string `json:"refine"` 22 | } 23 | 24 | type Root struct { 25 | Children []Child `json:"children"` 26 | Content Content `json:"content"` 27 | BoundingVolume BoundingVolume `json:"boundingVolume"` 28 | GeometricError float64 `json:"geometricError"` 29 | Refine string `json:"refine"` 30 | Transform *[16]float64 `json:"transform,omitempty"` 31 | } 32 | 33 | type Tileset struct { 34 | Asset Asset `json:"asset"` 35 | GeometricError float64 `json:"geometricError"` 36 | Root Root `json:"root"` 37 | } 38 | -------------------------------------------------------------------------------- /share/CH: -------------------------------------------------------------------------------- 1 | # This init file provides definitions for CH1903 and CH1903/LV03 2 | # projections using the distortion grids developed by Swisstopo. 3 | # See: https://shop.swisstopo.admin.ch/en/products/geo_software/GIS_info 4 | # 5 | # You'll need to download the grids separately and put in a directory 6 | # scanned by libproj. 7 | # 8 | # Note that an independent effort was made to derive an usable grid 9 | # from the CH1903->CH1903+ grid initially available from the Swisstopo 10 | # website. 
You can read about this other effort here: 11 | # http://lists.maptools.org/pipermail/proj/2012-February/006093.html 12 | # It may be of interest because the latter was by some reported as being 13 | # more accurate than the former: 14 | # http://lists.maptools.org/pipermail/proj/2012-February/006119.html 15 | # 16 | # This init file uses the official one 17 | # 18 | +origin=Swisstopo +lastupdate=2012-02-27 19 | # CH1903/LV03 20 | <1903_LV03> +proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +units=m +nadgrids=CHENyx06_ETRS.gsb +no_defs 21 | # CH1903 22 | <1903> +proj=longlat +ellps=bessel +nadgrids=CHENyx06_ETRS.gsb +no_defs <> 23 | -------------------------------------------------------------------------------- /internal/utils/io.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "os" 5 | "path/filepath" 6 | "strings" 7 | ) 8 | 9 | func CreateDirectoryIfDoesNotExist(directory string) error { 10 | if _, err := os.Stat(directory); os.IsNotExist(err) { 11 | err := os.MkdirAll(directory, 0777) 12 | if err != nil { 13 | return err 14 | } 15 | } 16 | return nil 17 | } 18 | 19 | func TouchFile(path string) error { 20 | f, err := os.Create(path) 21 | if err != nil { 22 | return err 23 | } 24 | return f.Close() 25 | } 26 | 27 | func FindLasFilesInFolder(directory string) ([]string, error) { 28 | if _, err := os.Stat(directory); err != nil { 29 | return nil, err 30 | } 31 | entries, err := os.ReadDir(directory) 32 | if err != nil { 33 | return nil, err 34 | } 35 | files := []string{} 36 | for _, e := range entries { 37 | if e.IsDir() { 38 | continue 39 | } 40 | lastIndex := -1 41 | name := e.Name() 42 | if lastIndex = strings.LastIndex(name, "."); lastIndex != -1 { 43 | ext := e.Name()[lastIndex+1:] 44 | if strings.ToLower(ext) != "las" { 45 | continue 46 | } 47 | } 48 | f := filepath.Join(directory, e.Name()) 49 | files = append(files, f) 50 | } 51 | return files, nil 52 | } 53 | -------------------------------------------------------------------------------- /tiler/mutator/pipeline_test.go: -------------------------------------------------------------------------------- 1 | package mutator 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 8 | ) 9 | 10 | type discardMutator struct{} 11 | 12 | func (p *discardMutator) Mutate(pt model.Point, t model.Transform) (model.Point, bool) { 13 | return pt, false 14 | } 15 | 16 | func TestPipeline(t *testing.T) { 17 | p := NewPipeline( 18 | NewZOffset(1.5), 19 | NewZOffset(2.5), 20 | ) 21 | actual, keep := p.Mutate(geom.NewPoint(1, 2, 3, 1, 2, 3, 4, 5), model.Transform{}) 22 | expected := geom.NewPoint(1, 2, 7, 1, 2, 3, 4, 5) 23 | if actual != expected { 24 | t.Errorf("expected %v, got %v", expected, actual) 25 | } 26 | if !keep { 27 | t.Errorf("expected keep to be true but is false") 28 | } 29 | } 30 | 31 | func TestPipelineDiscard(t *testing.T) { 32 | p := NewPipeline( 33 | NewZOffset(1.5), 34 | &discardMutator{}, 35 | NewZOffset(2.5), 36 | ) 37 | actual, keep := p.Mutate(geom.NewPoint(1, 2, 3, 1, 2, 3, 4, 5), model.Transform{}) 38 | expected := geom.NewPoint(1, 2, 4.5, 1, 2, 3, 4, 5) 39 | if actual != expected { 40 | t.Errorf("expected %v, got %v", expected, actual) 41 | } 42 | if keep { 43 | t.Errorf("expected point to be discarded but was not") 44 | } 45 | } 46 | 
-------------------------------------------------------------------------------- /tiler/options_test.go: -------------------------------------------------------------------------------- 1 | package tiler 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 7 | ) 8 | 9 | func TestOptions(t *testing.T) { 10 | m := mutator.NewSubsampler(0.5) 11 | opts := NewTilerOptions( 12 | WithCallback(func(event TilerEvent, filename string, elapsed int64, msg string) {}), 13 | WithEightBitColors(true), 14 | WithGridSize(11.1), 15 | WithMaxDepth(12), 16 | WithMinPointsPerTile(10), 17 | WithWorkerNumber(3), 18 | WithMutators([]mutator.Mutator{m}), 19 | ) 20 | 21 | if opts.callback == nil { 22 | t.Errorf("unexpected nil callback") 23 | } 24 | if opts.eightBitColors != true { 25 | t.Errorf("expected eightbitcolor to be %v got %v", true, opts.eightBitColors) 26 | } 27 | if opts.gridSize != 11.1 { 28 | t.Errorf("expected gridSize to be %v got %v", 11.1, opts.gridSize) 29 | } 30 | if opts.maxDepth != 12 { 31 | t.Errorf("expected maxDepth to be %v got %v", 12, opts.maxDepth) 32 | } 33 | if opts.minPointsPerTile != 10 { 34 | t.Errorf("expected minPointsPerTile to be %v got %v", 10, opts.minPointsPerTile) 35 | } 36 | if opts.numWorkers != 3 { 37 | t.Errorf("expected numWorkers to be %v got %v", 3, opts.numWorkers) 38 | } 39 | if opts.mutators[0] != m && len(opts.mutators) != 1 { 40 | t.Error("expected 1 mutator to be registered") 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /internal/utils/misc.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "math" 6 | 7 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 8 | ) 9 | 10 | func CompareWithTolerance(actual, expected, tolerance float64) (diff float64, err error) { 11 | if math.IsNaN(actual) { 12 | return math.NaN(), fmt.Errorf("number is NaN") 13 | } 14 | diff = math.Abs(actual - expected) 15 | if diff > math.Abs(tolerance) { 16 | err = fmt.Errorf("expected value to be within %f from %f, but got %f", tolerance, expected, actual) 17 | } 18 | return diff, err 19 | } 20 | 21 | func CompareCoord(actual model.Vector, expected model.Vector, tolerance float64) error { 22 | if diff, err := CompareWithTolerance(actual.X, expected.X, tolerance); err != nil { 23 | return fmt.Errorf("failed tolerance check on X coordinate, expected error less than %f but got %f error", tolerance, diff) 24 | } 25 | if diff, err := CompareWithTolerance(actual.Y, expected.Y, tolerance); err != nil { 26 | return fmt.Errorf("failed tolerance check on Y coordinate, expected error less than %f but got %f error", tolerance, diff) 27 | } 28 | if diff, err := CompareWithTolerance(actual.Z, expected.Z, tolerance); err != nil { 29 | return fmt.Errorf("failed tolerance check on Z coordinate, expected error less than %f but got %f error", tolerance, diff) 30 | } 31 | return nil 32 | } 33 | -------------------------------------------------------------------------------- /internal/writer/mocks.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "context" 5 | "sync" 6 | 7 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 8 | ) 9 | 10 | type MockProducer struct { 11 | Wc chan *WorkUnit 12 | Ec chan error 13 | Err error 14 | Wu *WorkUnit 15 | } 16 | 17 | func (m *MockProducer) Produce(workchan chan *WorkUnit, errchan chan error, wg *sync.WaitGroup, node 
tree.Node, ctx context.Context) { 18 | m.Wc = workchan 19 | m.Ec = errchan 20 | defer close(workchan) 21 | if m.Err != nil { 22 | errchan <- m.Err 23 | } else if m.Wu != nil { 24 | workchan <- m.Wu 25 | } 26 | wg.Done() 27 | } 28 | 29 | type MockConsumer struct { 30 | Wc chan *WorkUnit 31 | Ec chan error 32 | Err error 33 | } 34 | 35 | func (m *MockConsumer) Consume(workchan chan *WorkUnit, errchan chan error, waitGroup *sync.WaitGroup) { 36 | defer waitGroup.Done() 37 | m.Wc = workchan 38 | m.Ec = errchan 39 | if m.Err != nil { 40 | errchan <- m.Err 41 | } 42 | } 43 | 44 | type MockWriter struct { 45 | Err error 46 | Tr tree.Tree 47 | FolderName string 48 | Ctx context.Context 49 | WriteCalled bool 50 | } 51 | 52 | func (m *MockWriter) Write(t tree.Tree, folderName string, ctx context.Context) error { 53 | m.WriteCalled = true 54 | m.Tr = t 55 | m.FolderName = folderName 56 | m.Ctx = ctx 57 | return m.Err 58 | } 59 | -------------------------------------------------------------------------------- /internal/tree/grid/bboxbuilder_test.go: -------------------------------------------------------------------------------- 1 | package grid 2 | 3 | import "testing" 4 | 5 | func TestBBoxBuilderMergeWith(t *testing.T) { 6 | u := newBoundingBoxBuilder() 7 | v := newBoundingBoxBuilder() 8 | u.processPoint(1, 2, 3) 9 | u.processPoint(4, 5, 6) 10 | v.processPoint(2, -1, 2) 11 | v.processPoint(5, 4, 7) 12 | 13 | u.mergeWith(v) 14 | u.build() 15 | 16 | if actual := u.minX; actual != 1 { 17 | t.Errorf("expected minx %v, got %v", 1, actual) 18 | } 19 | if actual := u.minY; actual != -1 { 20 | t.Errorf("expected miny %v, got %v", -1, actual) 21 | } 22 | if actual := u.minZ; actual != 2 { 23 | t.Errorf("expected minz %v, got %v", 2, actual) 24 | } 25 | if actual := u.maxX; actual != 5 { 26 | t.Errorf("expected maxx %v, got %v", 5, actual) 27 | } 28 | if actual := u.maxY; actual != 5 { 29 | t.Errorf("expected maxy %v, got %v", 5, actual) 30 | } 31 | if actual := u.maxZ; actual != 7 { 32 | t.Errorf("expected maxz %v, got %v", 7, actual) 33 | } 34 | v.processPoint(-1, 10, 6) 35 | u.mergeWith(v) 36 | if actual := u.minX; actual != -1 { 37 | t.Errorf("expected minx %v, got %v", -1, actual) 38 | } 39 | if actual := u.minY; actual != -1 { 40 | t.Errorf("expected miny %v, got %v", -1, actual) 41 | } 42 | if actual := u.minZ; actual != 2 { 43 | t.Errorf("expected minz %v, got %v", 2, actual) 44 | } 45 | if actual := u.maxX; actual != 5 { 46 | t.Errorf("expected maxx %v, got %v", 5, actual) 47 | } 48 | if actual := u.maxY; actual != 10 { 49 | t.Errorf("expected maxy %v, got %v", 10, actual) 50 | } 51 | if actual := u.maxZ; actual != 7 { 52 | t.Errorf("expected maxz %v, got %v", 7, actual) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /internal/geom/point_test.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | import "testing" 4 | 5 | func TestLinkedPointStream(t *testing.T) { 6 | pt1 := &LinkedPoint{ 7 | Pt: NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 8 | } 9 | pt2 := &LinkedPoint{ 10 | Pt: NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 11 | } 12 | pt3 := &LinkedPoint{ 13 | Pt: NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 14 | } 15 | pt1.Next = pt2 16 | pt2.Next = pt3 17 | 18 | stream := NewLinkedPointStream(pt1, 3) 19 | 20 | if actual := stream.Len(); actual != 3 { 21 | t.Errorf("expected Len %d got %d", 3, actual) 22 | } 23 | 24 | if actual, err := stream.Next(); actual != pt1.Pt || err != nil { 25 | if err == nil { 26 | 
t.Errorf("expected point %v got %v", pt1.Pt, actual) 27 | } else { 28 | t.Errorf("unexpected error %v", err) 29 | 30 | } 31 | } 32 | 33 | if actual, err := stream.Next(); actual != pt2.Pt || err != nil { 34 | if err == nil { 35 | t.Errorf("expected point %v got %v", pt2.Pt, actual) 36 | } else { 37 | t.Errorf("unexpected error %v", err) 38 | 39 | } 40 | } 41 | 42 | if actual, err := stream.Next(); actual != pt3.Pt || err != nil { 43 | if err == nil { 44 | t.Errorf("expected point %v got %v", pt3.Pt, actual) 45 | } else { 46 | t.Errorf("unexpected error %v", err) 47 | 48 | } 49 | } 50 | 51 | if _, err := stream.Next(); err == nil { 52 | t.Errorf("expected error but got none error %v", err) 53 | } 54 | 55 | stream.Reset() 56 | 57 | if actual, err := stream.Next(); actual != pt1.Pt || err != nil { 58 | if err == nil { 59 | t.Errorf("expected point %v got %v", pt1.Pt, actual) 60 | } else { 61 | t.Errorf("unexpected error %v", err) 62 | 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /internal/geom/bbox.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | type BoundingBox struct { 4 | Xmin, Xmax, Ymin, Ymax, Zmin, Zmax, Xmid, Ymid, Zmid float64 5 | } 6 | 7 | // Constructor to properly initialize a boundingBox struct computing the mids 8 | func NewBoundingBox(Xmin, Xmax, Ymin, Ymax, Zmin, Zmax float64) BoundingBox { 9 | bbox := BoundingBox{ 10 | Xmin: Xmin, 11 | Xmax: Xmax, 12 | Ymin: Ymin, 13 | Ymax: Ymax, 14 | Zmin: Zmin, 15 | Zmax: Zmax, 16 | Xmid: (Xmin + Xmax) / 2, 17 | Ymid: (Ymin + Ymax) / 2, 18 | Zmid: (Zmin + Zmax) / 2, 19 | } 20 | return bbox 21 | } 22 | 23 | // Computes a bounding box from the given box and the given octant index 24 | func NewBoundingBoxFromParent(parent BoundingBox, octant int) BoundingBox { 25 | var xMin, xMax, yMin, yMax, zMin, zMax float64 26 | switch octant { 27 | case 0, 2, 4, 6: 28 | xMin = parent.Xmin 29 | xMax = parent.Xmid 30 | case 1, 3, 5, 7: 31 | xMin = parent.Xmid 32 | xMax = parent.Xmax 33 | } 34 | switch octant { 35 | case 0, 1, 4, 5: 36 | yMin = parent.Ymin 37 | yMax = parent.Ymid 38 | case 2, 3, 6, 7: 39 | yMin = parent.Ymid 40 | yMax = parent.Ymax 41 | } 42 | switch octant { 43 | case 0, 1, 2, 3: 44 | zMin = parent.Zmin 45 | zMax = parent.Zmid 46 | case 4, 5, 6, 7: 47 | zMin = parent.Zmid 48 | zMax = parent.Zmax 49 | } 50 | return NewBoundingBox(xMin, xMax, yMin, yMax, zMin, zMax) 51 | } 52 | 53 | // AsCesiumBox returns the bounding box expressed according to the cesium "box" format 54 | func (b BoundingBox) AsCesiumBox() [12]float64 { 55 | return [12]float64{ 56 | b.Xmid, b.Ymid, b.Zmid, 57 | (b.Xmax - b.Xmid), 0, 0, 58 | 0, (b.Ymax - b.Ymid), 0, 59 | 0, 0, (b.Zmax - b.Zmid), 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /internal/las/golas/util.go: -------------------------------------------------------------------------------- 1 | package golas 2 | 3 | var scanAngleRankFormats map[uint8]any = map[uint8]any{ 4 | 0: nil, 5 | 1: nil, 6 | 2: nil, 7 | 3: nil, 8 | 4: nil, 9 | 5: nil, 10 | } 11 | 12 | var scanAngleFormats map[uint8]any = map[uint8]any{ 13 | 6: nil, 14 | 7: nil, 15 | 8: nil, 16 | 9: nil, 17 | 10: nil, 18 | } 19 | 20 | var gpsTimeFormats map[uint8]any = map[uint8]any{ 21 | 1: nil, 22 | 3: nil, 23 | 4: nil, 24 | 5: nil, 25 | 6: nil, 26 | 7: nil, 27 | 8: nil, 28 | 9: nil, 29 | 10: nil, 30 | } 31 | 32 | var rgbTimeFormats map[uint8]any = map[uint8]any{ 33 | 2: nil, 34 | 3: 
nil, 35 | 5: nil, 36 | 7: nil, 37 | 8: nil, 38 | 10: nil, 39 | } 40 | 41 | var wavePacketsFormats map[uint8]any = map[uint8]any{ 42 | 4: nil, 43 | 5: nil, 44 | 9: nil, 45 | 10: nil, 46 | } 47 | 48 | var extraFlagByteFormats map[uint8]any = map[uint8]any{ 49 | 6: nil, 50 | 7: nil, 51 | 8: nil, 52 | 9: nil, 53 | 10: nil, 54 | } 55 | 56 | var nirFormats map[uint8]any = map[uint8]any{ 57 | 8: nil, 58 | 10: nil, 59 | } 60 | 61 | func formatHasGpsTime(f uint8) bool { 62 | _, ok := gpsTimeFormats[f] 63 | return ok 64 | } 65 | 66 | func formatHasRgbColors(f uint8) bool { 67 | _, ok := rgbTimeFormats[f] 68 | return ok 69 | } 70 | 71 | func formatHasWavePackets(f uint8) bool { 72 | _, ok := wavePacketsFormats[f] 73 | return ok 74 | } 75 | 76 | func formatHasScanAngleRank(f uint8) bool { 77 | _, ok := scanAngleRankFormats[f] 78 | return ok 79 | } 80 | 81 | func formatHasScanAngle(f uint8) bool { 82 | _, ok := scanAngleFormats[f] 83 | return ok 84 | } 85 | 86 | func formatHasExtraFlagByte(f uint8) bool { 87 | _, ok := extraFlagByteFormats[f] 88 | return ok 89 | } 90 | 91 | func formatHasNir(f uint8) bool { 92 | _, ok := nirFormats[f] 93 | return ok 94 | } 95 | -------------------------------------------------------------------------------- /tiler/mocks.go: -------------------------------------------------------------------------------- 1 | package tiler 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 8 | ) 9 | 10 | type MockTiler struct { 11 | InputFiles []string 12 | InputFolder string 13 | OutputFolder string 14 | SourceCRS string 15 | Mutators []mutator.Mutator 16 | Opts *TilerOptions 17 | Ctx context.Context 18 | ProcessFilesCalled bool 19 | ProcessFolderCalled bool 20 | // opts settings 21 | EightBit bool 22 | GridSize float64 23 | PtsPerTile int 24 | Depth int 25 | Version version.TilesetVersion 26 | err error 27 | } 28 | 29 | func (m *MockTiler) ProcessFiles(inputLasFiles []string, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error { 30 | m.InputFiles = inputLasFiles 31 | m.OutputFolder = outputFolder 32 | m.SourceCRS = sourceCRS 33 | m.Opts = opts 34 | m.Ctx = ctx 35 | m.ProcessFilesCalled = true 36 | m.EightBit = opts.eightBitColors 37 | m.GridSize = opts.gridSize 38 | m.PtsPerTile = opts.minPointsPerTile 39 | m.Depth = opts.maxDepth 40 | m.Version = opts.version 41 | m.Mutators = opts.mutators 42 | return m.err 43 | } 44 | 45 | func (m *MockTiler) ProcessFolder(inputFolder, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error { 46 | m.InputFolder = inputFolder 47 | m.OutputFolder = outputFolder 48 | m.SourceCRS = sourceCRS 49 | m.Opts = opts 50 | m.Ctx = ctx 51 | m.ProcessFolderCalled = true 52 | m.EightBit = opts.eightBitColors 53 | m.GridSize = opts.gridSize 54 | m.PtsPerTile = opts.minPointsPerTile 55 | m.Depth = opts.maxDepth 56 | m.Version = opts.version 57 | m.Mutators = opts.mutators 58 | return m.err 59 | } 60 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/alecthomas/assert/v2 v2.10.0 h1:jjRCHsj6hBJhkmhznrCzoNpbA3zqy0fYiUcYZP/GkPY= 2 | github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= 3 | github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= 4 | github.com/alecthomas/repr v0.4.0/go.mod 
h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= 5 | github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= 6 | github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= 7 | github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= 8 | github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= 9 | github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= 10 | github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= 11 | github.com/qmuntal/gltf v0.25.0 h1:XtK3UkKvMC/43vW3BS3rLVyfUjOtNP2jcQJwaEiUWcI= 12 | github.com/qmuntal/gltf v0.25.0/go.mod h1:YoXZOt0Nc0kIfSKOLZIRoV4FycdC+GzE+3JgiAGYoMs= 13 | github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= 14 | github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= 15 | github.com/twpayne/go-proj/v10 v10.4.0 h1:zyrOgzPPfnfM+CyXU9LJ75TbNMfhXDOX9rMMbUZtCvI= 16 | github.com/twpayne/go-proj/v10 v10.4.0/go.mod h1:BimEgbWRW6P4WstSYdrwZxx+92zCwTzxByVdOGnWSOs= 17 | github.com/urfave/cli/v2 v2.27.2 h1:6e0H+AkS+zDckwPCUrZkKX38mRaau4nL2uipkJpbkcI= 18 | github.com/urfave/cli/v2 v2.27.2/go.mod h1:g0+79LmHHATl7DAcHO99smiR/T7uGLw84w8Y42x+4eM= 19 | github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 h1:+qGGcbkzsfDQNPPe9UDgpxAWQrhbbBXOYJFQDq/dtJw= 20 | github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913/go.mod h1:4aEEwZQutDLsQv2Deui4iYQ6DWTxR14g6m8Wv88+Xqk= 21 | -------------------------------------------------------------------------------- /internal/geom/utils.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | import ( 4 | "math" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 7 | ) 8 | 9 | // LocalToGlobalTransformFromPoint takes in input a set of x,y,z coordinates 10 | // assumed to be in EPSG 4978 CRS, ie based on a earth-centered cartesian system 11 | // wrt the WGS84 ellipsoid and returns a Transform from the global CRS to a local CRS 12 | // that has the following properties: 13 | // - Has origin located on the x,y,z point 14 | // - Has a Z-up axis normal to the WGS84 ellipsoid 15 | func LocalToGlobalTransformFromPoint(x, y, z float64) model.Transform { 16 | zAxis := normalToWGS84FromPoint(x, y, z) 17 | xAxis, yAxis := normals(zAxis) 18 | 19 | toGlobal := [4][4]float64{ 20 | {xAxis.X, yAxis.X, zAxis.X, x}, 21 | {xAxis.Y, yAxis.Y, zAxis.Y, y}, 22 | {xAxis.Z, yAxis.Z, zAxis.Z, z}, 23 | {0, 0, 0, 1}, 24 | } 25 | 26 | return model.NewTransform(toGlobal) 27 | } 28 | 29 | // normals returns a set of two arbitrary unit vectors guaranteed to be 30 | // normal to the input one and between each other 31 | func normals(v model.Vector) (model.Vector, model.Vector) { 32 | arbitraryVector := model.Vector{X: 0, Y: 1, Z: 0} 33 | if v.Cross(arbitraryVector).Norm() < 0.05 { 34 | arbitraryVector = model.Vector{X: 1, Y: 0, Z: 0} 35 | } 36 | xAxis := arbitraryVector.Cross(v).Unit() 37 | yAxis := v.Cross(xAxis).Unit() 38 | return xAxis, yAxis 39 | } 40 | 41 | // normalToWGS84FromPoint returns a Unit vector that is normal to the WGS84 42 | // ellipsoid surface from the given point 43 | func normalToWGS84FromPoint(x, y, z float64) model.Vector { 44 | a := 6378137.0 // Semi-major axis in meters (equatorial radius) 45 | b := 6356752.31424518 // Semi-minor axis in meters (polar radius) 46 | if x == 0 && y == 0 && z == 0 { 47 | // origin, choose 
the global z axis arbitrarily 48 | return model.Vector{X: 0, Y: 0, Z: 1} 49 | } 50 | 51 | return model.Vector{ 52 | X: 2 * x / math.Pow(a, 2), 53 | Y: 2 * y / math.Pow(a, 2), 54 | Z: 2 * z / math.Pow(b, 2), 55 | }.Unit() 56 | } 57 | -------------------------------------------------------------------------------- /internal/writer/producer.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "path" 7 | "strconv" 8 | "sync" 9 | 10 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 11 | ) 12 | 13 | type Producer interface { 14 | Produce(work chan *WorkUnit, errchan chan error, wg *sync.WaitGroup, node tree.Node, ctx context.Context) 15 | } 16 | 17 | type StandardProducer struct { 18 | basePath string 19 | } 20 | 21 | func NewStandardProducer(basepath string, subfolder string) Producer { 22 | return &StandardProducer{ 23 | basePath: path.Join(basepath, subfolder), 24 | } 25 | } 26 | 27 | // Parses a tree node and submits WorkUnits the the provided workchannel. Should be called only on the tree root node. 28 | // Closes the channel when all work is submitted. 29 | func (p *StandardProducer) Produce(work chan *WorkUnit, errchan chan error, wg *sync.WaitGroup, node tree.Node, ctx context.Context) { 30 | defer func() { 31 | if r := recover(); r != nil { 32 | errchan <- fmt.Errorf("panic: %v", r) 33 | } 34 | }() 35 | defer close(work) 36 | p.produce(errchan, p.basePath, node, work, wg, ctx) 37 | wg.Done() 38 | } 39 | 40 | // Parses a tree node and submits WorkUnits the the provided workchannel. 41 | func (p *StandardProducer) produce(errchan chan error, basePath string, node tree.Node, work chan *WorkUnit, wg *sync.WaitGroup, ctx context.Context) { 42 | // if node contains points (it should always be the case), then submit work 43 | if err := ctx.Err(); err != nil { 44 | errchan <- fmt.Errorf("context closed: %v", err) 45 | return 46 | } 47 | if node.NumberOfPoints() > 0 { 48 | work <- &WorkUnit{ 49 | Node: node, 50 | BasePath: basePath, 51 | } 52 | } else { 53 | errchan <- fmt.Errorf("unexpected error: found tile without points: %v", node) 54 | } 55 | 56 | // iterate all non nil children and recursively submit all work units 57 | for i, child := range node.Children() { 58 | if child != nil { 59 | p.produce(errchan, path.Join(basePath, strconv.Itoa(i)), child, work, wg, ctx) 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /internal/tree/mocks.go: -------------------------------------------------------------------------------- 1 | package tree 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 8 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/las" 9 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 10 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 11 | ) 12 | 13 | type MockNode struct { 14 | Bounds geom.BoundingBox 15 | ChildNodes [8]Node 16 | Pts geom.PointList 17 | TotalNumPts int 18 | Root bool 19 | Leaf bool 20 | GeomError float64 21 | CenterX, CenterY, CenterZ float64 22 | // invocation params 23 | Las las.LasReader 24 | ConvFactory coor.ConverterFactory 25 | Mut mutator.Mutator 26 | Ctx context.Context 27 | LoadCalled bool 28 | BuildCalled bool 29 | Transform *model.Transform 30 | } 31 | 32 | func (n *MockNode) ToParentCRS() *model.Transform { 33 | return n.Transform 34 | } 35 | func (n *MockNode) BoundingBox() 
geom.BoundingBox { 36 | return n.Bounds 37 | } 38 | func (n *MockNode) Children() [8]Node { 39 | return n.ChildNodes 40 | } 41 | func (n *MockNode) Points() geom.PointList { 42 | return n.Pts 43 | } 44 | func (n *MockNode) TotalNumberOfPoints() int { 45 | return n.TotalNumPts 46 | } 47 | func (n *MockNode) NumberOfPoints() int { 48 | return n.Pts.Len() 49 | } 50 | func (n *MockNode) IsRoot() bool { 51 | return n.Root 52 | } 53 | func (n *MockNode) IsLeaf() bool { 54 | return n.Leaf 55 | } 56 | func (n *MockNode) GeometricError() float64 { 57 | return n.GeomError 58 | } 59 | func (n *MockNode) Build() error { 60 | n.BuildCalled = true 61 | return nil 62 | } 63 | func (n *MockNode) RootNode() Node { 64 | return n 65 | } 66 | func (n *MockNode) Load(l las.LasReader, c coor.ConverterFactory, m mutator.Mutator, ctx context.Context) error { 67 | n.LoadCalled = true 68 | n.Ctx = ctx 69 | n.Las = l 70 | n.ConvFactory = c 71 | n.Mut = m 72 | return nil 73 | } 74 | -------------------------------------------------------------------------------- /share/ITRF2000: -------------------------------------------------------------------------------- 1 | # ITRF2000 params are in cm/year, PJ_helmert uses m/year 2 | +version=1.0.0 +origin=ftp://itrf.ensg.ign.fr/pub/itrf/ITRF.TP +lastupdate=2017-07-25 3 | 4 | # ITRF2000 -> ITRF2005 is only defined the opposite way, so we flip the sign on all 5 | # parameters to get the opposite transformation. Parameters from http://itrf.ign.fr/ITRF_solutions/2005/tp_05-00.php 6 | +proj=helmert +x=-0.0001 +y=0.0008 +z=0.0058 +s=-0.0004 +dx=0.0002 +dy=-0.0001 +dz=0.0018 +ds=-0.00008 +t_epoch=2000.0 +convention=position_vector 7 | 8 | +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector 9 | 10 | +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector 11 | 12 | +proj=helmert +x=0.0067 +y=0.0061 +z=-0.0185 +s=0.00155 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1997.0 +convention=position_vector 13 | 14 | +proj=helmert +x=0.0127 +y=0.0065 +z=-0.0209 +s=0.00195 +rx=-0.00039 +ry=0.00080 +rz=-0.00114 +dx=-0.0029 +dy=-0.0002 +dz=-0.0006 +ds=0.00001 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=1988.0 +convention=position_vector 15 | 16 | +proj=helmert +x=0.0147 +y=0.0135 +z=-0.0139 +s=0.00075 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector 17 | 18 | +proj=helmert +x=0.0267 +y=0.0275 +z=-0.0199 +s=0.00215 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector 19 | 20 | +proj=helmert +x=0.0247 +y=0.0235 +z=-0.0359 +s=0.00245 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector 21 | 22 | +proj=helmert +x=0.0297 +y=0.0475 +z=-0.0739 +s=0.00585 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector 23 | 24 | +proj=helmert +x=0.0247 +y=0.0115 +z=-0.0979 +s=0.00895 +rx=0.0001 +rz=-0.00018 +dy=-0.0006 +dz=-0.0014 +ds=0.00001 +drz=0.00002 +t_epoch=1988.0 +convention=position_vector 25 | -------------------------------------------------------------------------------- /internal/geom/point.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 7 | ) 8 | 9 
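// Illustrative sketch, not part of the repository: using LocalToGlobalTransformFromPoint from
// internal/geom/utils.go above to move between a local Z-up frame and ECEF (EPSG:4978)
// coordinates. The ECEF origin below is an arbitrary sample value.
package main

import (
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/geom"
	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/model"
)

func main() {
	// arbitrary point roughly on the WGS84 ellipsoid surface
	trans := geom.LocalToGlobalTransformFromPoint(-3483057.53, 5267517.24, 892655.42)
	up := model.Vector{X: 0, Y: 0, Z: 1} // one meter along the local Z-up axis
	global := trans.Forward(up)          // the same point in ECEF coordinates
	local := trans.Inverse(global)       // round-trips back to roughly {0, 0, 1}
	fmt.Println(global, local)
}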
| // Point64 contains data of a Point Cloud Point, namely X,Y,Z coords, 10 | // R,G,B color components, Intensity and Classification. Coordinates are expressed 11 | // as double precision float64 numbers. 12 | type Point64 struct { 13 | model.Vector 14 | R uint8 15 | G uint8 16 | B uint8 17 | Intensity uint8 18 | Classification uint8 19 | } 20 | 21 | // Builds a new model.Point from the given coordinates, colors, intensity and classification values 22 | func NewPoint(X, Y, Z float32, R, G, B, Intensity, Classification uint8) model.Point { 23 | return model.Point{ 24 | X: X, 25 | Y: Y, 26 | Z: Z, 27 | R: R, 28 | G: G, 29 | B: B, 30 | Intensity: Intensity, 31 | Classification: Classification, 32 | } 33 | } 34 | 35 | // PointList models a list of model.Point. Points are immutable and returned by value. 36 | type PointList interface { 37 | Len() int 38 | Next() (model.Point, error) 39 | Reset() 40 | } 41 | 42 | // LinkedPoint wraps a model.Point to create a Linked List 43 | type LinkedPoint struct { 44 | Next *LinkedPoint 45 | Pt model.Point 46 | } 47 | 48 | // LinkedPointStream is a wrapper helper that allows a LinkedPoint to implement the PointList interface 49 | type LinkedPointStream struct { 50 | len int 51 | current *LinkedPoint 52 | start *LinkedPoint 53 | } 54 | 55 | // NewLinkedPointStream initializes a linked stream from the given root. 56 | // the length is not cross-verified, it must be coherent with the actual point count in the linked list. 57 | func NewLinkedPointStream(root *LinkedPoint, len int) *LinkedPointStream { 58 | return &LinkedPointStream{ 59 | len: len, 60 | current: root, 61 | start: root, 62 | } 63 | } 64 | 65 | func (l *LinkedPointStream) Next() (model.Point, error) { 66 | if l.current == nil { 67 | return model.Point{}, fmt.Errorf("no more points") 68 | } 69 | pt := l.current.Pt 70 | l.current = l.current.Next 71 | return pt, nil 72 | } 73 | 74 | func (l *LinkedPointStream) Len() int { 75 | return l.len 76 | } 77 | 78 | func (l *LinkedPointStream) Reset() { 79 | l.current = l.start 80 | } 81 | -------------------------------------------------------------------------------- /internal/las/reader_test.go: -------------------------------------------------------------------------------- 1 | package las 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "sync" 7 | "testing" 8 | ) 9 | 10 | func TestCombinedReader(t *testing.T) { 11 | entries, err := os.ReadDir("./testdata") 12 | if err != nil { 13 | t.Fatal(err) 14 | } 15 | 16 | files := []string{} 17 | for _, e := range entries { 18 | filename := e.Name() 19 | files = append(files, fmt.Sprintf("./testdata/%s", filename)) 20 | } 21 | 22 | r, err := NewCombinedFileLasReader(files, "EPSG:32633", false) 23 | if err != nil { 24 | t.Fatalf("unexpected error: %v", err) 25 | } 26 | 27 | if actual := r.NumberOfPoints(); actual != 10*len(files) { 28 | t.Errorf("expected %d points got %d", 10*len(files), actual) 29 | } 30 | 31 | if actual := r.GetCRS(); actual != "EPSG:32633" { 32 | t.Errorf("expected epsg %d got epsg %s", 32633, actual) 33 | } 34 | 35 | for i := 0; i < r.NumberOfPoints(); i++ { 36 | _, err := r.GetNext() 37 | if err != nil { 38 | t.Errorf("unexpected error %v", err) 39 | } 40 | } 41 | _, err = r.GetNext() 42 | if err == nil { 43 | t.Errorf("expected error, got none") 44 | } 45 | } 46 | 47 | func TestCombinedReaderConcurrency(t *testing.T) { 48 | entries, err := os.ReadDir("./testdata") 49 | if err != nil { 50 | t.Fatal(err) 51 | } 52 | 53 | files := []string{} 54 | for _, e := range entries { 55 | filename := e.Name() 56 | 
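// Illustrative sketch, not part of the repository: building a two-point LinkedPointStream from
// internal/geom/point.go above and draining it with Next until it reports "no more points".
package main

import (
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/geom"
)

func main() {
	second := &geom.LinkedPoint{Pt: geom.NewPoint(1, 1, 1, 255, 0, 0, 10, 2)}
	first := &geom.LinkedPoint{Pt: geom.NewPoint(0, 0, 0, 0, 255, 0, 20, 2), Next: second}
	stream := geom.NewLinkedPointStream(first, 2) // the declared length is trusted, not verified
	for {
		pt, err := stream.Next()
		if err != nil {
			break
		}
		fmt.Println(pt.X, pt.Y, pt.Z)
	}
	stream.Reset() // rewinds the stream to the first point
}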
files = append(files, fmt.Sprintf("./testdata/%s", filename)) 57 | } 58 | 59 | r, err := NewCombinedFileLasReader(files, "EPSG:32633", false) 60 | if err != nil { 61 | t.Fatalf("unexpected error: %v", err) 62 | } 63 | 64 | if actual := r.NumberOfPoints(); actual != 10*len(files) { 65 | t.Errorf("expected %d points got %d", 10*len(files), actual) 66 | } 67 | 68 | if actual := r.GetCRS(); actual != "EPSG:32633" { 69 | t.Errorf("expected epsg %d got epsg %s", 32633, actual) 70 | } 71 | 72 | e := make(chan error, 10) 73 | readFun := func(wg *sync.WaitGroup) { 74 | defer wg.Done() 75 | read := 0 76 | for i := 0; i < r.NumberOfPoints()/5; i++ { 77 | _, err := r.GetNext() 78 | if err != nil { 79 | e <- err 80 | t.Errorf("unexpected error %v", err) 81 | continue 82 | } 83 | read++ 84 | } 85 | fmt.Println(read) 86 | } 87 | wg := &sync.WaitGroup{} 88 | for i := 0; i < 5; i++ { 89 | wg.Add(1) 90 | go readFun(wg) 91 | } 92 | wg.Wait() 93 | if len(e) > 0 { 94 | t.Errorf("errors detected in the error channel but none expected") 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /tiler/model/transform.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | // IdentityTransform is the identity transformation object 4 | var IdentityTransform Transform = Transform{ 5 | forward: [4][4]float64{ 6 | {1, 0, 0, 0}, 7 | {0, 1, 0, 0}, 8 | {0, 0, 1, 0}, 9 | {0, 0, 0, 1}, 10 | }, 11 | inverse: [4][4]float64{ 12 | {1, 0, 0, 0}, 13 | {0, 1, 0, 0}, 14 | {0, 0, 1, 0}, 15 | {0, 0, 0, 1}, 16 | }, 17 | } 18 | 19 | // Transform represents a rigid roto-translation between cartesian reference systems 20 | type Transform struct { 21 | forward [4][4]float64 22 | inverse [4][4]float64 23 | } 24 | 25 | // NewTransform returns a new transform object from the given forward transformation matrix 26 | func NewTransform(fwd [4][4]float64) Transform { 27 | inverse := [4][4]float64{ 28 | {fwd[0][0], fwd[1][0], fwd[2][0], -fwd[0][0]*fwd[0][3] - fwd[1][0]*fwd[1][3] - fwd[2][0]*fwd[2][3]}, 29 | {fwd[0][1], fwd[1][1], fwd[2][1], -fwd[0][1]*fwd[0][3] - fwd[1][1]*fwd[1][3] - fwd[2][1]*fwd[2][3]}, 30 | {fwd[0][2], fwd[1][2], fwd[2][2], -fwd[0][2]*fwd[0][3] - fwd[1][2]*fwd[1][3] - fwd[2][2]*fwd[2][3]}, 31 | {0, 0, 0, 1}, 32 | } 33 | 34 | return Transform{ 35 | forward: fwd, 36 | inverse: inverse, 37 | } 38 | } 39 | 40 | // Forward transforms the given Vector from the source to the destination CRS 41 | func (q Transform) Forward(v Vector) Vector { 42 | return q.transform(v, q.forward) 43 | } 44 | 45 | // Inverse transforms the given Vector from the destination to the source CRS 46 | func (q Transform) Inverse(v Vector) Vector { 47 | return q.transform(v, q.inverse) 48 | } 49 | 50 | // ForwardColumnMajor returns the forward transformation matrix in column-major order 51 | func (q Transform) ForwardColumnMajor() [16]float64 { 52 | return q.columnMajor(q.forward) 53 | } 54 | 55 | // InverseColumnMajor returns the inverse transformation matrix in column-major order 56 | func (q Transform) InverseColumnMajor() [16]float64 { 57 | return q.columnMajor(q.inverse) 58 | } 59 | 60 | func (q Transform) transform(v Vector, tr [4][4]float64) Vector { 61 | return Vector{ 62 | X: tr[0][0]*v.X + tr[0][1]*v.Y + tr[0][2]*v.Z + tr[0][3], 63 | Y: tr[1][0]*v.X + tr[1][1]*v.Y + tr[1][2]*v.Z + tr[1][3], 64 | Z: tr[2][0]*v.X + tr[2][1]*v.Y + tr[2][2]*v.Z + tr[2][3], 65 | } 66 | } 67 | 68 | func (q Transform) columnMajor(tr [4][4]float64) [16]float64 { 69 |
return [16]float64{ 70 | tr[0][0], tr[1][0], tr[2][0], tr[3][0], 71 | tr[0][1], tr[1][1], tr[2][1], tr[3][1], 72 | tr[0][2], tr[1][2], tr[2][2], tr[3][2], 73 | tr[0][3], tr[1][3], tr[2][3], tr[3][3], 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /internal/las/golas/geotiff.go: -------------------------------------------------------------------------------- 1 | package golas 2 | 3 | type GeoTIFFMetadata struct { 4 | Keys map[int]*GeoTIFFKey 5 | } 6 | 7 | type GeoTIFFKey struct { 8 | KeyId int 9 | Type GeoTIFFTagType 10 | RawValue any 11 | } 12 | 13 | type GeoTIFFTagType int 14 | 15 | const ( 16 | GTTagTypeShort GeoTIFFTagType = 0 17 | GTTagTypeDouble GeoTIFFTagType = 1 18 | GTTagTypeString GeoTIFFTagType = 2 19 | ) 20 | 21 | func (g GeoTIFFKey) Name() string { 22 | return geotiffKeys[g.KeyId] 23 | } 24 | 25 | func (g GeoTIFFKey) AsShort() uint16 { 26 | return g.RawValue.(uint16) 27 | } 28 | 29 | func (g GeoTIFFKey) AsDouble() float64 { 30 | return g.RawValue.(float64) 31 | } 32 | 33 | func (g GeoTIFFKey) AsString() string { 34 | return g.RawValue.(string) 35 | } 36 | 37 | var geotiffKeys = map[int]string{ 38 | // GeoTiff Configuration Keys 39 | 1024: "GTModelTypeGeoKey", 40 | 1025: "GTRasterTypeGeoKey", 41 | 1026: "GTCitationGeoKey", 42 | 43 | // Geographic CS Parameter Keys 44 | 2048: "GeographicTypeGeoKey", 45 | 2049: "GeogCitationGeoKey", 46 | 2050: "GeogGeodeticDatumGeoKey", 47 | 2051: "GeogPrimeMeridianGeoKey", 48 | 2052: "GeogLinearUnitsGeoKey", 49 | 2053: "GeogLinearUnitSizeGeoKey", 50 | 2054: "GeogAngularUnitsGeoKey", 51 | 2055: "GeogAngularUnitSizeGeoKey", 52 | 2056: "GeogEllipsoidGeoKey", 53 | 2057: "GeogSemiMajorAxisGeoKey", 54 | 2058: "GeogSemiMinorAxisGeoKey", 55 | 2059: "GeogInvFlatteningGeoKey", 56 | 2060: "GeogAzimuthUnitsGeoKey", 57 | 2061: "GeogPrimeMeridianLongGeoKey", 58 | 59 | // Projected CS Parameter Keys 60 | 3072: "ProjectedCSTypeGeoKey", 61 | 3073: "PCSCitationGeoKey", 62 | 3074: "ProjectionGeoKey", 63 | 3075: "ProjCoordTransGeoKey", 64 | 3076: "ProjLinearUnitsGeoKey", 65 | 3077: "ProjLinearUnitSizeGeoKey", 66 | 3078: "ProjStdParallel1GeoKey", 67 | 3079: "ProjStdParallel2GeoKey", 68 | 3080: "ProjNatOriginLongGeoKey", 69 | 3081: "ProjNatOriginLatGeoKey", 70 | 3082: "ProjFalseEastingGeoKey", 71 | 3083: "ProjFalseNorthingGeoKey", 72 | 3084: "ProjFalseOriginLongGeoKey", 73 | 3085: "ProjFalseOriginLatGeoKey", 74 | 3086: "ProjFalseOriginEastingGeoKey", 75 | 3087: "ProjFalseOriginNorthingGeoKey", 76 | 3088: "ProjCenterLongGeoKey", 77 | 3089: "ProjCenterLatGeoKey", 78 | 3090: "ProjCenterEastingGeoKey", 79 | 3091: "ProjFalseOriginNorthingGeoKey", 80 | 3092: "ProjScaleAtNatOriginGeoKey", 81 | 3093: "ProjScaleAtCenterGeoKey", 82 | 3094: "ProjAzimuthAngleGeoKey", 83 | 3095: "ProjStraightVertPoleLongGeoKey", 84 | 85 | // Vertical CS Keys 86 | 4096: "VerticalCSTypeGeoKey", 87 | 4097: "VerticalCitationGeoKey", 88 | 4098: "VerticalDatumGeoKey", 89 | 4099: "VerticalUnitsGeoKey", 90 | } 91 | -------------------------------------------------------------------------------- /share/proj.ini: -------------------------------------------------------------------------------- 1 | [general] 2 | ; Lines starting by ; are commented lines. 3 | ; 4 | 5 | ; Network capabilities disabled by default. 6 | ; Can be overridden with the PROJ_NETWORK=ON environment variable. 
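// Illustrative note, not part of the repository: NewTransform in tiler/model/transform.go above
// inverts the rigid transform analytically. If the forward matrix is [R | t] with R a rotation
// and t a translation, the stored inverse is [R^T | -R^T t]; the snippet below round-trips a
// vector through a 90-degree rotation around Z plus a translation to show the two agree.
package main

import (
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/model"
)

func main() {
	tr := model.NewTransform([4][4]float64{
		{0, -1, 0, 10},
		{1, 0, 0, 20},
		{0, 0, 1, 30},
		{0, 0, 0, 1},
	})
	v := model.Vector{X: 5, Y: -4, Z: 7}
	fmt.Println(tr.Forward(v))             // {14 25 37}
	fmt.Println(tr.Inverse(tr.Forward(v))) // back to {5 -4 7}
}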
7 | ; Cf https://proj.org/en/latest/usage/network.html 8 | ; Valid values = on, off 9 | network = off 10 | 11 | ; Endpoint of the Content Delivery Network where remote resources might 12 | ; be accessed. Only used if network access is allowed (cf above "network" 13 | ; option) 14 | ; Can be overridden with the PROJ_NETWORK_ENDPOINT environment variable. 15 | cdn_endpoint = https://cdn.proj.org 16 | 17 | ; Whether to enable a cache of remote resources that are accessed, on the 18 | ; local file system 19 | ; Valid values = on, off 20 | cache_enabled = on 21 | 22 | ; Size of the cache in megabytes 23 | cache_size_MB = 300 24 | 25 | ; Time-to-live delay in seconds before already accessed remote resources are 26 | ; accessed again to check if they have been updated. 27 | cache_ttl_sec = 86400 28 | 29 | ; Can be set to on so that by default the lack of a known resource files needed 30 | ; for the best transformation PROJ would normally use causes an error, or off 31 | ; to accept missing resource files without errors or warnings. 32 | ; This default value itself is overridden by the PROJ_ONLY_BEST_DEFAULT environment 33 | ; variable if set, and then by the ONLY_BEST setting that can be 34 | ; passed to the proj_create_crs_to_crs() method, or with the --only-best 35 | ; option of the cs2cs program. 36 | ; (added in PROJ 9.2) 37 | ; Valid values = on, off 38 | only_best_default = off 39 | 40 | ; Filename of the Certificate Authority (CA) bundle. 41 | ; Can be overridden with the PROJ_CURL_CA_BUNDLE / CURL_CA_BUNDLE environment variable. 42 | ; (added in PROJ 9.0) 43 | ; ca_bundle_path = /path/to/cabundle.pem 44 | 45 | ; When this is set to on, the operating systems native CA store will be used for certificate verification 46 | ; If you set this option to on and also set ca_bundle_path then during verification those certificates are 47 | ; searched in addition to the native CA store. 
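// Illustrative sketch, not part of the repository: reading the projected CRS EPSG code from key
// 3072 (ProjectedCSTypeGeoKey) of a golas.GeoTIFFMetadata as defined in
// internal/las/golas/geotiff.go above. How the metadata is populated from a LAS file is not
// shown in this section, so the map is filled by hand here.
package main

import (
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/las/golas"
)

func main() {
	meta := golas.GeoTIFFMetadata{
		Keys: map[int]*golas.GeoTIFFKey{
			3072: {KeyId: 3072, Type: golas.GTTagTypeShort, RawValue: uint16(32633)},
		},
	}
	if k, ok := meta.Keys[3072]; ok && k.Type == golas.GTTagTypeShort {
		fmt.Printf("%s = EPSG:%d\n", k.Name(), k.AsShort()) // ProjectedCSTypeGeoKey = EPSG:32633
	}
}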
48 | ; (added in PROJ 9.6) 49 | ; Valid values = on, off 50 | ;native_ca = on 51 | 52 | 53 | ; Transverse Mercator (and UTM) default algorithm: auto, evenden_snyder or poder_engsager 54 | ; * evenden_snyder is the fastest, but less accurate far from central meridian 55 | ; * poder_engsager is slower, but more accurate far from central meridian 56 | ; * default will auto-select between the two above depending on the coordinate 57 | ; to transform and will use evenden_snyder if the error in doing so is below 58 | ; 0.1 mm (for an ellipsoid of the size of Earth) 59 | tmerc_default_algo = poder_engsager 60 | -------------------------------------------------------------------------------- /tiler/model/vector_test.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | ) 7 | 8 | const tolerance = 1e-7 9 | 10 | func compareWithTolerance(u Vector, v Vector, t *testing.T) { 11 | if math.Abs(u.X-v.X) > tolerance { 12 | t.Errorf("expected coordinate X %f, got %f", u.X, v.X) 13 | } 14 | if math.Abs(u.Y-v.Y) > tolerance { 15 | t.Errorf("expected coordinate Y %f, got %f", u.Y, v.Y) 16 | } 17 | if math.Abs(u.Z-v.Z) > tolerance { 18 | t.Errorf("expected coordinate Z %f, got %f", u.Z, v.Z) 19 | } 20 | } 21 | 22 | func TestVectorUnit(t *testing.T) { 23 | v := Vector{ 24 | X: 2.0, 25 | Y: 2.0, 26 | Z: 0.0, 27 | } 28 | expected := Vector{X: math.Sqrt(2) / 2, Y: math.Sqrt(2) / 2, Z: 0} 29 | compareWithTolerance(v.Unit(), expected, t) 30 | 31 | v = Vector{ 32 | X: 2.0, 33 | Y: 0.0, 34 | Z: 2.0, 35 | } 36 | expected = Vector{X: math.Sqrt(2) / 2, Y: 0, Z: math.Sqrt(2) / 2} 37 | compareWithTolerance(v.Unit(), expected, t) 38 | 39 | v = Vector{ 40 | X: 0.0, 41 | Y: 2.0, 42 | Z: 2.0, 43 | } 44 | expected = Vector{X: 0, Y: math.Sqrt(2) / 2, Z: math.Sqrt(2) / 2} 45 | compareWithTolerance(v.Unit(), expected, t) 46 | 47 | v = Vector{ 48 | X: 2.0, 49 | Y: 2.0, 50 | Z: 2.0, 51 | } 52 | expected = Vector{X: math.Sqrt(3) / 3, Y: math.Sqrt(3) / 3, Z: math.Sqrt(3) / 3} 53 | compareWithTolerance(v.Unit(), expected, t) 54 | } 55 | 56 | func TestVectorNorm(t *testing.T) { 57 | v := Vector{ 58 | X: 6.0, 59 | Y: 8.0, 60 | Z: 0.0, 61 | } 62 | expected := 10.0 63 | if actual := v.Norm(); actual != expected { 64 | t.Errorf("expected norm %f, got %f", expected, actual) 65 | } 66 | 67 | v = Vector{ 68 | X: 0.0, 69 | Y: 8.0, 70 | Z: 6.0, 71 | } 72 | expected = 10.0 73 | if actual := v.Norm(); actual != expected { 74 | t.Errorf("expected norm %f, got %f", expected, actual) 75 | } 76 | 77 | v = Vector{ 78 | X: -6.0, 79 | Y: -7.0, 80 | Z: 6.0, 81 | } 82 | expected = 11.0 83 | if actual := v.Norm(); actual != expected { 84 | t.Errorf("expected norm %f, got %f", expected, actual) 85 | } 86 | } 87 | 88 | func TestVectorCross(t *testing.T) { 89 | u := Vector{X: 2, Y: 0, Z: 0} 90 | v := Vector{X: 0, Y: 2, Z: 0} 91 | expected := Vector{X: 0, Y: 0, Z: 4} 92 | compareWithTolerance(expected, u.Cross(v), t) 93 | 94 | u = Vector{X: 1, Y: 1, Z: 0} 95 | v = Vector{X: 0, Y: 1, Z: 0} 96 | expected = Vector{X: 0, Y: 0, Z: 1} 97 | compareWithTolerance(expected, u.Cross(v), t) 98 | 99 | u = Vector{X: 0, Y: 1, Z: 0} 100 | v = Vector{X: 0, Y: 0, Z: 1} 101 | expected = Vector{X: 1, Y: 0, Z: 0} 102 | compareWithTolerance(expected, u.Cross(v), t) 103 | } 104 | -------------------------------------------------------------------------------- /internal/tree/tree.go: -------------------------------------------------------------------------------- 1 | package tree 2 | 3 | 
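// Illustrative sketch, not part of the repository: the model.Vector helpers exercised by
// tiler/model/vector_test.go above — Norm, Unit and Cross.
package main

import (
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/model"
)

func main() {
	u := model.Vector{X: 3, Y: 4, Z: 0}
	v := model.Vector{X: 0, Y: 0, Z: 2}
	fmt.Println(u.Norm())   // 5
	fmt.Println(u.Unit())   // {0.6 0.8 0}
	fmt.Println(u.Cross(v)) // {8 -6 0}
}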
import ( 4 | "context" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 8 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/las" 9 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 10 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 11 | ) 12 | 13 | // Tree represents the interface that an Octree representation of the point cloud should implement. 14 | // A tree has a root node with up to 8 children and each can have up to 8 recursively. 15 | // A tree must be "loaded" with points, and then "built" before being used. 16 | type Tree interface { 17 | // Initializes the tree. Must be called before calling GetRootNode but after having called Load. 18 | Build() error 19 | // RootNode returns the root node of the tree 20 | RootNode() Node 21 | // Load loads the points into the tree. Must be called before any other operation on the tree. 22 | // requires providing a coordinate and an elevation converter that will be used by the tree 23 | // to internally perform coordinate conversions, as appropriate. The elevation converter can be nil. 24 | Load(las.LasReader, coor.ConverterFactory, mutator.Mutator, context.Context) error 25 | } 26 | 27 | // Node models a generic node of a Tree. A node contains the points to show on its corresponding LoD. 28 | // It must also be able to compute and return its children. 29 | type Node interface { 30 | // BoundingBox returns the bounding box of the node, expressed in local coordinates 31 | BoundingBox() geom.BoundingBox 32 | // Children returns the 8 children of the current tree node. Some or 33 | // all of these could be nil if not present. 34 | Children() [8]Node 35 | // Points returns the points stored in the current node, not including those in the children. 36 | // Points will have coordinates expressed relative to the local reference system 37 | Points() geom.PointList 38 | // TotalNumberOfPoints returns the number of points contained in this node AND all its children 39 | TotalNumberOfPoints() int 40 | // NumberOfPoints returns the number of points contained in this node, EXCLUDING its children 41 | NumberOfPoints() int 42 | // IsRoot returns true if the node is the root node of the tree representation 43 | IsRoot() bool 44 | // IsLeaf returns true if the current node does not have any children 45 | IsLeaf() bool 46 | // GeometricError returns an estimation, in meters, of the geometric error modeled 47 | // by the current tree node. 48 | GeometricError() float64 49 | // ToParentCRS returns a Transform object to use to transform the coordinates from the 50 | // node local CRS to the parent CRS. For a root node this transforms the 51 | // coordinates back to the EPSG 4978 (ECEF) coordinate system. If nil, it is implied 52 | // that the transform is the identity transform. 
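// Illustrative sketch, not part of the repository: the Load -> Build -> RootNode lifecycle
// prescribed by the Tree interface above, followed by a recursive walk of the resulting nodes.
// The concrete Tree, LasReader, ConverterFactory and Mutator values are assumed to be provided
// by the caller; only the interfaces defined in this section are used.
package example

import (
	"context"
	"fmt"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor"
	"github.com/mfbonfigli/gocesiumtiler/v2/internal/las"
	"github.com/mfbonfigli/gocesiumtiler/v2/internal/tree"
	"github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator"
)

func buildAndWalk(t tree.Tree, r las.LasReader, cf coor.ConverterFactory, m mutator.Mutator) error {
	if err := t.Load(r, cf, m, context.Background()); err != nil {
		return err
	}
	if err := t.Build(); err != nil {
		return err
	}
	walk(t.RootNode(), 0)
	return nil
}

func walk(n tree.Node, depth int) {
	fmt.Printf("depth %d: %d points in node, %d in subtree\n", depth, n.NumberOfPoints(), n.TotalNumberOfPoints())
	for _, child := range n.Children() {
		if child != nil {
			walk(child, depth+1)
		}
	}
}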
53 | ToParentCRS() *model.Transform 54 | } 55 | -------------------------------------------------------------------------------- /tiler/model/transform_test.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import "testing" 4 | 5 | func TestTransformForwardInverse(t *testing.T) { 6 | // pure translation 7 | q := NewTransform( 8 | [4][4]float64{ 9 | {1, 0, 0, 10}, 10 | {0, 1, 0, 20}, 11 | {0, 0, 1, 30}, 12 | {0, 0, 0, 1}, 13 | }, 14 | ) 15 | source := Vector{X: 5, Y: -4, Z: 7} 16 | actual := q.Forward(source) 17 | expected := Vector{X: 15, Y: 16, Z: 37} 18 | compareWithTolerance(expected, actual, t) 19 | actual = q.Inverse(expected) 20 | expected = source 21 | compareWithTolerance(expected, actual, t) 22 | 23 | // pure rotation around z 24 | q = NewTransform( 25 | [4][4]float64{ 26 | {0, -1, 0, 0}, 27 | {1, 0, 0, 0}, 28 | {0, 0, 1, 0}, 29 | {0, 0, 0, 1}, 30 | }, 31 | ) 32 | source = Vector{X: 5, Y: -4, Z: 7} 33 | actual = q.Forward(source) 34 | expected = Vector{X: 4, Y: 5, Z: 7} 35 | compareWithTolerance(expected, actual, t) 36 | actual = q.Inverse(expected) 37 | expected = source 38 | compareWithTolerance(expected, actual, t) 39 | 40 | // pure rotation around x 41 | q = NewTransform( 42 | [4][4]float64{ 43 | {1, 0, 0, 0}, 44 | {0, 0, -1, 0}, 45 | {0, 1, 0, 0}, 46 | {0, 0, 0, 1}, 47 | }, 48 | ) 49 | source = Vector{X: 5, Y: -4, Z: 7} 50 | actual = q.Forward(source) 51 | expected = Vector{X: 5, Y: -7, Z: -4} 52 | compareWithTolerance(expected, actual, t) 53 | actual = q.Inverse(expected) 54 | expected = source 55 | compareWithTolerance(expected, actual, t) 56 | 57 | // pure rotation around y 58 | q = NewTransform( 59 | [4][4]float64{ 60 | {0, 0, 1, 0}, 61 | {0, 1, 0, 0}, 62 | {-1, 0, 0, 0}, 63 | {0, 0, 0, 1}, 64 | }, 65 | ) 66 | source = Vector{X: 5, Y: -4, Z: 7} 67 | actual = q.Forward(source) 68 | expected = Vector{X: 7, Y: -4, Z: -5} 69 | compareWithTolerance(expected, actual, t) 70 | actual = q.Inverse(expected) 71 | expected = source 72 | compareWithTolerance(expected, actual, t) 73 | 74 | // translation and rotation 75 | q = NewTransform( 76 | [4][4]float64{ 77 | {0, -1, 0, 10}, 78 | {1, 0, 0, 20}, 79 | {0, 0, 1, 30}, 80 | {0, 0, 0, 1}, 81 | }, 82 | ) 83 | source = Vector{X: 5, Y: -4, Z: 7} 84 | actual = q.Forward(source) 85 | expected = Vector{X: 14, Y: 25, Z: 37} 86 | compareWithTolerance(expected, actual, t) 87 | actual = q.Inverse(expected) 88 | expected = source 89 | compareWithTolerance(expected, actual, t) 90 | } 91 | 92 | func TestColumnMajorForwardInverse(t *testing.T) { 93 | q := NewTransform( 94 | [4][4]float64{ 95 | {0, -1, 0, 10}, 96 | {1, 0, 0, 20}, 97 | {0, 0, 1, 30}, 98 | {0, 0, 0, 1}, 99 | }, 100 | ) 101 | 102 | expectedForward := [16]float64{ 103 | 0, 1, 0, 0, 104 | -1, 0, 0, 0, 105 | 0, 0, 1, 0, 106 | 10, 20, 30, 1, 107 | } 108 | 109 | expectedInverse := [16]float64{ 110 | 0, -1, 0, 0, 111 | 1, 0, 0, 0, 112 | 0, 0, 1, 0, 113 | -20, 10, -30, 1, 114 | } 115 | 116 | if actual := q.ForwardColumnMajor(); actual != expectedForward { 117 | t.Errorf("expected forward column major %v, got %v", expectedForward, actual) 118 | } 119 | 120 | if actual := q.InverseColumnMajor(); actual != expectedInverse { 121 | t.Errorf("expected inverse column major %v, got %v", expectedInverse, actual) 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /internal/writer/producer_test.go: -------------------------------------------------------------------------------- 1 | package 
writer 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "sync" 7 | "testing" 8 | "time" 9 | 10 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 11 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 12 | ) 13 | 14 | func TestProduce(t *testing.T) { 15 | 16 | pt1 := &geom.LinkedPoint{ 17 | Pt: geom.NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 18 | } 19 | pt2 := &geom.LinkedPoint{ 20 | Pt: geom.NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 21 | } 22 | pt3 := &geom.LinkedPoint{ 23 | Pt: geom.NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 24 | } 25 | pt1.Next = pt2 26 | pt2.Next = pt3 27 | 28 | stream := geom.NewLinkedPointStream(pt1, 3) 29 | stream2 := geom.NewLinkedPointStream(pt2, 2) 30 | 31 | p := NewStandardProducer("path", "folder") 32 | child := &tree.MockNode{ 33 | TotalNumPts: 2, 34 | Pts: stream2, 35 | } 36 | root := &tree.MockNode{ 37 | TotalNumPts: 5, 38 | Pts: stream, 39 | ChildNodes: [8]tree.Node{ 40 | nil, 41 | child, 42 | }, 43 | } 44 | c := make(chan *WorkUnit, 10) 45 | ec := make(chan error) 46 | wg := &sync.WaitGroup{} 47 | wg.Add(1) 48 | p.Produce(c, ec, wg, root, context.TODO()) 49 | wg.Wait() 50 | rootSeen := false 51 | childSeen := false 52 | for wu := range c { 53 | if wu.Node != root && wu.Node != child { 54 | t.Errorf("unexpected unit") 55 | } 56 | if wu.Node == root { 57 | rootSeen = true 58 | if wu.BasePath != "path/folder" { 59 | t.Errorf("unexpected path, expected path/folder, got %s", wu.BasePath) 60 | } 61 | } 62 | if wu.Node == child { 63 | childSeen = true 64 | if wu.BasePath != "path/folder/1" { 65 | t.Errorf("unexpected path, expected path/folder/1, got %s", wu.BasePath) 66 | } 67 | } 68 | } 69 | if !rootSeen || !childSeen { 70 | t.Errorf("not all nodes were seen") 71 | } 72 | if len(ec) != 0 { 73 | t.Errorf("unexpected errors in the channel") 74 | } 75 | } 76 | 77 | func TestProduceWithCancelOk(t *testing.T) { 78 | 79 | pt1 := &geom.LinkedPoint{ 80 | Pt: geom.NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 81 | } 82 | pt2 := &geom.LinkedPoint{ 83 | Pt: geom.NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 84 | } 85 | pt3 := &geom.LinkedPoint{ 86 | Pt: geom.NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 87 | } 88 | pt1.Next = pt2 89 | pt2.Next = pt3 90 | 91 | stream := geom.NewLinkedPointStream(pt1, 3) 92 | stream2 := geom.NewLinkedPointStream(pt2, 2) 93 | 94 | p := NewStandardProducer("path", "folder") 95 | child := &tree.MockNode{ 96 | TotalNumPts: 2, 97 | Pts: stream2, 98 | } 99 | root := &tree.MockNode{ 100 | TotalNumPts: 5, 101 | Pts: stream, 102 | ChildNodes: [8]tree.Node{ 103 | nil, 104 | child, 105 | }, 106 | } 107 | c := make(chan *WorkUnit) 108 | ec := make(chan error, 10) 109 | wg := &sync.WaitGroup{} 110 | wg.Add(1) 111 | mockErr := fmt.Errorf("mock error") 112 | ctx, _ := context.WithDeadlineCause(context.Background(), time.Now().Add(500*time.Millisecond), mockErr) 113 | time.Sleep(600 * time.Millisecond) 114 | p.Produce(c, ec, wg, root, ctx) 115 | wg.Wait() 116 | if len(c) > 0 { 117 | t.Errorf("unexpected work units in the channel") 118 | } 119 | if len(ec) == 0 { 120 | t.Errorf("expected errors in the channel") 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /internal/las/golas/io.go: -------------------------------------------------------------------------------- 1 | package golas 2 | 3 | import ( 4 | "bufio" 5 | "encoding/binary" 6 | "errors" 7 | "io" 8 | "strings" 9 | ) 10 | 11 | // bufferedReadSeeker wraps a io.ReadSeeker adding a buffering layer on top of it. 
12 | // Seek operations reset the buffer 13 | type bufferedReadSeeker struct { 14 | r io.ReadSeeker 15 | b bufio.Reader 16 | } 17 | 18 | func newBufferedReadSeeker(r io.ReadSeeker) *bufferedReadSeeker { 19 | return &bufferedReadSeeker{ 20 | r: r, 21 | b: *bufio.NewReaderSize(r, 64*1024), 22 | } 23 | } 24 | 25 | func (b *bufferedReadSeeker) Read(p []byte) (n int, err error) { 26 | return b.b.Read(p) 27 | } 28 | 29 | func (b *bufferedReadSeeker) Seek(offset int64, whence int) (int64, error) { 30 | defer b.b.Reset(b.r) 31 | return b.r.Seek(offset, whence) 32 | } 33 | 34 | func readString(r io.Reader, n int) (string, error) { 35 | data, err := readBytes(r, n) 36 | if err != nil { 37 | return "", err 38 | } 39 | return strings.TrimRight(string(data), "\u0000"), err 40 | } 41 | 42 | func readInt8(r io.Reader) (int8, error) { 43 | var data int8 44 | err := binary.Read(r, binary.LittleEndian, &data) 45 | return data, err 46 | } 47 | 48 | func readUint8(r io.Reader) (uint8, error) { 49 | var data uint8 50 | err := binary.Read(r, binary.LittleEndian, &data) 51 | return data, err 52 | } 53 | 54 | func readShort(r io.Reader) (int16, error) { 55 | var data int16 56 | err := binary.Read(r, binary.LittleEndian, &data) 57 | return data, err 58 | } 59 | 60 | func readUnsignedShort(r io.Reader) (uint16, error) { 61 | var data uint16 62 | err := binary.Read(r, binary.LittleEndian, &data) 63 | return data, err 64 | } 65 | 66 | func readLong(r io.Reader) (int32, error) { 67 | var data int32 68 | err := binary.Read(r, binary.LittleEndian, &data) 69 | return data, err 70 | } 71 | 72 | func readUnsignedLong(r io.Reader) (uint32, error) { 73 | var data uint32 74 | err := binary.Read(r, binary.LittleEndian, &data) 75 | return data, err 76 | } 77 | 78 | func readUnsignedLong64(r io.Reader) (uint64, error) { 79 | var data uint64 80 | err := binary.Read(r, binary.LittleEndian, &data) 81 | return data, err 82 | } 83 | 84 | func readFloat32(r io.Reader) (float32, error) { 85 | var data float32 86 | err := binary.Read(r, binary.LittleEndian, &data) 87 | return data, err 88 | } 89 | 90 | func readFloat64(r io.Reader) (float64, error) { 91 | var data float64 92 | err := binary.Read(r, binary.LittleEndian, &data) 93 | return data, err 94 | } 95 | 96 | func readUnsignedLongArray(r io.Reader, n int) ([]uint32, error) { 97 | out := make([]uint32, n) 98 | for i := 0; i < n; i++ { 99 | data, err := readUnsignedLong(r) 100 | if err != nil { 101 | return out, err 102 | } 103 | out[i] = data 104 | } 105 | return out, nil 106 | } 107 | 108 | func readUnsignedLong64Array(r io.Reader, n int) ([]uint64, error) { 109 | out := make([]uint64, n) 110 | for i := 0; i < n; i++ { 111 | data, err := readUnsignedLong64(r) 112 | if err != nil { 113 | return out, err 114 | } 115 | out[i] = data 116 | } 117 | return out, nil 118 | } 119 | 120 | func readBytes(r io.Reader, n int) ([]byte, error) { 121 | data := make([]byte, n) 122 | nRead, err := io.ReadFull(r, data) 123 | if err != nil { 124 | return nil, err 125 | } 126 | if nRead != n { 127 | return nil, errors.New("unexpected number of bytes read") 128 | } 129 | return data, nil 130 | } 131 | -------------------------------------------------------------------------------- /internal/conv/coor/proj/proj.go: -------------------------------------------------------------------------------- 1 | package proj 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "path/filepath" 7 | 8 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 9 | "github.com/twpayne/go-proj/v10" 10 | ) 11 | 12 | const epsg4978crs = 
"EPSG:4978" 13 | 14 | type projCoordinateConverter struct { 15 | projections map[string]*proj.PJ 16 | searchPath string 17 | } 18 | 19 | // Returns a 20 | func NewProjCoordinateConverter() (*projCoordinateConverter, error) { 21 | // Initialization of EPSG Proj4 database 22 | conv := &projCoordinateConverter{ 23 | projections: make(map[string]*proj.PJ), 24 | } 25 | 26 | // set the search path to the share folder in the same folder as the executable path 27 | execPath, err := os.Executable() 28 | if err != nil { 29 | return nil, err 30 | } 31 | conv.searchPath = filepath.Join(filepath.Dir(execPath), "share") 32 | return conv, nil 33 | } 34 | 35 | // Converts the given coordinate from the given source crs to the given target crs. 36 | func (cc *projCoordinateConverter) Transform(sourceCRS string, targetCRS string, coord model.Vector) (model.Vector, error) { 37 | if sourceCRS == targetCRS { 38 | return coord, nil 39 | } 40 | pj, err := cc.getProjection(sourceCRS, targetCRS) 41 | if err != nil { 42 | return model.Vector{}, err 43 | } 44 | c := proj.NewCoord(coord.X, coord.Y, coord.Z, 0) 45 | out, err := pj.Forward(c) 46 | if err != nil { 47 | return coord, fmt.Errorf("error while transforming coordinates: %w", err) 48 | } 49 | return model.Vector{X: out.X(), Y: out.Y(), Z: out.Z()}, nil 50 | } 51 | 52 | // Converts the input coordinate from the given CRS to EPSG:4978 srid 53 | func (cc *projCoordinateConverter) ToWGS84Cartesian(sourceCRS string, coord model.Vector) (model.Vector, error) { 54 | if sourceCRS == epsg4978crs { 55 | return coord, nil 56 | } 57 | 58 | return cc.Transform(sourceCRS, epsg4978crs, coord) 59 | } 60 | 61 | // Releases all projection objects from memory 62 | func (cc *projCoordinateConverter) Cleanup() { 63 | for _, pj := range cc.projections { 64 | if pj != nil { 65 | pj.Destroy() 66 | } 67 | } 68 | // reset the projection cache 69 | cc.projections = make(map[string]*proj.PJ) 70 | } 71 | 72 | // Returns the projection object corresponding to the given crs representations, caching it internally to be reused 73 | // This object is not designed for concurrent usage by multiple goroutines 74 | func (cc *projCoordinateConverter) getProjection(source string, target string) (*proj.PJ, error) { 75 | uniqueProjectionCode := source + "#" + target 76 | if val, ok := cc.projections[uniqueProjectionCode]; ok { 77 | return val, nil 78 | } 79 | ctx := proj.NewContext() 80 | // set the search path if it points to a valid folder 81 | if _, err := os.Stat(cc.searchPath); err == nil { 82 | ctx.SetSearchPaths([]string{cc.searchPath}) 83 | } 84 | sourcePj, err := ctx.New(source) 85 | if err != nil { 86 | return nil, err 87 | } 88 | defer sourcePj.Destroy() 89 | targetPJ, err := ctx.New(target) 90 | if err != nil { 91 | return nil, err 92 | } 93 | defer targetPJ.Destroy() 94 | pj, err := ctx.NewCRSToCRSFromPJ(sourcePj, targetPJ, nil, "") 95 | if err != nil { 96 | return nil, fmt.Errorf("unable to initialize projection between %s and %s: %w", source, target, err) 97 | } 98 | pj, err = pj.NormalizeForVisualization() 99 | if err != nil { 100 | return nil, fmt.Errorf("unable to normalize the projection between %s and %s: %w", source, target, err) 101 | } 102 | 103 | cc.projections[uniqueProjectionCode] = pj 104 | 105 | return pj, nil 106 | } 107 | -------------------------------------------------------------------------------- /share/ITRF2014: -------------------------------------------------------------------------------- 1 | # ITRF2014 params are in mm/year, PJ_helmert uses m/year 2 | 
+version=1.0.0 +origin=http://itrf.ign.fr/doc_ITRF/Transfo-ITRF2014_ITRFs.txt +lastupdate=2017-07-26 3 | 4 | +proj=helmert +x=0.0016 +y=0.0019 +z=0.0024 +s=-0.00002 +dz=-0.0001 +ds=0.00003 +t_epoch=2010.0 +convention=position_vector 5 | 6 | +proj=helmert +x=0.0026 +y=0.001 +z=-0.0023 +s=0.00092 +dx=0.0003 +dz=-0.0001 +ds=0.00003 +t_epoch=2010.0 +convention=position_vector 7 | 8 | +proj=helmert +x=0.0007 +y=0.0012 +z=-0.0261 +s=0.00212 +dx=0.0001 +dy=0.0001 +dz=-0.0019 +ds=0.00011 +t_epoch=2010.0 +convention=position_vector 9 | 10 | +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 11 | 12 | +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 13 | 14 | +proj=helmert +x=0.0074 +y=-0.0005 +z=-0.0628 +s=0.0038 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 15 | 16 | +proj=helmert +x=-0.0504 +y=0.0033 +z=-0.0602 +s=0.00429 +rx=-0.00281 +ry=-0.00338 +rz=0.0004 +dx=-0.0028 +dy=-0.0001 +dz=-0.0025 +ds=0.00012 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=2010.0 +convention=position_vector 17 | 18 | +proj=helmert +x=0.0154 +y=0.0015 +z=-0.0708 +s=0.00309 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 19 | 20 | +proj=helmert +x=0.0274 +y=0.0155 +z=-0.0768 +s=0.00449 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 21 | 22 | +proj=helmert +x=0.0254 +y=0.0115 +z=-0.0928 +s=0.00479 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 23 | 24 | +proj=helmert +x=0.0304 +y=0.0355 +z=-0.1308 +s=0.00819 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 25 | 26 | +proj=helmert +x=0.0254 +y=-0.0005 +z=-0.1548 +s=0.01129 +rx=0.0001 +rz=0.00026 +dx=0.0001 +dy=-0.0005 +dz=-0.0033 +ds=0.00012 +drz=0.00002 +t_epoch=2010.0 +convention=position_vector 27 | 28 | # ITRF2014 Plate Motion Model parameters 29 | # 30 | # As described in 31 | # 32 | # Z. 
Altamimi et al, 2017, ITRF2014 plate motion model, 33 | # doi: 10.1093/gji/ggx136 34 | 35 | +proj=helmert +drx=-0.000248 +dry=-0.000324 +drz=0.000675 +convention=position_vector 36 | 37 | +proj=helmert +drx=0.001154 +dry=-0.000136 +drz=0.001444 +convention=position_vector 38 | 39 | +proj=helmert +drx=0.001510 +dry=0.001182 +drz=0.001215 +convention=position_vector 40 | 41 | +proj=helmert +drx=-0.000085 +dry=-0.000531 +drz=0.000770 +convention=position_vector 42 | 43 | +proj=helmert +drx=0.001154 +dry=-0.000005 +drz=0.001454 +convention=position_vector 44 | 45 | +proj=helmert +drx=-0.000333 +dry=-0.001544 +drz=0.001623 +convention=position_vector 46 | 47 | +proj=helmert +drx=0.000024 +dry=-0.000694 +drz=-0.000063 +convention=position_vector 48 | 49 | +proj=helmert +drx=0.000099 +dry=-0.000614 +drz=0.000733 +convention=position_vector 50 | 51 | +proj=helmert +drx=-0.000409 +dry=0.001047 +drz=-0.002169 +convention=position_vector 52 | 53 | +proj=helmert +drx=-0.000270 +dry=-0.000301 +drz=-0.000140 +convention=position_vector 54 | 55 | +proj=helmert +drx=-0.000121 +dry=-0.000794 +drz=0.000884 +convention=position_vector 56 | -------------------------------------------------------------------------------- /internal/geom/utils_test.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | 7 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 8 | ) 9 | 10 | const tolerance = 1e-7 11 | 12 | func compareWithTolerance(u model.Vector, v model.Vector, t *testing.T) { 13 | if math.Abs(u.X-v.X) > tolerance { 14 | t.Errorf("expected coordinate X %f, got %f", u.X, v.X) 15 | } 16 | if math.Abs(u.Y-v.Y) > tolerance { 17 | t.Errorf("expected coordinate Y %f, got %f", u.Y, v.Y) 18 | } 19 | if math.Abs(u.Z-v.Z) > tolerance { 20 | t.Errorf("expected coordinate Z %f, got %f", u.Z, v.Z) 21 | } 22 | } 23 | 24 | func TestLocalToGlobalTransformFromPoint(t *testing.T) { 25 | origin := model.Vector{X: 100, Y: 0, Z: 0} 26 | trans := LocalToGlobalTransformFromPoint(origin.X, origin.Y, origin.Z) 27 | // assert correctness indirectly 28 | // should be centered in the input point 29 | compareWithTolerance(origin, trans.Forward(model.Vector{}), t) 30 | // Z axis should be oriented correctly 31 | compareWithTolerance(model.Vector{X: 100 + 1, Y: 0, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 0, Z: 1}), t) 32 | compareWithTolerance(model.Vector{X: 0, Y: 0, Z: 0}, trans.Inverse(model.Vector{X: 100, Y: 0, Z: 0}), t) 33 | // X axis should be oriented correctly 34 | compareWithTolerance(model.Vector{X: 100, Y: 0, Z: -1}, trans.Forward(model.Vector{X: 1, Y: 0, Z: 0}), t) 35 | compareWithTolerance(model.Vector{X: 1, Y: 0, Z: 0}, trans.Inverse(model.Vector{X: 100, Y: 0, Z: -1}), t) 36 | // Y axis should be oriented correctly 37 | compareWithTolerance(model.Vector{X: 100, Y: 1, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 1, Z: 0}), t) 38 | compareWithTolerance(model.Vector{X: 0, Y: 1, Z: 0}, trans.Inverse(model.Vector{X: 100, Y: 1, Z: 0}), t) 39 | 40 | origin = model.Vector{X: 0, Y: 100, Z: 0} 41 | trans = LocalToGlobalTransformFromPoint(origin.X, origin.Y, origin.Z) 42 | // assert correctness indirectly 43 | // should be centered in the input point 44 | compareWithTolerance(origin, trans.Forward(model.Vector{}), t) 45 | // Z axis should be oriented correctly 46 | compareWithTolerance(model.Vector{X: 0, Y: 100 + 1, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 0, Z: 1}), t) 47 | // X axis should be oriented correctly 48 | 
compareWithTolerance(model.Vector{X: 0, Y: 100, Z: 1}, trans.Forward(model.Vector{X: 1, Y: 0, Z: 0}), t) 49 | // Y axis should be oriented correctly 50 | compareWithTolerance(model.Vector{X: 1, Y: 100, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 1, Z: 0}), t) 51 | 52 | origin = model.Vector{X: 0, Y: 100, Z: 0} 53 | trans = LocalToGlobalTransformFromPoint(origin.X, origin.Y, origin.Z) 54 | // assert correctness indirectly 55 | // should be centered in the input point 56 | compareWithTolerance(origin, trans.Forward(model.Vector{}), t) 57 | // Z axis should be oriented correctly 58 | compareWithTolerance(model.Vector{X: 0, Y: 100 + 1, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 0, Z: 1}), t) 59 | 60 | origin = model.Vector{X: -100, Y: 0, Z: 0} 61 | trans = LocalToGlobalTransformFromPoint(origin.X, origin.Y, origin.Z) 62 | // assert correctness indirectly 63 | // should be centered in the input point 64 | compareWithTolerance(origin, trans.Forward(model.Vector{}), t) 65 | // Z axis should be oriented correctly 66 | compareWithTolerance(model.Vector{X: -100 - 1, Y: 0, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 0, Z: 1}), t) 67 | 68 | origin = model.Vector{X: 0, Y: -100, Z: 0} 69 | trans = LocalToGlobalTransformFromPoint(origin.X, origin.Y, origin.Z) 70 | // assert correctness indirectly 71 | // should be centered in the input point 72 | compareWithTolerance(origin, trans.Forward(model.Vector{}), t) 73 | // Z axis should be oriented correctly 74 | compareWithTolerance(model.Vector{X: 0, Y: -100 - 1, Z: 0}, trans.Forward(model.Vector{X: 0, Y: 0, Z: 1}), t) 75 | } 76 | -------------------------------------------------------------------------------- /internal/writer/writer.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "context" 5 | "math" 6 | "sync" 7 | 8 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 9 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 10 | ) 11 | 12 | // Writer writes a tree as a 3D Cesium Point cloud to the given output folder 13 | type Writer interface { 14 | Write(t tree.Tree, folderName string, ctx context.Context) error 15 | } 16 | 17 | type StandardWriter struct { 18 | numWorkers int 19 | bufferRatio int 20 | basePath string 21 | version version.TilesetVersion 22 | producerFunc func(basepath, folder string) Producer 23 | consumerFunc func(version.TilesetVersion) Consumer 24 | } 25 | 26 | func NewWriter(basePath string, options ...func(*StandardWriter)) (*StandardWriter, error) { 27 | w := &StandardWriter{ 28 | basePath: basePath, 29 | numWorkers: 1, 30 | bufferRatio: 5, 31 | version: version.TilesetVersion_1_0, 32 | producerFunc: NewStandardProducer, 33 | consumerFunc: func(v version.TilesetVersion) Consumer { 34 | if v == version.TilesetVersion_1_0 { 35 | return NewStandardConsumer(WithGeometryEncoder(NewPntsEncoder())) 36 | } 37 | return NewStandardConsumer(WithGeometryEncoder(NewGltfEncoder())) 38 | }, 39 | } 40 | for _, optFn := range options { 41 | optFn(w) 42 | } 43 | return w, nil 44 | } 45 | 46 | // WithNumWorkers defines how many writer goroutines to launch when writing the tiles. 47 | func WithNumWorkers(n int) func(*StandardWriter) { 48 | return func(w *StandardWriter) { 49 | w.numWorkers = n 50 | } 51 | } 52 | 53 | // WithBufferRation defines how many jobs per writer worker to allow enqueuing. 
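// Illustrative sketch, not part of the repository: wiring a StandardWriter (internal/writer/writer.go
// above) to export a built tree. The tree value is assumed to come from the octree construction
// step elsewhere in the project.
package example

import (
	"context"

	"github.com/mfbonfigli/gocesiumtiler/v2/internal/tree"
	"github.com/mfbonfigli/gocesiumtiler/v2/internal/writer"
)

func export(t tree.Tree, outDir string) error {
	w, err := writer.NewWriter(outDir, writer.WithNumWorkers(4))
	if err != nil {
		return err
	}
	// writes the tileset under outDir/tileset using 4 concurrent consumers
	return w.Write(t, "tileset", context.Background())
}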
54 | func WithBufferRatio(n int) func(*StandardWriter) { 55 | return func(w *StandardWriter) { 56 | w.bufferRatio = int(math.Max(1, float64(n))) 57 | } 58 | } 59 | 60 | // WithTilesetVersion sets the version of the generated tilesets. version 1.0 generates .pnts gemetries 61 | // while version 1.1 generates .glb (gltf) geometries. 62 | func WithTilesetVersion(v version.TilesetVersion) func(*StandardWriter) { 63 | return func(w *StandardWriter) { 64 | w.version = v 65 | } 66 | } 67 | 68 | func (w *StandardWriter) Write(t tree.Tree, folderName string, ctx context.Context) error { 69 | // init channel where consumers can eventually submit errors that prevented them to finish the job 70 | errorChannel := make(chan error) 71 | 72 | // init channel where to submit work with a buffer N times greater than the number of consumer 73 | workChannel := make(chan *WorkUnit, w.numWorkers*w.bufferRatio) 74 | 75 | var waitGroup sync.WaitGroup 76 | var errorWaitGroup sync.WaitGroup 77 | 78 | // producing is easy, only 1 producer 79 | producer := w.producerFunc(w.basePath, folderName) 80 | waitGroup.Add(1) 81 | go producer.Produce(workChannel, errorChannel, &waitGroup, t.RootNode(), ctx) 82 | 83 | // add consumers to waitgroup and launch them 84 | for i := 0; i < w.numWorkers; i++ { 85 | waitGroup.Add(1) 86 | // instantiate a new converter per each goroutine for thread safety 87 | consumer := w.consumerFunc(w.version) 88 | go consumer.Consume(workChannel, errorChannel, &waitGroup) 89 | } 90 | 91 | // launch error listener 92 | errs := []error{} 93 | errorWaitGroup.Add(1) 94 | go func() { 95 | defer errorWaitGroup.Done() 96 | for { 97 | err, ok := <-errorChannel 98 | if !ok { 99 | return 100 | } 101 | errs = append(errs, err) 102 | } 103 | }() 104 | 105 | // wait for producers and consumers to finish 106 | waitGroup.Wait() 107 | 108 | // close error chan 109 | close(errorChannel) 110 | errorWaitGroup.Wait() 111 | 112 | if len(errs) != 0 { 113 | return errs[0] 114 | } 115 | return nil 116 | } 117 | -------------------------------------------------------------------------------- /internal/geom/bbox_test.go: -------------------------------------------------------------------------------- 1 | package geom 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func TestNewBBox(t *testing.T) { 8 | actual := NewBoundingBox(-10, 0, 10, 20, -100, 20) 9 | expected := BoundingBox{ 10 | Xmin: -10, 11 | Xmax: 0, 12 | Xmid: -5, 13 | Ymin: 10, 14 | Ymax: 20, 15 | Ymid: 15, 16 | Zmin: -100, 17 | Zmax: 20, 18 | Zmid: -40, 19 | } 20 | if actual != expected { 21 | t.Errorf("expected boundingbox %v got %v", expected, actual) 22 | } 23 | } 24 | 25 | func TestNewBBoxFromParent(t *testing.T) { 26 | parent := NewBoundingBox(-10, 0, 10, 20, -100, 20) 27 | actual := NewBoundingBoxFromParent(parent, 0) 28 | expected := BoundingBox{ 29 | Xmin: -10, 30 | Xmax: -5, 31 | Xmid: -7.5, 32 | Ymin: 10, 33 | Ymax: 15, 34 | Ymid: 12.5, 35 | Zmin: -100, 36 | Zmax: -40, 37 | Zmid: -70, 38 | } 39 | if actual != expected { 40 | t.Errorf("expected boundingbox %v got %v", expected, actual) 41 | } 42 | 43 | actual = NewBoundingBoxFromParent(parent, 1) 44 | expected = BoundingBox{ 45 | Xmin: -5, 46 | Xmax: -0, 47 | Xmid: -2.5, 48 | Ymin: 10, 49 | Ymax: 15, 50 | Ymid: 12.5, 51 | Zmin: -100, 52 | Zmax: -40, 53 | Zmid: -70, 54 | } 55 | if actual != expected { 56 | t.Errorf("expected boundingbox %v got %v", expected, actual) 57 | } 58 | 59 | actual = NewBoundingBoxFromParent(parent, 2) 60 | expected = BoundingBox{ 61 | Xmin: -10, 62 | Xmax: -5, 63 | Xmid: -7.5, 64 | 
Ymin: 15, 65 | Ymax: 20, 66 | Ymid: 17.5, 67 | Zmin: -100, 68 | Zmax: -40, 69 | Zmid: -70, 70 | } 71 | if actual != expected { 72 | t.Errorf("expected boundingbox %v got %v", expected, actual) 73 | } 74 | 75 | actual = NewBoundingBoxFromParent(parent, 3) 76 | expected = BoundingBox{ 77 | Xmin: -5, 78 | Xmax: -0, 79 | Xmid: -2.5, 80 | Ymin: 15, 81 | Ymax: 20, 82 | Ymid: 17.5, 83 | Zmin: -100, 84 | Zmax: -40, 85 | Zmid: -70, 86 | } 87 | if actual != expected { 88 | t.Errorf("expected boundingbox %v got %v", expected, actual) 89 | } 90 | 91 | actual = NewBoundingBoxFromParent(parent, 4) 92 | expected = BoundingBox{ 93 | Xmin: -10, 94 | Xmax: -5, 95 | Xmid: -7.5, 96 | Ymin: 10, 97 | Ymax: 15, 98 | Ymid: 12.5, 99 | Zmin: -40, 100 | Zmax: 20, 101 | Zmid: -10, 102 | } 103 | if actual != expected { 104 | t.Errorf("expected boundingbox %v got %v", expected, actual) 105 | } 106 | 107 | actual = NewBoundingBoxFromParent(parent, 5) 108 | expected = BoundingBox{ 109 | Xmin: -5, 110 | Xmax: -0, 111 | Xmid: -2.5, 112 | Ymin: 10, 113 | Ymax: 15, 114 | Ymid: 12.5, 115 | Zmin: -40, 116 | Zmax: 20, 117 | Zmid: -10, 118 | } 119 | if actual != expected { 120 | t.Errorf("expected boundingbox %v got %v", expected, actual) 121 | } 122 | 123 | actual = NewBoundingBoxFromParent(parent, 6) 124 | expected = BoundingBox{ 125 | Xmin: -10, 126 | Xmax: -5, 127 | Xmid: -7.5, 128 | Ymin: 15, 129 | Ymax: 20, 130 | Ymid: 17.5, 131 | Zmin: -40, 132 | Zmax: 20, 133 | Zmid: -10, 134 | } 135 | if actual != expected { 136 | t.Errorf("expected boundingbox %v got %v", expected, actual) 137 | } 138 | 139 | actual = NewBoundingBoxFromParent(parent, 7) 140 | expected = BoundingBox{ 141 | Xmin: -5, 142 | Xmax: -0, 143 | Xmid: -2.5, 144 | Ymin: 15, 145 | Ymax: 20, 146 | Ymid: 17.5, 147 | Zmin: -40, 148 | Zmax: 20, 149 | Zmid: -10, 150 | } 151 | if actual != expected { 152 | t.Errorf("expected boundingbox %v got %v", expected, actual) 153 | } 154 | } 155 | 156 | func TestBBoxAsCesiumBox(t *testing.T) { 157 | b := NewBoundingBox(-30, 10, 10, 20, -100, 20) 158 | expected := [12]float64{ 159 | -10, 15, -40, 160 | 20, 0, 0, 161 | 0, 5, 0, 162 | 0, 0, 60, 163 | } 164 | 165 | if actual := b.AsCesiumBox(); actual != expected { 166 | t.Errorf("expected boundingbox array %v got %v", expected, actual) 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /internal/conv/coor/proj/proj_test.go: -------------------------------------------------------------------------------- 1 | package proj 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 8 | "github.com/twpayne/go-proj/v10" 9 | ) 10 | 11 | var coordTolerance = 0.01 12 | 13 | func TestToSrid(t *testing.T) { 14 | c, err := NewProjCoordinateConverter() 15 | if err != nil { 16 | t.Fatalf("unexpected error %v", err) 17 | } 18 | 19 | // 4326 to 4978 20 | actual, err := c.Transform("EPSG:4326", "EPSG:4978", model.Vector{X: 123.474003, Y: 8.099314, Z: 0}) 21 | if err != nil { 22 | t.Errorf("unexpected error %v", err) 23 | } 24 | expected := model.Vector{X: -3483057.5277292132, Y: 5267517.241803079, Z: 892655.4197953615} 25 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 26 | t.Errorf("expected coordinate %v, got %v. 
Err: %v", expected, actual, err) 27 | } 28 | 29 | // 4978 to 4326 30 | expected, err = c.Transform("EPSG:4978", "EPSG:4326", model.Vector{X: -3483057.5277292132, Y: 5267517.241803079, Z: 892655.4197953615}) 31 | if err != nil { 32 | t.Errorf("unexpected error %v", err) 33 | } 34 | actual = model.Vector{X: 123.474003, Y: 8.099314, Z: 0} 35 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 36 | t.Errorf("expected coordinate %v, got %v. Err: %v", expected, actual, err) 37 | } 38 | 39 | // 4326 to 3124 40 | actual, err = c.Transform("EPSG:4326", "EPSG:3124", model.Vector{X: 123.474003, Y: 8.099314, Z: 0}) 41 | if err != nil { 42 | t.Errorf("unexpected error %v", err) 43 | } 44 | expected = model.Vector{X: 552074.5400524682, Y: 895674.6033419219, Z: 0} 45 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 46 | t.Errorf("expected coordinate %v, got %v. Err: %v", expected, actual, err) 47 | } 48 | 49 | // 4978 to 3124 50 | actual, err = c.Transform("EPSG:4978", "EPSG:3124", model.Vector{X: -3483057.5277292132, Y: 5267517.241803079, Z: 892655.4197953615}) 51 | if err != nil { 52 | t.Errorf("unexpected error %v", err) 53 | } 54 | expected = model.Vector{X: 552074.5400524682, Y: 895674.6033419219, Z: 0} 55 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 56 | t.Errorf("expected coordinate %v, got %v. Err: %v", expected, actual, err) 57 | } 58 | 59 | // 3124 to 4978 60 | actual, err = c.Transform("EPSG:3124", "EPSG:4978", model.Vector{X: 552074.5400524682, Y: 895674.6033419219, Z: 0}) 61 | if err != nil { 62 | t.Errorf("unexpected error %v", err) 63 | } 64 | expected = model.Vector{X: -3483057.5277292132, Y: 5267517.241803079, Z: 892655.4197953615} 65 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 66 | t.Errorf("expected coordinate %v, got %v. Err: %v", expected, actual, err) 67 | } 68 | c.Cleanup() 69 | } 70 | 71 | func TestToWGS84Cartesian(t *testing.T) { 72 | c, err := NewProjCoordinateConverter() 73 | if err != nil { 74 | t.Fatalf("unexpected error %v", err) 75 | } 76 | 77 | // 4326 to 4978 78 | actual, err := c.ToWGS84Cartesian("EPSG:4326", model.Vector{X: 123.474003, Y: 8.099314, Z: 0}) 79 | if err != nil { 80 | t.Errorf("unexpected error %v", err) 81 | } 82 | expected := model.Vector{X: -3483057.5277292132, Y: 5267517.241803079, Z: 892655.4197953615} 83 | if err := utils.CompareCoord(actual, expected, coordTolerance); err != nil { 84 | t.Errorf("expected coordinate %v, got %v. 
Err: %v", expected, actual, err) 85 | } 86 | c.Cleanup() 87 | } 88 | func TestTest(t *testing.T) { 89 | context := proj.NewContext() 90 | 91 | // The C function does not return any error hence we can only reasonably 92 | // validate that executing the SetSearchPaths function call does not panic 93 | // considering various boundary conditions 94 | context.SetSearchPaths(nil) 95 | context.SetSearchPaths([]string{}) 96 | context.SetSearchPaths([]string{"/tmp/data"}) 97 | context.SetSearchPaths([]string{"/tmp/data", "/tmp/data2"}) 98 | } 99 | -------------------------------------------------------------------------------- /tiler/options.go: -------------------------------------------------------------------------------- 1 | package tiler 2 | 3 | import ( 4 | "runtime" 5 | 6 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 7 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 8 | ) 9 | 10 | type TilerEvent int 11 | 12 | const ( 13 | EventReadLasHeaderStarted TilerEvent = iota 14 | EventReadLasHeaderCompleted 15 | EventReadCRSDetected 16 | EventReadLasHeaderError 17 | EventPointLoadingStarted 18 | EventPointLoadingCompleted 19 | EventPointLoadingError 20 | EventBuildStarted 21 | EventBuildCompleted 22 | EventBuildError 23 | EventExportStarted 24 | EventExportCompleted 25 | EventExportError 26 | ) 27 | 28 | type TilerOptions struct { 29 | gridSize float64 30 | maxDepth int 31 | mutators []mutator.Mutator 32 | eightBitColors bool 33 | numWorkers int 34 | minPointsPerTile int 35 | callback TilerCallback 36 | version version.TilesetVersion 37 | } 38 | 39 | type tilerOptionsFn func(*TilerOptions) 40 | 41 | type TilerCallback func(event TilerEvent, inputDesc string, elapsed int64, msg string) 42 | 43 | // NewDefaultTilerOptions returns sensible defaults for tiling options 44 | func NewDefaultTilerOptions() *TilerOptions { 45 | return &TilerOptions{ 46 | gridSize: 20, 47 | maxDepth: 10, 48 | numWorkers: runtime.NumCPU(), 49 | minPointsPerTile: 5000, 50 | eightBitColors: false, 51 | callback: nil, 52 | version: version.TilesetVersion_1_0, 53 | } 54 | } 55 | 56 | // NewTilerOptions returns default tiler options modified using the 57 | // provided manipulating functions 58 | func NewTilerOptions(optFn ...tilerOptionsFn) *TilerOptions { 59 | opts := NewDefaultTilerOptions() 60 | for _, fn := range optFn { 61 | fn(opts) 62 | } 63 | return opts 64 | } 65 | 66 | // WithGridSize sets the max grid size, i.e. the approximate max allowed spacing between 67 | // any two points at the coarser level of detail. Expressed in meters. 68 | func WithGridSize(size float64) tilerOptionsFn { 69 | return func(opt *TilerOptions) { 70 | opt.gridSize = size 71 | } 72 | } 73 | 74 | // WithMaxDepth sets the max depth, i.e. the maximum number of levels the tree can reach. 75 | func WithMaxDepth(maxDepth int) tilerOptionsFn { 76 | return func(opt *TilerOptions) { 77 | opt.maxDepth = maxDepth 78 | } 79 | } 80 | 81 | // WithMutators adds the specified list of mutators to the processing step of the cloud 82 | func WithMutators(m []mutator.Mutator) tilerOptionsFn { 83 | return func(opt *TilerOptions) { 84 | opt.mutators = m 85 | } 86 | } 87 | 88 | // WithWorkerNumber sets the number of workers to use to read the las files or to 89 | // run the export jobs 90 | func WithWorkerNumber(numWorkers int) tilerOptionsFn { 91 | return func(opt *TilerOptions) { 92 | opt.numWorkers = numWorkers 93 | } 94 | } 95 | 96 | // WithMinPointsPerTile returns the minimum number of points a tile must store to exist. 
97 | // Used to avoid almost empty tiles that could be consolidated with their parent. 98 | func WithMinPointsPerTile(minPointsPerTile int) tilerOptionsFn { 99 | return func(opt *TilerOptions) { 100 | opt.minPointsPerTile = minPointsPerTile 101 | } 102 | } 103 | 104 | // WithCallback sets a function that should be invoked as the tiler job runs 105 | func WithCallback(callback TilerCallback) tilerOptionsFn { 106 | return func(opt *TilerOptions) { 107 | opt.callback = callback 108 | } 109 | } 110 | 111 | // WithEightBitColors, when set to true, forces the tiler to interpret the color info in the file as 8-bit colors 112 | func WithEightBitColors(eightBit bool) tilerOptionsFn { 113 | return func(opt *TilerOptions) { 114 | opt.eightBitColors = eightBit 115 | } 116 | } 117 | 118 | // WithTilesetVersion sets the version of the tilesets to generate 119 | func WithTilesetVersion(v version.TilesetVersion) tilerOptionsFn { 120 | return func(opt *TilerOptions) { 121 | opt.version = v 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /internal/las/golas/point.go: -------------------------------------------------------------------------------- 1 | package golas 2 | 3 | type ScanDirection uint8 4 | type EdgeOfFlightLine uint8 5 | type ClassificationFlag uint8 6 | 7 | const ( 8 | ScanDirectionNegative ScanDirection = 0 9 | ScanDirectionPositive ScanDirection = 1 10 | ) 11 | 12 | const ( 13 | EOFNormal EdgeOfFlightLine = 0 14 | EOFEndOfScan EdgeOfFlightLine = 1 15 | ) 16 | 17 | const ( 18 | ClassificationSynthetic ClassificationFlag = 0 19 | ClassificationKeyPoint ClassificationFlag = 1 20 | ClassificationWitheld ClassificationFlag = 2 21 | ClassificationOverlap ClassificationFlag = 3 22 | ) 23 | 24 | // Point stores the raw data of a generic LAS Point. It is compatible with all LAS 1.1+ point formats, 25 | // although some fields might be set to their default value for point formats that do not support them. 26 | // The X, Y and Z coordinates are stored already scaled and offset by the scale 27 | // and offset values declared in the LAS header.
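// As an illustrative sketch (the numbers below are hypothetical, not taken from any header or test file):
// a raw integer record value of 123456 with a header X scale of 0.01 and an X offset of 100.0 would be
// exposed here as X = 123456*0.01 + 100.0 = 1334.56.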
28 | type Point struct { 29 | X float64 30 | Y float64 31 | Z float64 32 | Intensity uint16 33 | Classification uint8 34 | ScanAngleRank int8 35 | UserData uint8 36 | ScanAngle int16 37 | PointSourceID uint16 38 | CustomData []byte 39 | GPSTime float64 40 | Red uint16 41 | Green uint16 42 | Blue uint16 43 | NIR uint16 44 | WavePacketDescriptorIndex uint8 45 | ByteOffsetToWaveformData uint64 46 | WaveformPacketSizeBytes uint32 47 | ReturnPointWaveformLocation float32 48 | ParametricDx float32 49 | ParametricDy float32 50 | ParametricDz float32 51 | PointDataRecordFormat uint8 52 | flags1 byte 53 | flags2 byte 54 | classificationRaw byte 55 | } 56 | 57 | func (p Point) ReturnNumber() uint8 { 58 | if p.PointDataRecordFormat < 6 { 59 | return p.flags1 & 0b111 60 | } 61 | return p.flags1 & 0b1111 62 | } 63 | 64 | func (p Point) NumberOfReturns() uint8 { 65 | if p.PointDataRecordFormat < 6 { 66 | return (p.flags1 & 0b111000) >> 3 67 | } 68 | return (p.flags1 & 0b11110000) >> 4 69 | } 70 | 71 | func (p Point) ScanDirectionFlag() ScanDirection { 72 | if p.PointDataRecordFormat < 6 { 73 | return ScanDirection((p.flags1 & 0b1000000) >> 6) 74 | } 75 | return ScanDirection((p.flags2 & 0b1000000) >> 6) 76 | } 77 | 78 | func (p Point) EdgeOfFlightLineFlag() EdgeOfFlightLine { 79 | if p.PointDataRecordFormat < 6 { 80 | return EdgeOfFlightLine((p.flags1 & 0b10000000) >> 7) 81 | } 82 | return EdgeOfFlightLine((p.flags2 & 0b10000000) >> 7) 83 | } 84 | 85 | func (p Point) ScannerChannel() uint8 { 86 | if p.PointDataRecordFormat < 6 { 87 | return 0 88 | } 89 | return (p.flags2 & 0b110000) >> 4 90 | } 91 | 92 | func (p Point) ClassificationFlags() []ClassificationFlag { 93 | flags := []ClassificationFlag{} 94 | if p.PointDataRecordFormat < 6 { 95 | if (p.classificationRaw>>5)&0b1 == 1 { 96 | flags = append(flags, ClassificationSynthetic) 97 | } 98 | if (p.classificationRaw>>6)&0b1 == 1 { 99 | flags = append(flags, ClassificationKeyPoint) 100 | } 101 | if (p.classificationRaw>>7)&0b1 == 1 { 102 | flags = append(flags, ClassificationWitheld) 103 | } 104 | return flags 105 | } 106 | if (p.flags2 & 0b1) == 1 { 107 | flags = append(flags, ClassificationSynthetic) 108 | } 109 | if (p.flags2>>1)&0b1 == 1 { 110 | flags = append(flags, ClassificationKeyPoint) 111 | } 112 | if (p.flags2>>2)&0b1 == 1 { 113 | flags = append(flags, ClassificationWitheld) 114 | } 115 | if (p.flags2>>3)&0b1 == 1 { 116 | flags = append(flags, ClassificationOverlap) 117 | } 118 | return flags 119 | } 120 | -------------------------------------------------------------------------------- /share/other.extra: -------------------------------------------------------------------------------- 1 | ## NAD83 / BC Albers (this has been superseded but is kept for compatibility) 2 | <42102> +proj=aea +ellps=GRS80 +lat_0=45 +lon_0=-126.0 +lat_1=50.0 +lat_2=58.5 +x_0=1000000.0 +y_0=0 +datum=NAD83 +units=m no_defs <> 3 | 4 | 5 | # 6 | # OGC-defined extended codes (41000--41999) 7 | # see http://www.digitalearth.gov/wmt/auto.html 8 | # 9 | # WGS84 / Simple Mercator 10 | <41001> +proj=merc +lat_ts=0 +lon_0=0 +k=1.000000 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> 11 | # 12 | # CubeWerx-defined extended codes (42100--42199) 13 | # 14 | # WGS 84 / LCC Canada 15 | <42101> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=0 +lon_0=-95 +x_0=0 +y_0=-8000000 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> 16 | #EPSG:42102,"PROJCS[\"NAD83 / BC 
Albers\",GEOGCS[\"NAD83\",DATUM[\"North_American_Datum_1983\",SPHEROID[\"GRS_1980\",6378137,298.257222101]],PRIMEM[\"Greenwich\",0],UNIT[\"Decimal_Degree\",0.0174532925199433]],PROJECTION[\"Albers_conic_equal_area\"],PARAMETER[\"central_meridian\",-126.0],PARAMETER[\"latitude_of_origin\",45],PARAMETER[\"standard_parallel_1\",50.0],PARAMETER[\"standard_parallel_2\",58.5],PARAMETER[\"false_easting\",1000000.0],PARAMETER[\"false_northing\",0],UNIT[\"Meter\",1]]" 17 | # WGS 84 / LCC USA 18 | <42103> +proj=lcc +lat_1=33 +lat_2=45 +lat_0=0 +lon_0=-100 +x_0=0 +y_0=0 +ellps=WGS72 +datum=WGS84 +units=m +no_defs no_defs <> 19 | # NAD83 / MTM zone 8 Québec 20 | <42104> +proj=tmerc +lat_0=0 +lon_0=-73.5 +k=0.999900 +x_0=304800 +y_0=0 +ellps=GRS80 +units=m +no_defs no_defs <> 21 | # WGS84 / Merc NorthAm 22 | <42105> +proj=merc +lat_ts=0 +lon_0=-96 +k=1.000000 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs no_defs <> 23 | # WGS84 / Lambert Azim Mozambique 24 | <42106> +proj=laea +lat_0=5 +lon_0=20 +x_0=0 +y_0=0 +a=6370997 +b=6370997 +datum=WGS84 +units=m +no_defs no_defs <> 25 | # 26 | # CubeWerx-customer definitions (42300--42399) 27 | # 28 | # NAD27 / Polar Stereographic / CM=-98 29 | <42301> +proj=stere +lat_0=90 +lon_0=-98 +x_0=0 +y_0=0 +ellps=clrk66 +datum=NAD27 +units=m +no_defs no_defs <> 30 | # JapanOrtho.09 09 31 | <42302> +proj=tmerc +lat_0=36 +lon_0=139.833333333333 +k=0.999900 +x_0=0 +y_0=0 +ellps=bessel +units=m +no_defs no_defs <> 32 | # NAD83 / Albers NorthAm 33 | <42303> +proj=aea +lat_1=29.5 +lat_2=45.5 +lat_0=23 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> 34 | # NAD83 / NRCan LCC Canada 35 | <42304> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=49 +lon_0=-95 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> 36 | # France_II 37 | <42305> +proj=lcc +lat_1=45.898918964419 +lat_2=47.696014502038 +lat_0=46.8 +lon_0=2.337229166666667 +x_0=600000 +y_0=2200000 +a=6378249.2 +b=6356514.999904194 +pm=2.337229166666667 +units=m +no_defs no_defs <> 38 | # NAD83/QC_LCC 39 | <42306> +proj=lcc +lat_1=46 +lat_2=60 +lat_0=44 +lon_0=-68.5 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> 40 | # NAD83 / Texas Central - feet 41 | <42307> +proj=lcc +lat_1=31.8833333333333 +lat_2=30.1166666666667 +lat_0=29.6666666666667 +lon_0=-100.333333333333 +x_0=700000.0000000001 +y_0=3000000 +ellps=GRS80 +datum=NAD83 +to_meter=0.3048006096012192 +no_defs no_defs <> 42 | # NAD27 / California Albers 43 | <42308> +proj=aea +lat_1=34 +lat_2=40.5 +lat_0=0 +lon_0=-120 +x_0=0 +y_0=-4000000 +ellps=clrk66 +datum=NAD27 +units=m +no_defs no_defs <> 44 | # NAD 83 / LCC Canada AVHRR-2 45 | <42309> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=0 +lon_0=-95 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> 46 | # WGS84+GRS80 / Mercator 47 | <42310> +proj=merc +lat_ts=0 +lon_0=0 +k=1.000000 +x_0=0 +y_0=0 +ellps=GRS80 +datum=WGS84 +units=m +no_defs no_defs <> 48 | # NAD83 / LCC Statcan 49 | <42311> +proj=lcc +lat_1=49 +lat_2=77 +lat_0=63.390675 +lon_0=-91.86666700000001 +x_0=6200000 +y_0=3000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs no_defs <> 50 | # 51 | # Funny epsgish code for google mercator - you should really use EPSG:3857 52 | # 53 | <900913> +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs <> 54 | -------------------------------------------------------------------------------- /internal/las/reader.go: 
-------------------------------------------------------------------------------- 1 | package las 2 | 3 | import ( 4 | "fmt" 5 | "io" 6 | "os" 7 | "sync/atomic" 8 | 9 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 10 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/las/golas" 11 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 12 | ) 13 | 14 | // LasReader models a readable source of LAS point data 15 | type LasReader interface { 16 | // NumberOfPoints returns the number of points stored in the LAS file 17 | NumberOfPoints() int 18 | // GetNext returns the next point in the las file 19 | GetNext() (geom.Point64, error) 20 | // GetCRS returns a string defining the CRS. This is typically a string of the form EPSG:XYZ where XYZ is the EPSG code of the CRS. 21 | GetCRS() string 22 | // Close closes the reader 23 | Close() 24 | } 25 | 26 | // CombinedFileLasReader enables reading a list of LAS files as if they were a single one. 27 | // The files MUST have the same properties (SRID, etc.) 28 | type CombinedFileLasReader struct { 29 | currentReader atomic.Int32 30 | readers []LasReader 31 | numPts int 32 | crs string 33 | } 34 | 35 | // NewCombinedFileLasReader creates a new file reader for the files passed as input. If crs is the empty string, the 36 | // reader will autodetect the CRS from the input files; however, an error is returned if the CRS is not consistent across 37 | // all of them or if it is not found in the files. 38 | func NewCombinedFileLasReader(files []string, crs string, eightBitColor bool) (*CombinedFileLasReader, error) { 39 | r := &CombinedFileLasReader{} 40 | crsProvided := crs != "" 41 | for _, f := range files { 42 | fr, err := NewGoLasReader(f, crs, eightBitColor) 43 | if err != nil { 44 | return nil, err 45 | } 46 | r.numPts += fr.NumberOfPoints() 47 | r.readers = append(r.readers, fr) 48 | if !crsProvided { 49 | if crs != "" && crs != fr.GetCRS() { 50 | return nil, fmt.Errorf("no CRS was provided and inconsistent CRS were detected:\n%s\n\n and\n\n%s", crs, fr.GetCRS()) 51 | } 52 | crs = fr.GetCRS() 53 | } 54 | } 55 | r.crs = crs 56 | return r, nil 57 | } 58 | 59 | func (m *CombinedFileLasReader) NumberOfPoints() int { 60 | return m.numPts 61 | } 62 | 63 | func (m *CombinedFileLasReader) GetCRS() string { 64 | return m.crs 65 | } 66 | 67 | func (m *CombinedFileLasReader) GetNext() (geom.Point64, error) { 68 | for { 69 | currReader := int(m.currentReader.Load()) 70 | if currReader >= len(m.readers) { 71 | return geom.Point64{}, io.EOF 72 | } 73 | pt, err := m.readers[currReader].GetNext() 74 | if err != nil { 75 | // try to move on to the next reader 76 | m.currentReader.CompareAndSwap(int32(currReader), int32(currReader)+1) 77 | continue 78 | } 79 | return pt, nil 80 | } 81 | } 82 | 83 | func (m *CombinedFileLasReader) Close() { 84 | for _, r := range m.readers { 85 | r.Close() 86 | } 87 | } 88 | 89 | // GoLasReader wraps a golas.Las object, implementing the LasReader interface required by gocesiumtiler 90 | type GoLasReader struct { 91 | file *os.File 92 | f *golas.Las 93 | eightBitColor bool 94 | crs string 95 | } 96 | 97 | // NewGoLasReader returns a GoLasReader instance. If crs is empty the reader will attempt to autodetect 98 | // the CRS from the LAS metadata and return an error if that is not possible.
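// A minimal usage sketch (error handling elided; "cloud.las" is a hypothetical path):
//
//	r, err := NewGoLasReader("cloud.las", "", false) // empty crs: autodetect; false: treat colors as 16-bit
//	defer r.Close()
//	pt, err := r.GetNext()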
99 | func NewGoLasReader(fileName string, crs string, eightBitColor bool) (*GoLasReader, error) { 100 | f, err := os.Open(fileName) 101 | if err != nil { 102 | return nil, err 103 | } 104 | g, err := golas.NewLas(f) 105 | if err != nil { 106 | f.Close() 107 | return nil, err 108 | } 109 | if crs == "" { 110 | crs = g.CRS() 111 | if crs == "" { 112 | f.Close() 113 | return nil, fmt.Errorf("no CRS provided and was not possible to determine CRS from LAS file %s", fileName) 114 | } 115 | } 116 | return &GoLasReader{ 117 | file: f, 118 | f: g, 119 | eightBitColor: eightBitColor, 120 | crs: crs, 121 | }, nil 122 | } 123 | 124 | func (f *GoLasReader) NumberOfPoints() int { 125 | return int(f.f.NumberOfPoints()) 126 | } 127 | 128 | func (f *GoLasReader) GetCRS() string { 129 | return f.f.CRS() 130 | } 131 | 132 | func (f *GoLasReader) Close() { 133 | f.file.Close() 134 | } 135 | 136 | func (f *GoLasReader) GetNext() (geom.Point64, error) { 137 | pt, err := f.f.Next() 138 | if err != nil { 139 | return geom.Point64{}, err 140 | } 141 | var corr uint16 = 256 142 | if f.eightBitColor { 143 | corr = 1 144 | } 145 | return geom.Point64{ 146 | Vector: model.Vector{ 147 | X: pt.X, 148 | Y: pt.Y, 149 | Z: pt.Z, 150 | }, 151 | R: uint8(pt.Red / corr), 152 | G: uint8(pt.Green / corr), 153 | B: uint8(pt.Blue / corr), 154 | Intensity: uint8(pt.Intensity), 155 | Classification: pt.Classification, 156 | }, nil 157 | } 158 | -------------------------------------------------------------------------------- /internal/writer/gltf.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "encoding/json" 5 | "math" 6 | "path" 7 | 8 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 9 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 10 | "github.com/qmuntal/gltf" 11 | "github.com/qmuntal/gltf/modeler" 12 | ) 13 | 14 | // Intensity and Classifications are stored using the EXT_structural_metadata 15 | // GLTF extension. 
The following is the static schema that defines such properties and 16 | // links them the the _INTENSITY and _CLASSIFICATION point 17 | var extJson = ` 18 | { 19 | "schema": { 20 | "id": "pts_schema", 21 | "name": "pts_schema", 22 | "description": "point cloud point attribute schema", 23 | "version": "1.0.0", 24 | "classes": { 25 | "point": { 26 | "name": "point", 27 | "description": "Properties of point cloud points", 28 | "properties": { 29 | "INTENSITY": { 30 | "description": "Laser intensity", 31 | "type": "SCALAR", 32 | "componentType": "UINT16", 33 | "required": true 34 | }, 35 | "CLASSIFICATION": { 36 | "description": "Point classification", 37 | "type": "SCALAR", 38 | "componentType": "UINT16", 39 | "required": true 40 | } 41 | } 42 | } 43 | } 44 | }, 45 | "propertyAttributes": [ 46 | { 47 | "class": "point", 48 | "properties": { 49 | "INTENSITY": { 50 | "attribute": "_INTENSITY" 51 | }, 52 | "CLASSIFICATION": { 53 | "attribute": "_CLASSIFICATION" 54 | } 55 | } 56 | } 57 | ] 58 | } 59 | ` 60 | 61 | // GltfEncoder writes a node data as Gltf/Glb binary file (3D Tiles 1.1 specs) 62 | // Encodes intensity and classification using the EXT_structural_metadata GLTF extension 63 | type GltfEncoder struct{} 64 | 65 | func (e *GltfEncoder) TilesetVersion() version.TilesetVersion { 66 | return version.TilesetVersion_1_1 67 | } 68 | 69 | func (e *GltfEncoder) Filename() string { 70 | return "content.glb" 71 | } 72 | 73 | func NewGltfEncoder() *GltfEncoder { 74 | return &GltfEncoder{} 75 | } 76 | 77 | func (e *GltfEncoder) Write(node tree.Node, folderPath string) error { 78 | pts := node.Points() 79 | 80 | doc := gltf.NewDocument() 81 | doc.Asset = gltf.Asset{ 82 | Generator: "gocesiumtiler", 83 | Version: "2.0", 84 | } 85 | 86 | coords := make([][3]float32, pts.Len()) 87 | colors := make([][3]uint8, pts.Len()) 88 | 89 | // Note: for some reason uint8 results in an invalid GLTF being generated 90 | intensities := make([]uint16, pts.Len()) 91 | classifications := make([]uint16, pts.Len()) 92 | for i := 0; i < pts.Len(); i++ { 93 | pt, err := pts.Next() 94 | if err != nil { 95 | return err 96 | } 97 | coords[i][0] = pt.X 98 | coords[i][1] = pt.Y 99 | coords[i][2] = pt.Z 100 | 101 | // LAS colors are typically in the sRGB space, however GLTF specs require 102 | // COLOR_0 for meshes to be in the linear RGB space, hence we need to convert 103 | // the colors back to linear RGB 104 | colors[i][0] = uint8(math.Pow((float64(pt.R)/255), 2.2) * 255) 105 | colors[i][1] = uint8(math.Pow((float64(pt.G)/255), 2.2) * 255) 106 | colors[i][2] = uint8(math.Pow((float64(pt.B)/255), 2.2) * 255) 107 | intensities[i] = uint16(pt.Intensity) 108 | classifications[i] = uint16(pt.Classification) 109 | } 110 | 111 | attrs, err := modeler.WriteAttributesInterleaved(doc, modeler.Attributes{ 112 | Position: coords, 113 | Color: colors, 114 | CustomAttributes: []modeler.CustomAttribute{ 115 | {Name: "_INTENSITY", Data: intensities}, 116 | {Name: "_CLASSIFICATION", Data: classifications}, 117 | }, 118 | }) 119 | if err != nil { 120 | return err 121 | } 122 | 123 | // When both featureId.attribute and featureId.texture are undefined, then the feature ID value 124 | // for each vertex is given implicitly, via the index of the vertex. 125 | // In this case, the featureCount must match the number of vertices of the mesh primitive. 
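// Illustrative note: the primitive-level extension below, {"propertyAttributes": [0]}, refers to index 0
// of the document-level "propertyAttributes" array declared in extJson above, which in turn maps the
// INTENSITY and CLASSIFICATION class properties onto the _INTENSITY and _CLASSIFICATION vertex attributes.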
126 | doc.Meshes = []*gltf.Mesh{{ 127 | Name: "PointCloud", 128 | Primitives: []*gltf.Primitive{{ 129 | Mode: gltf.PrimitivePoints, 130 | Attributes: attrs, 131 | Extensions: gltf.Extensions{ 132 | "EXT_structural_metadata": json.RawMessage(`{"propertyAttributes": [0]}`), 133 | }, 134 | }}, 135 | }} 136 | // gltf is Y up, however Cesium is Z up. This means that a rotation transform needs to be applied. 137 | doc.Nodes = []*gltf.Node{ 138 | { 139 | Name: "PointCloud", 140 | Mesh: gltf.Index(0), 141 | Matrix: [16]float64{1, 0, 0, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 1}, 142 | }, 143 | } 144 | doc.Scenes[0].Nodes = append(doc.Scenes[0].Nodes, 0) 145 | doc.Extensions = gltf.Extensions{ 146 | "EXT_structural_metadata": json.RawMessage(extJson), 147 | } 148 | doc.ExtensionsUsed = []string{ 149 | "EXT_structural_metadata", 150 | } 151 | 152 | pntsFilePath := path.Join(folderPath, e.Filename()) 153 | return gltf.SaveBinary(doc, pntsFilePath) 154 | } 155 | -------------------------------------------------------------------------------- /tiler/tiler_test.go: -------------------------------------------------------------------------------- 1 | package tiler 2 | 3 | import ( 4 | "context" 5 | "os" 6 | "path/filepath" 7 | "reflect" 8 | "testing" 9 | 10 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/las" 11 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 12 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree/grid" 13 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 14 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/writer" 15 | ) 16 | 17 | func TestTilerDefaults(t *testing.T) { 18 | tiler, err := NewGoCesiumTiler() 19 | if err != nil { 20 | t.Fatalf("unexpected error: %v", err) 21 | } 22 | tr := tiler.treeProvider(NewDefaultTilerOptions()) 23 | switch tr.(type) { 24 | case *grid.Node: 25 | default: 26 | t.Errorf("unexpected tree type returned") 27 | } 28 | // this returns an error due to a non-esitant path 29 | // but we ignore it on purpose for the sake of this test 30 | l, _ := tiler.lasReaderProvider([]string{""}, "EPSG:123", true) 31 | switch l.(type) { 32 | case *las.CombinedFileLasReader: 33 | default: 34 | t.Errorf("unexpected las reader type returned") 35 | } 36 | // this returns an error due to a non-esitant path 37 | // but we ignore it on purpose for the sake of this test 38 | w, err := tiler.writerProvider("", NewDefaultTilerOptions()) 39 | if err != nil { 40 | t.Fatalf("unexpected error: %v", err) 41 | } 42 | switch w.(type) { 43 | case *writer.StandardWriter: 44 | default: 45 | t.Errorf("unexpected writer type returned") 46 | } 47 | } 48 | 49 | func TestTilerProcessFile(t *testing.T) { 50 | tiler, err := NewGoCesiumTiler() 51 | if err != nil { 52 | t.Fatalf("unexpected error: %v", err) 53 | } 54 | w := &writer.MockWriter{} 55 | tr := &tree.MockNode{} 56 | l := &las.MockLasReader{} 57 | opts := NewDefaultTilerOptions() 58 | c := context.TODO() 59 | tiler.writerProvider = func(folder string, opts *TilerOptions) (writer.Writer, error) { 60 | return w, nil 61 | } 62 | tiler.treeProvider = func(opts *TilerOptions) tree.Tree { 63 | return tr 64 | } 65 | tiler.lasReaderProvider = func(inputLasFiles []string, sourceCRS string, eightbit bool) (las.LasReader, error) { 66 | return l, nil 67 | } 68 | 69 | tiler.ProcessFiles([]string{"abc.las"}, "out", "EPSG:123", opts, c) 70 | if !tr.LoadCalled { 71 | t.Errorf("Load was not called on the tree") 72 | } 73 | if actual := tr.Las; actual != l { 74 | t.Errorf("expected las reader %v got %v", l, actual) 75 | } 76 | if actual := 
tr.ConvFactory; actual == nil { 77 | t.Errorf("expected non-nil coordinate converter factory") 78 | } 79 | if actual := tr.Mut; actual == nil { 80 | t.Errorf("expected non-nil mutator") 81 | } 82 | if actual := tr.Ctx; actual != c { 83 | t.Errorf("expected different context") 84 | } 85 | if !tr.BuildCalled { 86 | t.Errorf("Build was not called on the tree") 87 | } 88 | if !w.WriteCalled { 89 | t.Errorf("Write was not called on the writer") 90 | } 91 | if actual := w.Tr; actual != tr { 92 | t.Errorf("expected tree %v got %v", tr, actual) 93 | } 94 | if actual := w.FolderName; actual != "" { 95 | t.Errorf("expected folder name '%v' got %v", "", actual) 96 | } 97 | if actual := w.Ctx; actual != c { 98 | t.Errorf("expected different context") 99 | } 100 | } 101 | 102 | func TestTilerProcessFolder(t *testing.T) { 103 | tiler, err := NewGoCesiumTiler() 104 | if err != nil { 105 | t.Fatalf("unexpected error: %v", err) 106 | } 107 | w := &writer.MockWriter{} 108 | tr := &tree.MockNode{} 109 | l := &las.MockLasReader{} 110 | opts := NewDefaultTilerOptions() 111 | c := context.TODO() 112 | tiler.writerProvider = func(folder string, opts *TilerOptions) (writer.Writer, error) { 113 | return w, nil 114 | } 115 | tiler.treeProvider = func(opts *TilerOptions) tree.Tree { 116 | return tr 117 | } 118 | files := []string{} 119 | tiler.lasReaderProvider = func(inputLasFiles []string, sourceCRS string, eightbit bool) (las.LasReader, error) { 120 | files = append(files, inputLasFiles...) 121 | return l, nil 122 | } 123 | 124 | tmp, err := os.MkdirTemp(os.TempDir(), "tst") 125 | if err != nil { 126 | t.Fatalf("unexpected error %v", err) 127 | } 128 | t.Cleanup(func() { 129 | os.RemoveAll(tmp) 130 | }) 131 | utils.TouchFile(filepath.Join(tmp, "abc.las")) 132 | utils.TouchFile(filepath.Join(tmp, "def.xyz")) 133 | utils.TouchFile(filepath.Join(tmp, "ghi.las")) 134 | tiler.ProcessFolder(tmp, "out", "EPSG:123", opts, c) 135 | if !tr.LoadCalled { 136 | t.Errorf("Load was not called on the tree") 137 | } 138 | if actual := tr.Las; actual != l { 139 | t.Errorf("expected las reader %v got %v", l, actual) 140 | } 141 | if actual := tr.ConvFactory; actual == nil { 142 | t.Errorf("expected non-nil coordinate converter factory") 143 | } 144 | if actual := tr.Mut; actual == nil { 145 | t.Errorf("expected non-nil mutator") 146 | } 147 | if actual := tr.Ctx; actual != c { 148 | t.Errorf("expected different context") 149 | } 150 | if !tr.BuildCalled { 151 | t.Errorf("Build was not called on the tree") 152 | } 153 | if !w.WriteCalled { 154 | t.Errorf("Write was not called on the writer") 155 | } 156 | if actual := w.Tr; actual != tr { 157 | t.Errorf("expected tree %v got %v", tr, actual) 158 | } 159 | if actual := w.FolderName; actual != "" { 160 | t.Errorf("expected folder name '%v' got %v", "", actual) 161 | } 162 | if actual := w.Ctx; actual != c { 163 | t.Errorf("expected different context") 164 | } 165 | expected := []string{ 166 | filepath.Join(tmp, "abc.las"), 167 | filepath.Join(tmp, "ghi.las"), 168 | } 169 | if !reflect.DeepEqual(files, expected) { 170 | t.Errorf("expected files processed %v, got %v", files, expected) 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /share/ITRF2008: -------------------------------------------------------------------------------- 1 | # ITRF2008 params are in mm/year, PJ_helmert uses m/year 2 | +version=1.0.0 +origin=http://itrf.ign.fr/doc_ITRF/Transfo-ITRF2008_ITRFs.txt +lastupdate=2017-07-26 3 | 4 | +proj=helmert +x=-0.002 +y=-0.0009 
+z=-0.0047 +s=0.00094 +dx=0.0003 +t_epoch=2000.0 +convention=position_vector 5 | 6 | +proj=helmert +x=-0.0019 +y=-0.0017 +z=-0.0105 +s=0.00134 +dx=0.0001 +dy=0.0001 +dz=-0.0018 +ds=0.00008 +t_epoch=2000.0 +convention=position_vector 7 | 8 | +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 9 | 10 | +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 11 | 12 | +proj=helmert +x=0.0048 +y=0.0026 +z=-0.0332 +s=0.00292 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 13 | 14 | +proj=helmert +x=-0.024 +y=0.0024 +z=-0.00386 +s=0.00341 +rx=-0.00171 +ry=-0.00148 +rz=-0.0003 +dx=-0.0028 +dy=-0.0001 +dz=-0.0024 +ds=0.00009 +drx=-0.00011 +dry=-0.00019 +drz=0.00007 +t_epoch=2000.0 +convention=position_vector 15 | 16 | +proj=helmert +x=0.0128 +y=0.0046 +z=-0.0412 +s=0.00221 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 17 | 18 | +proj=helmert +x=0.0248 +y=0.0186 +z=-0.0472 +s=0.00361 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 19 | 20 | +proj=helmert +x=0.0228 +y=0.0146 +z=-0.0632 +s=0.00391 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 21 | 22 | +proj=helmert +x=0.0278 +y=0.0386 +z=-0.1012 +s=0.00731 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 23 | 24 | +proj=helmert +x=0.0228 +y=0.0026 +z=-0.1252 +s=0.01041 +rz=0.00006 +dx=0.0001 +dy=-0.0005 +dz=-0.0032 +ds=0.00009 +drz=0.00002 +t_epoch=2000.0 +convention=position_vector 25 | 26 | 27 | # ITRF2008 Plate Motion Model parameters 28 | # 29 | # As described in 30 | # 31 | # Altamimi, Z., L. Métivier, and X. Collilieux (2012), ITRF2008 plate motion model, 32 | # J. Geophys. Res., 117, B07402, doi:10.1029/2011JB008930. 
33 | 34 | 35 | +proj=helmert +drx=-0.000190 +dry=-0.000442 +drz=0.000915 +convention=position_vector 36 | 37 | +proj=helmert +drx=-0.000252 +dry=-0.000302 +drz=0.000643 +convention=position_vector 38 | 39 | +proj=helmert +drx=0.001202 +dry=-0.000054 +drz=0.001485 +convention=position_vector 40 | 41 | +proj=helmert +drx=0.001504 +dry=0.001172 +drz=0.001228 +convention=position_vector 42 | 43 | +proj=helmert +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector 44 | 45 | +proj=helmert +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector 46 | 47 | +proj=helmert +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector 48 | 49 | +proj=helmert +drx=-0.000330 +dry=-0.001551 +drz=0.001625 +convention=position_vector 50 | 51 | +proj=helmert +drx=0.000035 +dry=-0.000662 +drz=-0.0001 +convention=position_vector 52 | 53 | +proj=helmert +drx=0.000095 +dry=-0.000598 +drz=0.000723 +convention=position_vector 54 | 55 | +proj=helmert +drx=-0.000411 +dry=0.001036 +drz=-0.002166 +convention=position_vector 56 | 57 | +proj=helmert +drx=-0.000243 +dry=-0.000311 +drz=-0.000154 +convention=position_vector 58 | 59 | +proj=helmert +drx=-0.000080 +dry=-0.000745 +drz=0.000897 +convention=position_vector 60 | 61 | +proj=helmert +drx=0.000047 +dry=-0.001 +drz=0.000975 +convention=position_vector 62 | 63 | 64 | # Plate names suffixed by _T (for Translation) that includes the translation 65 | # rates +dx=0.00041 +dy=0.00022 +dz=0.00041 given by Table 2 of the ITRF2008 plate motion model 66 | # paper 67 | 68 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000190 +dry=-0.000442 +drz=0.000915 +convention=position_vector 69 | 70 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000252 +dry=-0.000302 +drz=0.000643 +convention=position_vector 71 | 72 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001202 +dry=-0.000054 +drz=0.001485 +convention=position_vector 73 | 74 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001504 +dry=0.001172 +drz=0.001228 +convention=position_vector 75 | 76 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector 77 | 78 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector 79 | 80 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector 81 | 82 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000330 +dry=-0.001551 +drz=0.001625 +convention=position_vector 83 | 84 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000035 +dry=-0.000662 +drz=-0.0001 +convention=position_vector 85 | 86 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000095 +dry=-0.000598 +drz=0.000723 +convention=position_vector 87 | 88 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000411 +dry=0.001036 +drz=-0.002166 +convention=position_vector 89 | 90 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000243 +dry=-0.000311 +drz=-0.000154 +convention=position_vector 91 | 92 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000080 +dry=-0.000745 +drz=0.000897 +convention=position_vector 93 | 94 | +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000047 +dry=-0.001 +drz=0.000975 +convention=position_vector 95 | -------------------------------------------------------------------------------- /share/ITRF2020: 
-------------------------------------------------------------------------------- 1 | # ITRF2020 params are in mm/year, PJ_helmert uses m/year 2 | 3 | # Generated with generate_itrf2020.py from EPSG database 4 | 5 | +proj=helmert +x=-0.0014 +y=-0.0009 +z=0.0014 +s=-0.00042 +dy=-0.0001 +dz=0.0002 +t_epoch=2015 +convention=position_vector 6 | 7 | +proj=helmert +x=0.0002 +y=0.001 +z=0.0033 +s=-0.00029 +dy=-0.0001 +dz=0.0001 +ds=3e-05 +t_epoch=2015 +convention=position_vector 8 | 9 | +proj=helmert +x=0.0027 +y=0.0001 +z=-0.0014 +s=0.00065 +dx=0.0003 +dy=-0.0001 +dz=0.0001 +ds=3e-05 +t_epoch=2015 +convention=position_vector 10 | 11 | +proj=helmert +x=-0.0002 +y=0.0008 +z=-0.0342 +s=0.00225 +dx=0.0001 +dz=-0.0017 +ds=0.00011 +t_epoch=2015 +convention=position_vector 12 | 13 | +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 14 | 15 | +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 16 | 17 | +proj=helmert +x=0.0065 +y=-0.0039 +z=-0.0779 +rz=0.00036 +s=0.00398 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 18 | 19 | +proj=helmert +x=-0.0658 +y=0.0019 +z=-0.0713 +rx=-0.00336 +ry=-0.00433 +rz=0.00075 +s=0.00447 +dx=-0.0028 +dy=-0.0002 +dz=-0.0023 +drx=-0.00011 +dry=-0.00019 +drz=7e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 20 | 21 | +proj=helmert +x=0.0145 +y=-0.0019 +z=-0.0859 +rz=0.00036 +s=0.00327 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 22 | 23 | +proj=helmert +x=0.0265 +y=0.0121 +z=-0.0919 +rz=0.00036 +s=0.00467 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 24 | 25 | +proj=helmert +x=0.0245 +y=0.0081 +z=-0.1079 +rz=0.00036 +s=0.00497 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 26 | 27 | +proj=helmert +x=0.0295 +y=0.0321 +z=-0.1459 +rz=0.00036 +s=0.00837 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 28 | 29 | +proj=helmert +x=0.0245 +y=-0.0039 +z=-0.1699 +rx=0.0001 +rz=0.00036 +s=0.01147 +dx=0.0001 +dy=-0.0006 +dz=-0.0031 +drz=2e-05 +ds=0.00012 +t_epoch=2015 +convention=position_vector 30 | 31 | # ITRF2020 Plate Motion Model parameters 32 | # 33 | # As described in https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2023GL106373 34 | # (and also in https://itrf.ign.fr/docs/solutions/itrf2020/ITRF2020-PMM.dat, but with 35 | # different units) 36 | 37 | +proj=helmert +drx=-0.000131 +dry=-0.000551 +drz=0.000837 +convention=position_vector 38 | 39 | +proj=helmert +drx=-0.000269 +dry=-0.000312 +drz=0.000678 +convention=position_vector 40 | 41 | +proj=helmert +drx=0.001129 +dry=-0.000146 +drz=0.001438 +convention=position_vector 42 | 43 | +proj=helmert +drx=0.001487 +dry=0.001175 +drz=0.001223 +convention=position_vector 44 | 45 | +proj=helmert +drx=0.000207 +dry=-0.001422 +drz=0.000726 +convention=position_vector 46 | 47 | +proj=helmert +drx=-0.000085 +dry=-0.000519 +drz=0.000753 +convention=position_vector 48 | 49 | +proj=helmert +drx=0.001137 +dry=0.000013 +drz=0.001444 +convention=position_vector 50 | 51 | +proj=helmert +drx=-0.000327 +dry=-0.001561 +drz=0.001605 +convention=position_vector 52 | 53 | +proj=helmert +drx=0.000045 +dry=-0.000666 +drz=-0.000098 
+convention=position_vector 54 | 55 | +proj=helmert +drx=0.000090 +dry=-0.000585 +drz=0.000717 +convention=position_vector 56 | 57 | +proj=helmert +drx=-0.000404 +dry=0.001021 +drz=-0.002154 +convention=position_vector 58 | 59 | +proj=helmert +drx=-0.000261 +dry=-0.000282 +drz=-0.000157 +convention=position_vector 60 | 61 | +proj=helmert +drx=-0.000081 +dry=-0.000719 +drz=0.000864 +convention=position_vector 62 | 63 | # Plate names suffixed by _T (for Translation) that includes the translation 64 | # rates +dx=0.00037 +dy=0.00035 +dz=0.00074 given by Table 2 of the ITRF2020 plate motion model 65 | # paper 66 | 67 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000131 +dry=-0.000551 +drz=0.000837 +convention=position_vector 68 | 69 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000269 +dry=-0.000312 +drz=0.000678 +convention=position_vector 70 | 71 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001129 +dry=-0.000146 +drz=0.001438 +convention=position_vector 72 | 73 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001487 +dry=0.001175 +drz=0.001223 +convention=position_vector 74 | 75 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000207 +dry=-0.001422 +drz=0.000726 +convention=position_vector 76 | 77 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000085 +dry=-0.000519 +drz=0.000753 +convention=position_vector 78 | 79 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.001137 +dry=0.000013 +drz=0.001444 +convention=position_vector 80 | 81 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000327 +dry=-0.001561 +drz=0.001605 +convention=position_vector 82 | 83 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000045 +dry=-0.000666 +drz=-0.000098 +convention=position_vector 84 | 85 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=0.000090 +dry=-0.000585 +drz=0.000717 +convention=position_vector 86 | 87 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000404 +dry=0.001021 +drz=-0.002154 +convention=position_vector 88 | 89 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000261 +dry=-0.000282 +drz=-0.000157 +convention=position_vector 90 | 91 | +proj=helmert +dx=0.00037 +dy=0.00035 +dz=0.00074 +drx=-0.000081 +dry=-0.000719 +drz=0.000864 +convention=position_vector 92 | -------------------------------------------------------------------------------- /tiler/tiler.go: -------------------------------------------------------------------------------- 1 | package tiler 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "path/filepath" 7 | "strings" 8 | "time" 9 | 10 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor" 11 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/conv/coor/proj" 12 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/las" 13 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 14 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree/grid" 15 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 16 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/writer" 17 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 18 | ) 19 | 20 | type Tiler interface { 21 | ProcessFiles(inputLasFiles []string, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error 22 | ProcessFolder(inputFolder, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error 23 | } 24 | 25 | // GoCesiumTiler wraps the logic required to convert 26 | // LAS point clouds into Cesium 3D tiles 27 | type GoCesiumTiler struct { 28 
| convFactory coor.ConverterFactory 29 | treeProvider 30 | writerProvider 31 | lasReaderProvider 32 | } 33 | 34 | type treeProvider func(opts *TilerOptions) tree.Tree 35 | type writerProvider func(folder string, opts *TilerOptions) (writer.Writer, error) 36 | type lasReaderProvider func(inputLasFiles []string, sourceCRS string, eightbit bool) (las.LasReader, error) 37 | 38 | // NewGoCesiumTiler returns a new tiler to be used to convert LAS files into Cesium 3D Tiles 39 | func NewGoCesiumTiler() (*GoCesiumTiler, error) { 40 | return &GoCesiumTiler{ 41 | convFactory: func() (coor.Converter, error) { 42 | return proj.NewProjCoordinateConverter() 43 | }, 44 | treeProvider: func(opts *TilerOptions) tree.Tree { 45 | return grid.NewTree( 46 | grid.WithGridSize(opts.gridSize), 47 | grid.WithMaxDepth(opts.maxDepth), 48 | grid.WithLoadWorkersNumber(opts.numWorkers), 49 | grid.WithMinPointsPerChildren(opts.minPointsPerTile), 50 | ) 51 | }, 52 | writerProvider: func(folder string, opts *TilerOptions) (writer.Writer, error) { 53 | return writer.NewWriter(folder, 54 | writer.WithNumWorkers(opts.numWorkers), 55 | writer.WithTilesetVersion(opts.version), 56 | ) 57 | }, 58 | lasReaderProvider: func(inputLasFiles []string, sourceCRS string, eightbit bool) (las.LasReader, error) { 59 | return las.NewCombinedFileLasReader(inputLasFiles, sourceCRS, eightbit) 60 | }, 61 | }, nil 62 | } 63 | 64 | // ProcessFolder converts all LAS files found in the provided input folder into separate tilesets, 65 | // each stored in a subdirectory of the outputFolder named after the source filename. 66 | // If sourceCRS is left empty, the tiler attempts to autodetect the CRS from the LAS GeoTIFF or WKT VLRs. 67 | func (t *GoCesiumTiler) ProcessFolder(inputFolder, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error { 68 | files, err := utils.FindLasFilesInFolder(inputFolder) 69 | if err != nil { 70 | return err 71 | } 72 | for _, f := range files { 73 | subfolderName := strings.TrimSuffix(filepath.Base(f), filepath.Ext(f)) 74 | err := t.ProcessFiles([]string{f}, filepath.Join(outputFolder, subfolderName), sourceCRS, opts, ctx) 75 | if err != nil { 76 | return err 77 | } 78 | } 79 | return nil 80 | } 81 | 82 | // ProcessFiles converts the specified LAS files into a single Cesium tileset and stores it in the given output folder. 83 | // If sourceCRS is left empty, the tiler attempts to autodetect the CRS from the LAS GeoTIFF or WKT VLRs.
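// A minimal usage sketch (the input path, output folder and EPSG code below are illustrative):
//
//	t, _ := NewGoCesiumTiler()
//	opts := NewTilerOptions(WithGridSize(10), WithMaxDepth(12))
//	err := t.ProcessFiles([]string{"input.las"}, "out", "EPSG:32633", opts, context.Background())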
84 | func (t *GoCesiumTiler) ProcessFiles(inputLasFiles []string, outputFolder string, sourceCRS string, opts *TilerOptions, ctx context.Context) error { 85 | start := time.Now() 86 | tr := t.treeProvider(opts) 87 | 88 | inputDesc := fmt.Sprintf("%d files", len(inputLasFiles)) 89 | if len(inputLasFiles) == 1 { 90 | inputDesc = inputLasFiles[0] 91 | } 92 | 93 | // PARSE LAS HEADER 94 | emitEvent(EventReadLasHeaderStarted, opts, start, inputDesc, "start reading las") 95 | lasFile, err := t.lasReaderProvider(inputLasFiles, sourceCRS, opts.eightBitColors) 96 | if err != nil { 97 | emitEvent(EventReadLasHeaderError, opts, start, inputDesc, fmt.Sprintf("las read error: %v", err)) 98 | return err 99 | } 100 | emitEvent(EventReadLasHeaderCompleted, opts, start, inputDesc, fmt.Sprintf("las header read completed: found %d points", lasFile.NumberOfPoints())) 101 | emitEvent(EventReadCRSDetected, opts, start, inputDesc, fmt.Sprintf("crs: %s", lasFile.GetCRS())) 102 | 103 | // LOAD POINTS 104 | emitEvent(EventPointLoadingStarted, opts, start, inputDesc, "point loading started") 105 | mutatorPipeline := mutator.NewPipeline(opts.mutators...) 106 | err = tr.Load(lasFile, t.convFactory, mutatorPipeline, ctx) 107 | if err != nil { 108 | emitEvent(EventPointLoadingError, opts, start, inputDesc, fmt.Sprintf("load error: %v", err)) 109 | return err 110 | } 111 | emitEvent(EventPointLoadingCompleted, opts, start, inputDesc, "point loading completed") 112 | 113 | // BUILD TREE 114 | emitEvent(EventBuildStarted, opts, start, inputDesc, "build started") 115 | err = tr.Build() 116 | if err != nil { 117 | emitEvent(EventBuildError, opts, start, inputDesc, fmt.Sprintf("build error: %v", err)) 118 | return err 119 | } 120 | emitEvent(EventBuildCompleted, opts, start, inputDesc, "build completed") 121 | 122 | // EXPORT 123 | emitEvent(EventExportStarted, opts, start, inputDesc, "export started") 124 | w, err := t.writerProvider(outputFolder, opts) 125 | if err != nil { 126 | emitEvent(EventExportError, opts, start, inputDesc, fmt.Sprintf("export init error: %v", err)) 127 | return err 128 | } 129 | err = w.Write(tr, "", ctx) 130 | if err != nil { 131 | emitEvent(EventExportError, opts, start, inputDesc, fmt.Sprintf("export error: %v", err)) 132 | return err 133 | } 134 | emitEvent(EventExportCompleted, opts, start, inputDesc, fmt.Sprintf("export completed in %v", time.Since(start).String())) 135 | return nil 136 | } 137 | 138 | func emitEvent(e TilerEvent, opts *TilerOptions, start time.Time, inputDesc string, msg string) { 139 | if opts.callback != nil { 140 | opts.callback(e, inputDesc, time.Since(start).Milliseconds(), msg) 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /internal/writer/writer_test.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "testing" 7 | 8 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 9 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 10 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 11 | ) 12 | 13 | func TestWriter(t *testing.T) { 14 | pt1 := &geom.LinkedPoint{ 15 | Pt: geom.NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 16 | } 17 | pt2 := &geom.LinkedPoint{ 18 | Pt: geom.NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 19 | } 20 | pt3 := &geom.LinkedPoint{ 21 | Pt: geom.NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 22 | } 23 | pt1.Next = pt2 24 | pt2.Next = pt3 25 | 26 | stream := geom.NewLinkedPointStream(pt1, 3) 27 | stream2 :=
geom.NewLinkedPointStream(pt2, 2) 28 | 29 | child := &tree.MockNode{ 30 | TotalNumPts: 2, 31 | Pts: stream2, 32 | } 33 | root := &tree.MockNode{ 34 | TotalNumPts: 5, 35 | Pts: stream, 36 | ChildNodes: [8]tree.Node{ 37 | nil, 38 | child, 39 | }, 40 | } 41 | 42 | w, err := NewWriter("base", 43 | WithNumWorkers(1), 44 | WithBufferRatio(10), 45 | ) 46 | if err != nil { 47 | t.Fatalf("unexpected error %v", err) 48 | } 49 | p := &MockProducer{} 50 | c := &MockConsumer{} 51 | w.producerFunc = func(basepath, folder string) Producer { 52 | return p 53 | } 54 | w.consumerFunc = func(v version.TilesetVersion) Consumer { 55 | return c 56 | } 57 | err = w.Write(root, "base", context.TODO()) 58 | if err != nil { 59 | t.Errorf("unexpected error: %v", err) 60 | } 61 | if p.Wc == nil { 62 | t.Errorf("empty work channel passed") 63 | } else { 64 | if c.Wc != p.Wc { 65 | t.Errorf("passed different work channel to consumer") 66 | } 67 | } 68 | if p.Ec == nil { 69 | t.Errorf("empty error channel passed") 70 | } else { 71 | if c.Ec != p.Ec { 72 | t.Errorf("passed different error channel to consumer") 73 | } 74 | } 75 | } 76 | 77 | func TestWriterWithProducerError(t *testing.T) { 78 | pt1 := &geom.LinkedPoint{ 79 | Pt: geom.NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 80 | } 81 | pt2 := &geom.LinkedPoint{ 82 | Pt: geom.NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 83 | } 84 | pt3 := &geom.LinkedPoint{ 85 | Pt: geom.NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 86 | } 87 | pt1.Next = pt2 88 | pt2.Next = pt3 89 | 90 | stream := geom.NewLinkedPointStream(pt1, 3) 91 | stream2 := geom.NewLinkedPointStream(pt2, 2) 92 | 93 | child := &tree.MockNode{ 94 | TotalNumPts: 2, 95 | Pts: stream2, 96 | } 97 | root := &tree.MockNode{ 98 | TotalNumPts: 5, 99 | Pts: stream, 100 | ChildNodes: [8]tree.Node{ 101 | nil, 102 | child, 103 | }, 104 | } 105 | 106 | w, err := NewWriter("base", 107 | WithNumWorkers(1), 108 | WithBufferRatio(10), 109 | ) 110 | if err != nil { 111 | t.Fatalf("unexpected error %v", err) 112 | } 113 | p := &MockProducer{ 114 | Err: fmt.Errorf("mock error"), 115 | } 116 | c := &MockConsumer{} 117 | w.producerFunc = func(basepath, folder string) Producer { 118 | return p 119 | } 120 | w.consumerFunc = func(v version.TilesetVersion) Consumer { 121 | return c 122 | } 123 | err = w.Write(root, "base", context.TODO()) 124 | if err == nil { 125 | t.Errorf("expected error but got none") 126 | } 127 | if p.Wc == nil { 128 | t.Errorf("empty work channel passed") 129 | } else { 130 | if c.Wc != p.Wc { 131 | t.Errorf("passed different work channel to consumer") 132 | } 133 | } 134 | if p.Ec == nil { 135 | t.Errorf("empty error channel passed") 136 | } else { 137 | if c.Ec != p.Ec { 138 | t.Errorf("passed different error channel to consumer") 139 | } 140 | } 141 | } 142 | 143 | func TestWriterWithConsumerError(t *testing.T) { 144 | pt1 := &geom.LinkedPoint{ 145 | Pt: geom.NewPoint(1, 2, 3, 4, 5, 6, 7, 8), 146 | } 147 | pt2 := &geom.LinkedPoint{ 148 | Pt: geom.NewPoint(9, 10, 11, 12, 13, 14, 15, 16), 149 | } 150 | pt3 := &geom.LinkedPoint{ 151 | Pt: geom.NewPoint(17, 18, 19, 20, 21, 22, 23, 24), 152 | } 153 | pt1.Next = pt2 154 | pt2.Next = pt3 155 | 156 | stream := geom.NewLinkedPointStream(pt1, 3) 157 | stream2 := geom.NewLinkedPointStream(pt2, 2) 158 | 159 | child := &tree.MockNode{ 160 | TotalNumPts: 2, 161 | Pts: stream2, 162 | } 163 | root := &tree.MockNode{ 164 | TotalNumPts: 5, 165 | Pts: stream, 166 | ChildNodes: [8]tree.Node{ 167 | nil, 168 | child, 169 | }, 170 | } 171 | 172 | w, err := NewWriter("base", 173 | WithNumWorkers(1), 174 
| WithBufferRatio(10), 175 | ) 176 | if err != nil { 177 | t.Fatalf("unexpected error %v", err) 178 | } 179 | p := &MockProducer{} 180 | c := &MockConsumer{ 181 | Err: fmt.Errorf("mock error"), 182 | } 183 | w.producerFunc = func(basepath, folder string) Producer { 184 | return p 185 | } 186 | w.consumerFunc = func(v version.TilesetVersion) Consumer { 187 | return c 188 | } 189 | err = w.Write(root, "base", context.TODO()) 190 | if err == nil { 191 | t.Errorf("expected error but got none") 192 | } 193 | if p.Wc == nil { 194 | t.Errorf("empty work channel passed") 195 | } else { 196 | if c.Wc != p.Wc { 197 | t.Errorf("passed different work channel to consumer") 198 | } 199 | } 200 | if p.Ec == nil { 201 | t.Errorf("empty error channel passed") 202 | } else { 203 | if c.Ec != p.Ec { 204 | t.Errorf("passed different error channel to consumer") 205 | } 206 | } 207 | } 208 | 209 | func TestWriterTilesetVersion(t *testing.T) { 210 | w, err := NewWriter("base", 211 | WithNumWorkers(1), 212 | WithBufferRatio(10), 213 | WithTilesetVersion(version.TilesetVersion_1_0), 214 | ) 215 | if err != nil { 216 | t.Fatalf("unexpected error %v", err) 217 | } 218 | if w.version != version.TilesetVersion_1_0 { 219 | t.Errorf("unexpected tileset version") 220 | } 221 | c := w.consumerFunc(version.TilesetVersion_1_0) 222 | if _, success := (c.(*StandardConsumer).encoder).(*PntsEncoder); success != true { 223 | t.Errorf("unexpected geometry encoder for tileset version 1.0") 224 | } 225 | w, err = NewWriter("base", 226 | WithNumWorkers(1), 227 | WithBufferRatio(10), 228 | WithTilesetVersion(version.TilesetVersion_1_1), 229 | ) 230 | if err != nil { 231 | t.Fatalf("unexpected error %v", err) 232 | } 233 | if w.version != version.TilesetVersion_1_1 { 234 | t.Errorf("unexpected tileset version") 235 | } 236 | c = w.consumerFunc(version.TilesetVersion_1_1) 237 | if _, success := (c.(*StandardConsumer).encoder).(*GltfEncoder); success != true { 238 | t.Errorf("unexpected geometry encoder for tileset version 1.1") 239 | } 240 | } 241 | -------------------------------------------------------------------------------- /internal/writer/consumer.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "encoding/json" 5 | "errors" 6 | "fmt" 7 | "os" 8 | "path" 9 | "strconv" 10 | "sync" 11 | 12 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 13 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 14 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 15 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 16 | ) 17 | 18 | // GeometryEncoder encodes a tree.Node into a binary file, like a .pnts or .glb/.gltf files. 
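// In this package, PntsEncoder implements it for 3D Tiles 1.0 (producing content.pnts) and GltfEncoder
// implements it for 3D Tiles 1.1 (producing content.glb).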
19 | type GeometryEncoder interface { 20 | Write(n tree.Node, folderPath string) error 21 | TilesetVersion() version.TilesetVersion 22 | Filename() string 23 | } 24 | 25 | type Consumer interface { 26 | Consume(workchan chan *WorkUnit, errchan chan error, waitGroup *sync.WaitGroup) 27 | } 28 | 29 | type StandardConsumer struct { 30 | encoder GeometryEncoder 31 | } 32 | 33 | func NewStandardConsumer(optFn ...func(*StandardConsumer)) Consumer { 34 | c := &StandardConsumer{ 35 | encoder: NewPntsEncoder(), 36 | } 37 | for _, fn := range optFn { 38 | fn(c) 39 | } 40 | return c 41 | } 42 | 43 | // WithGeometryEncoder sets the consumer geometry encoder to the given one 44 | func WithGeometryEncoder(e GeometryEncoder) func(*StandardConsumer) { 45 | return func(c *StandardConsumer) { 46 | c.encoder = e 47 | } 48 | } 49 | 50 | // Consume continually consumes WorkUnits submitted to the work channel, producing the corresponding geometry (.pnts/.glb) and tileset.json files. 51 | // It keeps working until the work channel is closed or an error is raised; in the latter case it submits the error to the error 52 | // channel before quitting. 53 | func (c *StandardConsumer) Consume(workchan chan *WorkUnit, errchan chan error, waitGroup *sync.WaitGroup) { 54 | defer func() { 55 | if r := recover(); r != nil { 56 | errchan <- fmt.Errorf("panic: %v", r) 57 | } 58 | }() 59 | // signal waitgroup finished work 60 | defer waitGroup.Done() 61 | for { 62 | // get work from channel 63 | work, ok := <-workchan 64 | if !ok { 65 | // channel was closed by producer, quit infinite loop 66 | break 67 | } 68 | 69 | // do work 70 | err := c.doWork(work) 71 | 72 | // if there were errors during work, send them to the error channel and quit 73 | if err != nil { 74 | errchan <- err 75 | break 76 | } 77 | } 78 | 79 | } 80 | 81 | // doWork takes a WorkUnit and writes the corresponding content.glb/.pnts and tileset.json files 82 | func (c *StandardConsumer) doWork(workUnit *WorkUnit) error { 83 | parentFolder := workUnit.BasePath 84 | node := workUnit.Node 85 | 86 | // Create base folder if it does not exist 87 | err := utils.CreateDirectoryIfDoesNotExist(parentFolder) 88 | if err != nil { 89 | return err 90 | } 91 | // encodes and writes the geometries to the disk as a .pnts/.glb file 92 | err = c.encoder.Write(node, parentFolder) 93 | if err != nil { 94 | return err 95 | } 96 | // as an edge case we could have a leaf root node. This needs a tileset.json even if it is a leaf.
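// In other words: every node with children gets a tileset.json, and so does a childless root node;
// only non-root leaves skip it, as they are referenced directly from their parent's tileset.json.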
97 | if !workUnit.Node.IsLeaf() || workUnit.Node.IsRoot() { 98 | // if the node has children also writes the tileset.json file 99 | err := c.writeTilesetJsonFile(*workUnit) 100 | if err != nil { 101 | return err 102 | } 103 | } 104 | return nil 105 | } 106 | 107 | // Writes the tileset.json file for the given WorkUnit 108 | func (c *StandardConsumer) writeTilesetJsonFile(workUnit WorkUnit) error { 109 | parentFolder := workUnit.BasePath 110 | node := workUnit.Node 111 | 112 | // Create base folder if it does not exist 113 | err := utils.CreateDirectoryIfDoesNotExist(parentFolder) 114 | if err != nil { 115 | return err 116 | } 117 | 118 | // tileset.json file 119 | file := path.Join(parentFolder, "tileset.json") 120 | jsonData, err := c.generateTilesetJson(node) 121 | if err != nil { 122 | return err 123 | } 124 | 125 | // Writes the tileset.json binary content to the given file 126 | err = os.WriteFile(file, jsonData, 0666) 127 | if err != nil { 128 | return err 129 | } 130 | 131 | return nil 132 | } 133 | 134 | // Generates the tileset.json content for the given tree node 135 | func (c *StandardConsumer) generateTilesetJson(node tree.Node) ([]byte, error) { 136 | if !node.IsLeaf() || node.IsRoot() { 137 | root, err := c.generateTilesetRoot(node) 138 | if err != nil { 139 | return nil, err 140 | } 141 | 142 | tileset := c.generateTileset(node, root) 143 | 144 | // Outputting a formatted json file 145 | e, err := json.Marshal(tileset) 146 | if err != nil { 147 | return nil, err 148 | } 149 | 150 | return e, nil 151 | } 152 | 153 | return nil, errors.New("this node is a non-root leaf, cannot create a tileset json for it") 154 | } 155 | 156 | func (c *StandardConsumer) generateTilesetRoot(node tree.Node) (Root, error) { 157 | reg := node.BoundingBox() 158 | 159 | children, err := c.generateTilesetChildren(node) 160 | if err != nil { 161 | return Root{}, err 162 | } 163 | 164 | var cMajorTransformPtr *[16]float64 165 | if trans := node.ToParentCRS(); trans != nil && *trans != model.IdentityTransform { 166 | cMajor := trans.ForwardColumnMajor() 167 | cMajorTransformPtr = &cMajor 168 | } 169 | 170 | return Root{ 171 | Content: Content{c.encoder.Filename()}, 172 | BoundingVolume: BoundingVolume{Box: reg.AsCesiumBox()}, 173 | GeometricError: node.GeometricError(), 174 | Refine: "ADD", 175 | Children: children, 176 | Transform: cMajorTransformPtr, 177 | }, nil 178 | } 179 | 180 | func (c *StandardConsumer) generateTileset(node tree.Node, root Root) Tileset { 181 | tileset := Tileset{} 182 | tileset.Asset = Asset{Version: c.encoder.TilesetVersion()} 183 | tileset.GeometricError = node.GeometricError() 184 | tileset.Root = root 185 | 186 | return tileset 187 | } 188 | 189 | func (c *StandardConsumer) generateTilesetChildren(node tree.Node) ([]Child, error) { 190 | var children []Child 191 | for i, child := range node.Children() { 192 | if c.nodeContainsPoints(child) { 193 | children = append(children, c.generateTilesetChild(child, i)) 194 | } 195 | } 196 | return children, nil 197 | } 198 | 199 | func (c *StandardConsumer) nodeContainsPoints(node tree.Node) bool { 200 | return node != nil && node.TotalNumberOfPoints() > 0 201 | } 202 | 203 | func (c *StandardConsumer) generateTilesetChild(child tree.Node, childIndex int) Child { 204 | childJson := Child{} 205 | filename := "tileset.json" 206 | if child.IsLeaf() { 207 | filename = c.encoder.Filename() 208 | } 209 | childJson.Content = Content{ 210 | Url: strconv.Itoa(childIndex) + "/" + filename, 211 | } 212 | reg := child.BoundingBox() 213 | 
childJson.BoundingVolume = BoundingVolume{ 214 | Box: reg.AsCesiumBox(), 215 | } 216 | childJson.GeometricError = child.GeometricError() 217 | childJson.Refine = "ADD" 218 | return childJson 219 | } 220 | -------------------------------------------------------------------------------- /share/nad.lst: -------------------------------------------------------------------------------- 1 | Listing of State Plane North American Datum Zones 2 | 3 | NGS zone number 4 | State and zone 1927 1983 5 | 6 | Alabama east .................. 101 101 7 | Alabama west .................. 102 102 8 | Alaska zone no. 1 ............. 5001 5001 9 | Alaska zone no. 2 ............. 5002 5002 10 | Alaska zone no. 3 ............. 5003 5003 11 | Alaska zone no. 4 ............. 5004 5004 12 | Alaska zone no. 5 ............. 5005 5005 13 | Alaska zone no. 6 ............. 5006 5006 14 | Alaska zone no. 7 ............. 5007 5007 15 | Alaska zone no. 8 ............. 5008 5008 16 | Alaska zone no. 9 ............. 5009 5009 17 | Alaska zone no. 10 ............ 5010 5010 18 | American Samoa ................ 5300 19 | Arizona central ............... 202 202 20 | Arizona east .................. 201 201 21 | Arizona west .................. 203 203 22 | Arkansas north ................ 301 301 23 | Arkansas south ................ 302 302 24 | California I .................. 401 401 25 | California II ................. 402 402 26 | California III ................ 403 403 27 | California IV ................. 404 404 28 | California V .................. 405 405 29 | California VI ................. 406 406 30 | California VII ................ 407 31 | Colorado central .............. 502 502 32 | Colorado north ................ 501 501 33 | Colorado south ................ 503 503 34 | Connecticut ................... 600 600 35 | Delaware ...................... 700 700 36 | Florida east .................. 901 901 37 | Florida north ................. 903 903 38 | Florida west .................. 902 902 39 | Georgia east .................. 1001 1001 40 | Georgia west .................. 1002 1002 41 | Guam Island ................... 5400 42 | Hawaii 1 ...................... 5101 5101 43 | Hawaii 2 ...................... 5102 5102 44 | Hawaii 3 ...................... 5103 5103 45 | Hawaii 4 ...................... 5104 5104 46 | Hawaii 5 ...................... 5105 5105 47 | Idaho central ................. 1102 1102 48 | Idaho east .................... 1101 1101 49 | Idaho west .................... 1103 1103 50 | Illinois east ................. 1201 1201 51 | Illinois west ................. 1202 1202 52 | Indiana east .................. 1301 1301 53 | Indiana west .................. 1302 1302 54 | Iowa north .................... 1401 1401 55 | Iowa south .................... 1402 1402 56 | Kansas north .................. 1501 1501 57 | Kansas south .................. 1502 1502 58 | Kentucky north ................ 1601 1601 59 | Kentucky south ................ 1602 1602 60 | Louisiana north ............... 1701 1701 61 | Louisiana offshore ............ 1703 1703 62 | Louisiana south ............... 1702 1702 63 | Maine east .................... 1801 1801 64 | Maine west .................... 1802 1802 65 | Maryland ...................... 1900 1900 66 | Massachusetts island .......... 2002 2002 67 | Massachusetts mainland ........ 2001 2001 68 | Michigan central/l ............ 2112 2112 current 69 | Michigan central/m ............ 2102 old 70 | Michigan east ................. 2101 old 71 | Michigan north ................ 
2111 2111 current 72 | Michigan south ................ 2113 2113 current 73 | Michigan west ................. 2103 old 74 | Minnesota central ............. 2202 2202 75 | Minnesota north ............... 2201 2201 76 | Minnesota south ............... 2203 2203 77 | Mississippi east .............. 2301 2301 78 | Mississippi west .............. 2302 2302 79 | Missouri central .............. 2402 2402 80 | Missouri east ................. 2401 2401 81 | Missouri west ................. 2403 2403 82 | Montana ....................... 2500 83 | Montana central ............... 2502 84 | Montana north ................. 2501 85 | Montana south ................. 2503 86 | Nebraska ...................... 2600 87 | Nebraska north ................ 2601 88 | Nebraska south ................ 2602 89 | Nevada central ................ 2702 2702 90 | Nevada east ................... 2701 2701 91 | Nevada west ................... 2703 2703 92 | New hampshire ................. 2800 2800 93 | New jersey .................... 2900 2900 94 | New mexico central ............ 3002 3002 95 | New mexico east ............... 3001 3001 96 | New mexico west ............... 3003 3003 97 | New york central .............. 3102 3102 98 | New york east ................. 3101 3101 99 | New york long island .......... 3104 3104 100 | New york west ................. 3103 3103 101 | North carolina ................ 3200 3200 102 | North dakota north ............ 3301 3301 103 | North dakota south ............ 3302 3302 104 | Ohio north .................... 3401 3401 105 | Ohio south .................... 3402 3402 106 | Oklahoma north ................ 3501 3501 107 | Oklahoma south ................ 3502 3502 108 | Oregon north .................. 3601 3601 109 | Oregon south .................. 3602 3602 110 | Pennsylvania north ............ 3701 3701 111 | Pennsylvania south ............ 3702 3702 112 | Puerto Rico, Virgin Islands ... 5201 5200 113 | Rhode Island .................. 3800 3800 114 | South Carolina ................ 3900 115 | South Carolina north .......... 3901 116 | South Carolina south .......... 3902 117 | South Dakota north ............ 4001 4001 118 | South Dakota south ............ 4002 4002 119 | Tennessee ..................... 4100 4100 120 | Texas central ................. 4203 4203 121 | Texas north ................... 4201 4201 122 | Texas north central ........... 4202 4202 123 | Texas south ................... 4205 4205 124 | Texas south central ........... 4204 4204 125 | Utah central .................. 4302 4302 126 | Utah north .................... 4301 4301 127 | Utah south .................... 4303 4303 128 | Vermont ....................... 4400 4400 129 | Virgin Islands, St. Croix ..... 5202 130 | Virginia north ................ 4501 4501 131 | Virginia south ................ 4502 4502 132 | Washington north .............. 4601 4601 133 | Washington south .............. 4602 4602 134 | West Virginia north ........... 4701 4701 135 | West Virginia south ........... 4702 4702 136 | Wisconsin central ............. 4802 4802 137 | Wisconsin north ............... 4801 4801 138 | Wisconsin south ............... 4803 4803 139 | Wyoming east .................. 4901 4901 140 | Wyoming east central .......... 4902 4902 141 | Wyoming west .................. 4904 4904 142 | Wyoming west central .......... 
4903 4903 143 | -------------------------------------------------------------------------------- /internal/writer/pnts.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "io" 7 | "os" 8 | "path" 9 | "strings" 10 | 11 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 12 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 13 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 14 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 15 | ) 16 | 17 | // PntsEncoder writes a node data as Pnts file (3D Tiles 1.0 specs) 18 | type PntsEncoder struct{} 19 | 20 | func (e *PntsEncoder) TilesetVersion() version.TilesetVersion { 21 | return version.TilesetVersion_1_0 22 | } 23 | 24 | func (e *PntsEncoder) Filename() string { 25 | return "content.pnts" 26 | } 27 | 28 | func NewPntsEncoder() *PntsEncoder { 29 | return &PntsEncoder{} 30 | } 31 | 32 | func (e *PntsEncoder) Write(node tree.Node, folderPath string) error { 33 | pts := node.Points() 34 | 35 | // Feature table 36 | featureTableBytes, featureTableLen := e.generateFeatureTable(pts.Len()) 37 | 38 | // Batch table 39 | batchTableBytes, batchTableLen := e.generateBatchTable(pts.Len()) 40 | 41 | // Write binary content to file 42 | pntsFilePath := path.Join(folderPath, e.Filename()) 43 | f, err := os.Create(pntsFilePath) 44 | if err != nil { 45 | return err 46 | } 47 | defer f.Close() 48 | 49 | wr := bufio.NewWriter(f) 50 | 51 | err = e.writePntsHeader(pts.Len(), featureTableLen, batchTableLen, wr) 52 | if err != nil { 53 | return err 54 | } 55 | err = e.writeTable(featureTableBytes, wr) 56 | if err != nil { 57 | return err 58 | } 59 | 60 | err = e.writePointCoords(pts, wr) 61 | if err != nil { 62 | return err 63 | } 64 | 65 | err = e.writePointColors(pts, wr) 66 | if err != nil { 67 | return err 68 | } 69 | 70 | err = e.writeTable(batchTableBytes, wr) 71 | if err != nil { 72 | return err 73 | } 74 | 75 | err = e.writePointIntensities(pts, wr) 76 | if err != nil { 77 | return err 78 | } 79 | 80 | err = e.writePointClassifications(pts, wr) 81 | if err != nil { 82 | return err 83 | } 84 | 85 | err = wr.Flush() 86 | if err != nil { 87 | return err 88 | } 89 | 90 | return nil 91 | } 92 | 93 | func (e *PntsEncoder) generateFeatureTable(numPoints int) ([]byte, int) { 94 | featureTableStr := e.generateFeatureTableJsonContent(numPoints, 0) 95 | featureTableLen := len(featureTableStr) 96 | return []byte(featureTableStr), featureTableLen 97 | } 98 | 99 | func (e *PntsEncoder) generateBatchTable(numPoints int) ([]byte, int) { 100 | batchTableStr := e.generateBatchTableJsonContent(numPoints, 0) 101 | batchTableLen := len(batchTableStr) 102 | return []byte(batchTableStr), batchTableLen 103 | } 104 | 105 | func (e *PntsEncoder) writePntsHeader(numPoints int, featureTableLen int, batchTableLen int, wr io.Writer) error { 106 | _, err := wr.Write([]byte("pnts")) // magic 107 | if err != nil { 108 | return err 109 | } 110 | err = utils.WriteIntAs4ByteNumber(1, wr) // version number 111 | if err != nil { 112 | return err 113 | } 114 | positionBytesLen := 4 * 3 * numPoints // 4 bytes per coordinate component (x,y,z) -> 12 bytes per point 115 | err = utils.WriteIntAs4ByteNumber(28+featureTableLen+positionBytesLen+numPoints*3, wr) // numpoints*3 is colorbytes (1 byte per color component) 116 | if err != nil { 117 | return err 118 | } 119 | err = utils.WriteIntAs4ByteNumber(featureTableLen, wr) // feature table length 120 | if err != nil { 121 | return err 122 | 
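// The seven 4-byte fields emitted by this function form the fixed 28-byte pnts
// header: magic, version, total tile byte length, feature table JSON byte length,
// feature table binary byte length, batch table JSON byte length and batch table
// binary byte length. The 28-byte constant in the total length computed above
// accounts for exactly these fields.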
} 123 | err = utils.WriteIntAs4ByteNumber(positionBytesLen+numPoints*3, wr) // feature table binary length (position len + colors len) 124 | if err != nil { 125 | return err 126 | } 127 | err = utils.WriteIntAs4ByteNumber(batchTableLen, wr) // batch table length 128 | if err != nil { 129 | return err 130 | } 131 | err = utils.WriteIntAs4ByteNumber(2*numPoints, wr) // intensity + classification 132 | if err != nil { 133 | return err 134 | } 135 | return nil 136 | } 137 | 138 | func (e *PntsEncoder) writeTable(tableBytes []byte, wr io.Writer) error { 139 | _, err := wr.Write(tableBytes) 140 | if err != nil { 141 | return err 142 | } 143 | return nil 144 | } 145 | 146 | func (e *PntsEncoder) writePointCoords(pts geom.PointList, wr io.Writer) error { 147 | n := pts.Len() 148 | // write coords 149 | for i := 0; i < n; i++ { 150 | pt, err := pts.Next() 151 | if err != nil { 152 | return err 153 | } 154 | err = utils.WriteFloat32LittleEndian(pt.X, wr) 155 | if err != nil { 156 | return err 157 | } 158 | err = utils.WriteFloat32LittleEndian(pt.Y, wr) 159 | if err != nil { 160 | return err 161 | } 162 | err = utils.WriteFloat32LittleEndian(pt.Z, wr) 163 | if err != nil { 164 | return err 165 | } 166 | } 167 | pts.Reset() 168 | return nil 169 | } 170 | 171 | func (e *PntsEncoder) writePointColors(pts geom.PointList, wr io.Writer) error { 172 | n := pts.Len() 173 | // write colors 174 | for i := 0; i < n; i++ { 175 | pt, err := pts.Next() 176 | if err != nil { 177 | return err 178 | } 179 | _, err = wr.Write([]byte{pt.R, pt.G, pt.B}) 180 | if err != nil { 181 | return err 182 | } 183 | } 184 | pts.Reset() 185 | return nil 186 | } 187 | 188 | func (e *PntsEncoder) writePointIntensities(pts geom.PointList, wr io.Writer) error { 189 | n := pts.Len() 190 | // write colors 191 | for i := 0; i < n; i++ { 192 | pt, err := pts.Next() 193 | if err != nil { 194 | return err 195 | } 196 | _, err = wr.Write([]byte{pt.Intensity}) 197 | if err != nil { 198 | return err 199 | } 200 | } 201 | pts.Reset() 202 | return nil 203 | } 204 | 205 | func (e *PntsEncoder) writePointClassifications(pts geom.PointList, wr io.Writer) error { 206 | n := pts.Len() 207 | // write colors 208 | for i := 0; i < n; i++ { 209 | pt, err := pts.Next() 210 | if err != nil { 211 | return err 212 | } 213 | _, err = wr.Write([]byte{pt.Classification}) 214 | if err != nil { 215 | return err 216 | } 217 | } 218 | pts.Reset() 219 | return nil 220 | } 221 | 222 | // Generates the json representation of the feature table 223 | func (e *PntsEncoder) generateFeatureTableJsonContent(pointNo int, spaceNo int) string { 224 | s := fmt.Sprintf(`{"POINTS_LENGTH":%d,"POSITION":{"byteOffset":0},"RGB":{"byteOffset":%d}}%s`, 225 | pointNo, 226 | pointNo*12, 227 | strings.Repeat(" ", spaceNo), 228 | ) 229 | headerByteLength := len([]byte(s)) 230 | paddingSize := headerByteLength % 4 231 | if paddingSize != 0 { 232 | return e.generateFeatureTableJsonContent(pointNo, 4-paddingSize) 233 | } 234 | return s 235 | } 236 | 237 | // Generates the json representation of the batch table 238 | func (e *PntsEncoder) generateBatchTableJsonContent(pointNumber, spaceNumber int) string { 239 | s := fmt.Sprintf(`{"INTENSITY":{"byteOffset":0,"componentType":"UNSIGNED_BYTE","type":"SCALAR"}, 240 | "CLASSIFICATION":{"byteOffset":%d,"componentType":"UNSIGNED_BYTE","type":"SCALAR"}}%s`, 1*pointNumber, strings.Repeat(" ", spaceNumber)) 241 | headerByteLength := len([]byte(s)) 242 | paddingSize := headerByteLength % 4 243 | if paddingSize != 0 { 244 | return 
e.generateBatchTableJsonContent(pointNumber, 4-paddingSize) 245 | } 246 | return s 247 | } 248 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ################################## 2 | # BASE BUILD IMAGE # 3 | ################################## 4 | FROM ubuntu:latest AS builder-base 5 | 6 | # Change these two arguments to change the version of go and PROJ 7 | ARG GO_VERSION="1.23.2" 8 | ARG PROJ_VERSION="proj-9.5.0" 9 | 10 | # build variable, no impact on the final artifacts 11 | ARG PROJECT_FOLDER="/usr/src/gocesiumtiler" 12 | 13 | # install essential tools 14 | # partly taken from https://github.com/OSGeo/PROJ/blob/master/Dockerfile 15 | RUN apt-get update 16 | RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --fix-missing --no-install-recommends \ 17 | apt-transport-https software-properties-common ca-certificates wget zip unzip curl tar pkg-config \ 18 | git cmake make sqlite3 libsqlite3-dev \ 19 | && apt-get clean && rm -rf /var/lib/apt/lists/* 20 | 21 | # install powershell, required for vcpkg 22 | # taken from https://learn.microsoft.com/en-us/powershell/scripting/install/install-ubuntu?view=powershell-7.4 23 | RUN . /etc/os-release && wget -q "https://packages.microsoft.com/config/ubuntu/$VERSION_ID/packages-microsoft-prod.deb" 24 | RUN dpkg -i packages-microsoft-prod.deb 25 | RUN rm packages-microsoft-prod.deb 26 | RUN apt-get update 27 | RUN apt-get install -y powershell 28 | RUN apt-get clean && rm -rf /var/lib/apt/lists/* 29 | 30 | # install vcpkg to manage packages and dependencies 31 | WORKDIR /vcpkg 32 | RUN git clone https://github.com/Microsoft/vcpkg.git "/vcpkg" 33 | RUN ./bootstrap-vcpkg.sh -disableMetrics 34 | 35 | # clone proj 36 | WORKDIR ${PROJECT_FOLDER} 37 | RUN wget -c https://download.osgeo.org/proj/$PROJ_VERSION.tar.gz 38 | RUN tar -xvzf $PROJ_VERSION.tar.gz 39 | RUN mkdir $PROJ_VERSION/build 40 | 41 | # install golang (taken from https://go.dev/doc/install) 42 | WORKDIR /tmp 43 | RUN wget https://go.dev/dl/go$GO_VERSION.linux-amd64.tar.gz 44 | RUN rm -rf /usr/local/go && tar -C /usr/local -xzf go$GO_VERSION.linux-amd64.tar.gz 45 | ENV PATH="/usr/local/go/bin:${PATH}" 46 | 47 | ################################## 48 | # LINUX X64 BUILDER # 49 | ################################## 50 | FROM builder-base AS linux-builder 51 | # install build tools for linux x64 compilation 52 | RUN apt-get update 53 | RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --fix-missing --no-install-recommends build-essential 54 | 55 | # install proj dependencies for linux x64 56 | RUN /vcpkg/vcpkg install sqlite3[core,tool] tiff --triplet=x64-linux 57 | 58 | # build proj statically for linux x64 59 | WORKDIR ${PROJECT_FOLDER}/${PROJ_VERSION}/build 60 | RUN cmake -DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake \ 61 | -DVCPKG_TARGET_TRIPLET=x64-linux \ 62 | -DCMAKE_INSTALL_PREFIX=/usr/local/ \ 63 | -DCMAKE_BUILD_TYPE=Release \ 64 | -DBUILD_APPS=OFF \ 65 | -DBUILD_SHARED_LIBS=OFF \ 66 | -DENABLE_CURL=OFF \ 67 | -DENABLE_TIFF=ON \ 68 | -DBUILD_TESTING=OFF .. \ 69 | -DEMBED_PROJ_DATA_PATH=OFF \ 70 | .. 71 | RUN cmake --build . --config Release -j $(nproc) 72 | RUN cmake --build . 
--target install -j $(nproc) 73 | 74 | # BUILD_LABEL will force cache invalidation at every build if docker build is run with --build-arg BUILD_LABEL=$(date +%s) 75 | RUN echo "$BUILD_LABEL" 76 | 77 | # clone the source and prepare the build dir 78 | WORKDIR ${PROJECT_FOLDER}/build 79 | COPY . . 80 | RUN mkdir -p ./bin 81 | 82 | # BUILD_ID will force cache invalidation at every build if docker build is run with eg --build-arg BUILD_ID=$(date +%s) 83 | ARG BUILD_ID 84 | RUN echo "build id: $BUILD_ID" 85 | 86 | # build the go app for linux x64 statically using cgo 87 | RUN PKG_CONFIG_PATH="/vcpkg/installed/x64-linux/lib/pkgconfig" \ 88 | CGO_ENABLED=1 \ 89 | CGO_LDFLAGS='-L/vcpkg/installed/x64-linux/lib -g -O2 -static -lstdc++ -lsqlite3 -ltiff -lz -ljpeg -llzma -lm' \ 90 | go build -o ./bin/gocesiumtiler -ldflags "-X main.GitCommit=$(git rev-list -1 HEAD)" ./cmd/main.go 91 | 92 | # run the unit tests 93 | RUN PROJ_DATA="/usr/local/share/proj" \ 94 | PKG_CONFIG_PATH="/vcpkg/installed/x64-linux/lib/pkgconfig" \ 95 | CGO_ENABLED=1 \ 96 | CGO_LDFLAGS='-L/vcpkg/installed/x64-linux/lib -g -O2 -static -lstdc++ -lsqlite3 -ltiff -lz -ljpeg -llzma -lm' \ 97 | go test ./... -v 98 | 99 | 100 | ################################## 101 | # WINDOWS X64 BUILDER # 102 | ################################## 103 | FROM builder-base AS windows-builder 104 | # install mingw-w64 for cross-compilation to windows 105 | RUN apt-get update 106 | RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --fix-missing --no-install-recommends mingw-w64 107 | 108 | # set vcpkg env vars to force statically linked build using mingw x664 109 | ENV VCPKG_DEFAULT_TRIPLET=x64-mingw-static 110 | ENV VCPKG_DEFAULT_HOST_TRIPLET=x64-mingw-static 111 | 112 | # set cmake default compilers and env vars pointing them to mingw 113 | ENV CMAKE_C_COMPILER=x86_64-w64-mingw32-gcc 114 | ENV CMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ 115 | ENV CMAKE_SYSTEM_NAME=Windows 116 | 117 | # install proj dependencies 118 | RUN /vcpkg/vcpkg install sqlite3[core,tool] tiff --triplet=x64-mingw-static 119 | 120 | # build proj statically for windows x64 121 | WORKDIR ${PROJECT_FOLDER}/${PROJ_VERSION}/build 122 | RUN cmake -DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake \ 123 | -DCMAKE_SYSTEM_NAME=Windows \ 124 | -DVCPKG_TARGET_TRIPLET=x64-mingw-static \ 125 | -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc \ 126 | -DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ \ 127 | -DCMAKE_INSTALL_PREFIX=/usr/local/ \ 128 | -DCMAKE_BUILD_TYPE=Release \ 129 | -DBUILD_APPS=OFF \ 130 | -DBUILD_SHARED_LIBS=OFF \ 131 | -DENABLE_CURL=OFF \ 132 | -DENABLE_TIFF=ON \ 133 | -DBUILD_TESTING=OFF .. \ 134 | -DEMBED_PROJ_DATA_PATH=OFF \ 135 | .. 136 | RUN cmake --build . --config Release -j $(nproc) 137 | RUN cmake --build . --target install -j $(nproc) 138 | 139 | # BUILD_ID will force cache invalidation at every build if docker build is run with eg --build-arg BUILD_ID=$(date +%s) 140 | RUN echo "build id: $BUILD_ID" 141 | 142 | # clone the source and prepare the build dir 143 | WORKDIR ${PROJECT_FOLDER}/build 144 | COPY . . 
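# As in the linux stage, the build below cross-compiles through cgo: CC points at the
# mingw-w64 gcc, GOOS/GOARCH select the windows/amd64 target and CGO_LDFLAGS statically
# links the sqlite3/tiff/zlib/jpeg/lzma libraries installed above via vcpkg, so PROJ is
# embedded into the resulting .exe (the PROJ data files are shipped separately through
# the /share copy in the final stage).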
145 | RUN mkdir -p ./bin 146 | 147 | # build the go app for windows x64 statically using cgo 148 | RUN PKG_CONFIG_PATH="/vcpkg/installed/x64-mingw-static/lib/pkgconfig" \ 149 | CC=x86_64-w64-mingw32-gcc \ 150 | CGO_ENABLED=1 \ 151 | CGO_LDFLAGS='-L/vcpkg/installed/x64-mingw-static/lib -g -O2 -static -lstdc++ -lsqlite3 -ltiff -lzlib -ljpeg -llzma -lm' \ 152 | GOOS="windows" \ 153 | GOARCH="amd64" \ 154 | go build -o ./bin/gocesiumtiler.exe -ldflags "-X main.GitCommit=$(git rev-list -1 HEAD)" ./cmd/main.go 155 | 156 | 157 | ################################## 158 | # Packaging # 159 | ################################## 160 | FROM scratch AS final 161 | ARG PROJECT_FOLDER="/usr/src/gocesiumtiler" 162 | COPY --from=linux-builder /usr/local/share/proj /share 163 | COPY --from=linux-builder ${PROJECT_FOLDER}/build/bin/gocesiumtiler /gocesiumtiler-lin-x64 164 | COPY --from=windows-builder ${PROJECT_FOLDER}/build/bin/gocesiumtiler.exe /gocesiumtiler-win-x64.exe -------------------------------------------------------------------------------- /internal/writer/consumer_test.go: -------------------------------------------------------------------------------- 1 | package writer 2 | 3 | import ( 4 | "encoding/json" 5 | "os" 6 | "path/filepath" 7 | "reflect" 8 | "sync" 9 | "testing" 10 | 11 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/geom" 12 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/tree" 13 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/model" 14 | ) 15 | 16 | func TestConsume(t *testing.T) { 17 | c := NewStandardConsumer() 18 | wc := make(chan *WorkUnit) 19 | ec := make(chan error) 20 | wg := &sync.WaitGroup{} 21 | wg.Add(1) 22 | go c.Consume(wc, ec, wg) 23 | 24 | pts := []model.Point{ 25 | {X: 0, Y: 0, Z: 0, R: 160, G: 166, B: 203, Intensity: 7, Classification: 3}, 26 | {X: 1, Y: 3, Z: 4, R: 186, G: 200, B: 237, Intensity: 7, Classification: 3}, 27 | {X: 2, Y: 6, Z: 8, R: 156, G: 167, B: 204, Intensity: 7, Classification: 3}, 28 | } 29 | 30 | pt1 := &geom.LinkedPoint{ 31 | Pt: pts[0], 32 | } 33 | pt2 := &geom.LinkedPoint{ 34 | Pt: pts[1], 35 | } 36 | pt3 := &geom.LinkedPoint{ 37 | Pt: pts[2], 38 | } 39 | pt1.Next = pt2 40 | pt2.Next = pt3 41 | 42 | stream := geom.NewLinkedPointStream(pt1, 3) 43 | tr := geom.LocalToGlobalTransformFromPoint(1000, 1000, 1000) 44 | n := &tree.MockNode{ 45 | TotalNumPts: 3, 46 | Pts: stream, 47 | Bounds: geom.NewBoundingBox( 48 | 0, 49 | 4, 50 | 0, 51 | 6, 52 | 0, 53 | 8, 54 | ), 55 | Root: true, 56 | Leaf: true, 57 | GeomError: 20, 58 | Transform: &tr, 59 | } 60 | 61 | tmp, err := os.MkdirTemp(os.TempDir(), "tst") 62 | if err != nil { 63 | t.Fatalf("unexpected error %v", err) 64 | } 65 | t.Cleanup(func() { 66 | os.RemoveAll(tmp) 67 | }) 68 | 69 | tmpPath := filepath.Join(tmp, "tst") 70 | 71 | wc <- &WorkUnit{ 72 | Node: n, 73 | BasePath: tmpPath, 74 | } 75 | close(wc) 76 | wg.Wait() 77 | sb, err := os.ReadFile(filepath.Join(tmpPath, "tileset.json")) 78 | if err != nil { 79 | t.Fatalf("unable to read tileset.json: %v", err) 80 | } 81 | expTrans := tr.ForwardColumnMajor() 82 | expected := Tileset{ 83 | Asset: Asset{ 84 | Version: "1.0", 85 | }, 86 | GeometricError: 20, 87 | Root: Root{ 88 | Children: nil, 89 | Content: Content{ 90 | Url: "content.pnts", 91 | }, 92 | BoundingVolume: BoundingVolume{ 93 | Box: [12]float64{ 94 | 2, 95 | 3, 96 | 4, 97 | 2, 0, 0, 98 | 0, 3, 0, 99 | 0, 0, 4, 100 | }, 101 | }, 102 | GeometricError: 20, 103 | Refine: "ADD", 104 | Transform: &expTrans, 105 | }, 106 | } 107 | 108 | actual := Tileset{} 109 | err = json.Unmarshal(sb, 
&actual) 110 | if err != nil { 111 | t.Fatalf("unable to decode tileset.json: %v", err) 112 | } 113 | if !reflect.DeepEqual(actual, expected) { 114 | t.Errorf("unexpected tileset.json, expected:\n*%v*\n\ngot:\n\n*%v*\n", expected, actual) 115 | } 116 | 117 | actualPnts, err := os.ReadFile(filepath.Join(tmpPath, "content.pnts")) 118 | if err != nil { 119 | t.Fatalf("unable to read content.pnts: %v", err) 120 | } 121 | expectedPnts, err := os.ReadFile("./testdata/content.pnts") 122 | if err != nil { 123 | t.Fatalf("unable to read tileset.json: %v", err) 124 | } 125 | if !reflect.DeepEqual(actualPnts, expectedPnts) { 126 | t.Errorf("expected pnts:\n%v\n\ngot:\n\n%v\n", expectedPnts, actualPnts) 127 | } 128 | } 129 | 130 | func TestConsumeGltf(t *testing.T) { 131 | c := NewStandardConsumer(WithGeometryEncoder(NewGltfEncoder())) 132 | wc := make(chan *WorkUnit) 133 | ec := make(chan error) 134 | wg := &sync.WaitGroup{} 135 | wg.Add(1) 136 | go c.Consume(wc, ec, wg) 137 | 138 | pts := []model.Point{ 139 | {X: 0, Y: 0, Z: 0, R: 160, G: 166, B: 203, Intensity: 7, Classification: 3}, 140 | {X: 1, Y: 1, Z: 1, R: 186, G: 200, B: 237, Intensity: 7, Classification: 3}, 141 | {X: 2, Y: 2, Z: 2, R: 156, G: 167, B: 204, Intensity: 7, Classification: 3}, 142 | } 143 | 144 | pt1 := &geom.LinkedPoint{ 145 | Pt: pts[0], 146 | } 147 | pt2 := &geom.LinkedPoint{ 148 | Pt: pts[1], 149 | } 150 | pt3 := &geom.LinkedPoint{ 151 | Pt: pts[2], 152 | } 153 | pt1.Next = pt2 154 | pt2.Next = pt3 155 | 156 | tr := geom.LocalToGlobalTransformFromPoint(2000, 1000, 1000) 157 | expTrans := tr.ForwardColumnMajor() 158 | stream := geom.NewLinkedPointStream(pt1, 3) 159 | n := &tree.MockNode{ 160 | TotalNumPts: 3, 161 | Pts: stream, 162 | Bounds: geom.NewBoundingBox( 163 | 0, 164 | 4, 165 | 0, 166 | 6, 167 | 0, 168 | 8, 169 | ), 170 | Root: true, 171 | Leaf: true, 172 | GeomError: 20, 173 | Transform: &tr, 174 | } 175 | 176 | tmp, err := os.MkdirTemp(os.TempDir(), "tst") 177 | if err != nil { 178 | t.Fatalf("unexpected error %v", err) 179 | } 180 | t.Cleanup(func() { 181 | os.RemoveAll(tmp) 182 | }) 183 | 184 | tmpPath := filepath.Join(tmp, "tst") 185 | 186 | wc <- &WorkUnit{ 187 | Node: n, 188 | BasePath: tmpPath, 189 | } 190 | close(wc) 191 | wg.Wait() 192 | sb, err := os.ReadFile(filepath.Join(tmpPath, "tileset.json")) 193 | if err != nil { 194 | t.Fatalf("unable to read tileset.json: %v", err) 195 | } 196 | expected := Tileset{ 197 | Asset: Asset{ 198 | Version: "1.1", 199 | }, 200 | GeometricError: 20, 201 | Root: Root{ 202 | Children: nil, 203 | Content: Content{ 204 | Url: "content.glb", 205 | }, 206 | BoundingVolume: BoundingVolume{ 207 | Box: [12]float64{ 208 | 2, 209 | 3, 210 | 4, 211 | 2, 0, 0, 212 | 0, 3, 0, 213 | 0, 0, 4, 214 | }, 215 | }, 216 | GeometricError: 20, 217 | Refine: "ADD", 218 | Transform: &expTrans, 219 | }, 220 | } 221 | 222 | actual := Tileset{} 223 | err = json.Unmarshal(sb, &actual) 224 | if err != nil { 225 | t.Fatalf("unable to decode tileset.json: %v", err) 226 | } 227 | if !reflect.DeepEqual(actual, expected) { 228 | t.Errorf("unexpected tileset.json, expected:\n*%v*\n\ngot:\n\n*%v*\n", expected, actual) 229 | } 230 | 231 | actualGlb, err := os.ReadFile(filepath.Join(tmpPath, "content.glb")) 232 | if err != nil { 233 | t.Fatalf("unable to read content.pnts: %v", err) 234 | } 235 | expectedGlb, err := os.ReadFile("./testdata/content.glb") 236 | if err != nil { 237 | t.Fatalf("unable to read tileset.json: %v", err) 238 | } 239 | if !reflect.DeepEqual(actualGlb, expectedGlb) { 240 | 
t.Errorf("expected glb:\n%v\n\ngot:\n\n%v\n", expectedGlb, actualGlb) 241 | } 242 | } 243 | 244 | func TestGenerateTilesetChild(t *testing.T) { 245 | c := NewStandardConsumer(WithGeometryEncoder(NewGltfEncoder())).(*StandardConsumer) 246 | node := tree.MockNode{ 247 | Bounds: geom.NewBoundingBox(0, 10, 0, 10, 0, 10), 248 | GeomError: 2.5, 249 | } 250 | out := c.generateTilesetChild(&node, 2) 251 | expectedBox := [12]float64{5, 5, 5, 5, 0, 0, 0, 5, 0, 0, 0, 5} 252 | if actual := out.BoundingVolume.Box; actual != expectedBox { 253 | t.Errorf("expected box %v, got %v", expectedBox, actual) 254 | } 255 | expectedContentUrl := "2/tileset.json" 256 | if actual := out.Content.Url; actual != expectedContentUrl { 257 | t.Errorf("expected url %v, got %v", expectedContentUrl, actual) 258 | } 259 | expectedGeomError := 2.5 260 | if actual := out.GeometricError; actual != expectedGeomError { 261 | t.Errorf("expected geom err %v, got %v", expectedGeomError, actual) 262 | } 263 | if actual := out.Refine; actual != "ADD" { 264 | t.Errorf("expected refine mode ADD, got %v", actual) 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /share/world: -------------------------------------------------------------------------------- 1 | # SCCSID @(#)world 1.2 95/08/05 GIE REL 2 | # proj +init files for various non-U.S. coordinate systems. 3 | # 4 | +lastupdate=2016-12-12 5 | 6 | # Swiss Coordinate System 7 | +proj=somerc +lat_0=46d57'8.660"N +lon_0=7d26'22.500"E 8 | +ellps=bessel +x_0=600000 +y_0=200000 9 | +k_0=1. no_defs <> 10 | # Laborde grid for Madagascar 11 | proj=labrd ellps=intl lon_0=46d26'13.95E lat_0=18d54S 12 | azi=18d54 k_0=.9995 x_0=400000 y_0=800000 13 | no_defs <> 14 | # New Zealand Map Grid (NZMG) 15 | proj=nzmg # Projection unique to N.Z. so all factors fixed 16 | no_defs <> 17 | # Secondary grids DMA TM8358.1, p. 
4.3 18 | # British West Indies 19 | proj=tmerc ellps=clrk80 lon_0=62W 20 | x_0=400000 k_0=0.9995 21 | no_defs <> 22 | # Costa Rica Norte 23 | proj=lcc ellps=clrk66 lat_1=10d28N lon_0=84d20W 24 | x_0=500000 y_0=217820.522 k_0=0.99995696 25 | no_defs <> 26 | # Costa Rica Sud 27 | proj=lcc ellps=clrk66 lat_1=9dN lon_0=83d40W 28 | x_0=500000 y_0=327987.436 k_0=0.99995696 29 | no_defs <> 30 | # Cuba Norte 31 | proj=lcc ellps=clrk66 lat_1=22d21N lon_0=81dW 32 | x_0=500000 y_0=280296.016 k_0=0.99993602 33 | no_defs <> 34 | # Cuba Sud 35 | proj=lcc ellps=clrk66 lat_1=20d43'N lon_0=76d50'W 36 | x_0=500000 y_0=229126.939 k_0=0.99994848 37 | no_defs <> 38 | # Dominican Republic 39 | proj=lcc ellps=clrk66 lat_1=18d49'N lon_0=71d30'W 40 | x_0=500000 y_0=277063.657 k_0=0.99991102 41 | no_defs <> 42 | # Egypt 43 | proj=tmerc ellps=intl lon_0=25d30'E x_0=300000 k_0=0.99985 44 | no_defs <> 45 | # Egypt 46 | proj=tmerc ellps=intl lon_0=28d30'E x_0=300000 k_0=0.99985 47 | no_defs <> 48 | # Egypt 49 | proj=tmerc ellps=intl lon_0=31d30'E x_0=300000 k_0=0.99985 50 | no_defs <> 51 | # Egypt 52 | proj=tmerc ellps=intl lon_0=34d30'E x_0=300000 k_0=0.99985 53 | no_defs <> 54 | # Egypt 55 | proj=tmerc ellps=intl lon_0=37d30'E x_0=300000 k_0=0.99985 56 | no_defs <> 57 | # El Salvador 58 | proj=lcc ellps=clrk66 lat_1=13d47'N lon_0=89dW 59 | x_0=500000 y_0=295809.184 k_0=0.99996704 60 | no_defs <> 61 | # Guatemala Norte 62 | proj=lcc ellps=clrk66 lat_1=16d49'N lon_0=90d20'W 63 | x_0=500000 y_0=292209.579 k_0=0.99992226 64 | no_defs <> 65 | # Guatemala Sud 66 | proj=lcc ellps=clrk66 lat_1=14d54'N lon_0=90d20'W 67 | x_0=500000 y_0=325992.681 k_0=0.99989906 68 | no_defs <> 69 | # Haiti 70 | proj=lcc ellps=clrk66 lat_1=18d49'N lon_0=71d30'W 71 | x_0=500000 y_0=277063.657 k_0=0.99991102 72 | no_defs <> 73 | # Honduras Norte 74 | proj=lcc ellps=clrk66 lat_1=15d30'N lon_0=86d10'W 75 | x_0=500000 y_0=296917.439 k_0=0.99993273 76 | no_defs <> 77 | # Honduras Sud 78 | proj=lcc ellps=clrk66 lat_1=13d47'N lon_0=87d10'W 79 | x_0=500000 y_0=296215.903 k_0=0.99995140 80 | no_defs <> 81 | # Levant 82 | proj=lcc ellps=clrk66 lat_1=34d39'N lon_0=37d21'E 83 | x_0=500000 y_0=300000 k_0=0.9996256 84 | no_defs <> 85 | # Nicaragua Norte 86 | proj=lcc ellps=clrk66 lat_1=13d52'N lon_0=85d30'W 87 | x_0=500000 y_0=359891.816 k_0=0.99990314 88 | no_defs <> 89 | # Nicaragua Sud 90 | proj=lcc ellps=clrk66 lat_1=11d40'N lon_0=85d30'W 91 | x_0=500000 y_0=288876.327 k_0=0.99992228 92 | no_defs <> 93 | # Northwest Africa 94 | proj=lcc ellps=clrk80 lat_1=34dN lon_0=0dE 95 | x_0=1000000 y_0=500000 k_0=0.99908 96 | no_defs <> 97 | # Palestine 98 | proj=tmerc a=6378300.79 rf=293.488307656 99 | lat_0=31d44'2.749"N lon_0=35d12'43.490"E 100 | x_0=170251.555 y_0=126867.909 k_0=1 101 | no_defs <> 102 | # Panama 103 | proj=lcc ellps=clrk66 lat_1=8d25'N lon_0=80dW 104 | x_0=500000 y_0=294865.303 k_0=0.99989909 105 | no_defs <> 106 | # other grids in DMA TM8358.1 107 | # British National Grid 108 | proj=tmerc ellps=airy lat_0=49dN lon_0=2dW 109 | k_0=0.9996012717 x_0=400000 y_0=-100000 110 | no_defs <> 111 | # West Malaysian RSO Grid 112 | proj=omerc a=6377295.66402 rf=300.8017 alpha=323d01'32.846" 113 | no_uoff rot_conv lonc=102d15E lat_0=4dN k_0=0.99984 x_0=804670.240 y_0=0 114 | no_defs <> 115 | # India Zone I 116 | proj=lcc ellps=everest lon_0=68E lat_1=32d30'N 117 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 118 | no_defs <> 119 | # India Zone IIA 120 | proj=lcc ellps=everest lon_0=74E lat_1=26N 121 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 122 | no_defs 
<> 123 | # India Zone IIB 124 | proj=lcc ellps=everest lon_0=90E lat_1=26N 125 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 126 | no_defs <> 127 | # India Zone IIIA 128 | proj=lcc ellps=everest lon_0=80E lat_1=19N 129 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 130 | no_defs <> 131 | # India Zone IIIB 132 | proj=lcc ellps=everest lon_0=100E lat_1=19N 133 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 134 | no_defs <> 135 | # India Zone IVA 136 | proj=lcc ellps=everest lon_0=80E lat_1=12N 137 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 138 | no_defs <> 139 | # India Zone IVB 140 | proj=lcc ellps=everest lon_0=104E lat_1=12N 141 | x_0=2743185.69 y_0=914395.23 k_0=.998786408 142 | no_defs <> 143 | # Ceylon Belt 144 | proj=tmerc ellps=everest lon_0=80d46'18.160"E lat_0=7d0'1.729"N 145 | x_0=160933.56048 y_0=160933.56048 k_0=1. 146 | no_defs <> 147 | # Irish Transverse Mercator Grid 148 | proj=tmerc ellps=mod_airy lat_0=53d30'N lon_0=8W 149 | x_0=200000 y_0=250000 k_0=1.000035 150 | no_defs <> 151 | # Netherlands East Indies Equatorial Zone 152 | proj=merc ellps=bessel lon_0=110E 153 | x_0=3900000 y_0=900000 k_0=0.997 154 | no_defs <> 155 | # Nord Algerie Grid 156 | proj=lcc ellps=clrk80 lon_0=2d42E lat_0=36N 157 | x_0=500000 y_0=300000 k_0=0.999625544 158 | no_defs <> 159 | # Nord Maroc Grid 160 | proj=lcc ellps=clrk80 lon_0=5d24'W lat_0=33d18'N 161 | x_0=500000 y_0=300000 k_0=0.999625769 162 | no_defs <> 163 | # Nord Tunisie Grid 164 | proj=lcc ellps=clrk80 lon_0=9d54E lat_0=36N 165 | x_0=500000 y_0=300000 k_0=0.999625544 166 | no_defs <> 167 | # Sud Algerie Grid 168 | proj=lcc ellps=clrk80 lon_0=2d42E lat_0=33d18'N 169 | x_0=500000 y_0=300000 k_0=0.999625769 170 | no_defs <> 171 | # Sud Maroc Grid 172 | proj=lcc ellps=clrk80 lon_0=5d24W lat_0=29d42'N 173 | x_0=500000 y_0=300000 k_0=0.999615596 174 | no_defs <> 175 | # Sud Tunisie Grid 176 | proj=lcc ellps=clrk80 lon_0=9d54'E lat_0=33d18'N 177 | x_0=500000 y_0=300000 k_0=0.999625769 178 | no_defs <> 179 | # Gauss Krueger Grid for Germany 180 | # 181 | # The first figure of the easting is lon_0 divided by 3 182 | # ( 2 for 6d0E, 3 for 9d0E, 4 for 12d0E) 183 | # For translations you have to remove this first figure 184 | # and convert northings and eastings from km to meter . 185 | # The other way round, divide by 1000 and add the figure. 186 | # I made 3 entries for the officially used grids in Germany 187 | # 188 | # 189 | # Und nochmal in deutsch : 190 | # Die erste Ziffer des Rechtswerts beschreibt den Hauptmeridian 191 | # und ist dessen Gradzahl geteilt durch 3. 192 | # Zum Umrechnen in Grad muss daher die erste Ziffer des Rechtswertes 193 | # entfernt werden und evt. von km auf Metern umgerechnet werden. 194 | # Zur Umrechnung in Gauss Krueger Koordinaten entsprechend die 195 | # Ziffer fuer den Hauptmeridian vor dem Rechtswert ergaenzen. 196 | # Ich hab fuer alle drei in Deutschland ueblichen Hauptmeridiane 197 | # jeweils einen Eintrag ergaenzt. 
198 | # 199 | # 200 | # added by Michael Goepel 201 | # 202 | # Gauss Krueger Grid for Germany 203 | proj=tmerc ellps=bessel lon_0=6d0E lat_0=0 204 | x_0=500000 205 | no_defs<> 206 | # Gauss Krueger Grid for Germany 207 | proj=tmerc ellps=bessel lon_0=9d0E lat_0=0 208 | x_0=500000 209 | no_defs<> 210 | # Gauss Krueger Grid for Germany 211 | proj=tmerc ellps=bessel lon_0=12d0E lat_0=0 212 | x_0=500000 213 | no_defs<> 214 | 215 | -------------------------------------------------------------------------------- /cmd/main_test.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "os" 5 | "path/filepath" 6 | "reflect" 7 | "testing" 8 | 9 | "github.com/mfbonfigli/gocesiumtiler/v2/internal/utils" 10 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler" 11 | "github.com/mfbonfigli/gocesiumtiler/v2/tiler/mutator" 12 | "github.com/mfbonfigli/gocesiumtiler/v2/version" 13 | ) 14 | 15 | func TestDefaultTiler(t *testing.T) { 16 | tl, err := tilerProvider() 17 | if err != nil { 18 | t.Errorf("unexpected error: %v", err) 19 | } 20 | switch tl.(type) { 21 | case *tiler.GoCesiumTiler: 22 | default: 23 | t.Errorf("unexpected tiler type returned") 24 | } 25 | } 26 | 27 | func TestMainProcessFile(t *testing.T) { 28 | mockTiler := &tiler.MockTiler{} 29 | tilerProvider = func() (tiler.Tiler, error) { 30 | return mockTiler, nil 31 | } 32 | os.Args = []string{"gocesiumtiler", "file", 33 | "-out", ".\\abc", 34 | "-crs", "EPSG:4979", 35 | "-resolution", "11.1", 36 | "-z-offset", "-1", 37 | "-depth", "13", 38 | "-subsample", "0.57", 39 | "-min-points-per-tile", "1200", 40 | "-8-bit", 41 | "myfile.las"} 42 | main() 43 | if mockTiler.ProcessFilesCalled != true { 44 | t.Error("expected processFiles called but was not") 45 | } 46 | if actual := mockTiler.InputFiles; !reflect.DeepEqual(actual, []string{"myfile.las"}) { 47 | t.Errorf("expected tiler to be called with %v but got %v", []string{"myfile.las"}, actual) 48 | } 49 | if actual := mockTiler.SourceCRS; actual != "EPSG:4979" { 50 | t.Errorf("expected tiler to be called with epsg %v but got epsg %v", 4979, actual) 51 | } 52 | if actual := mockTiler.OutputFolder; actual != ".\\abc" { 53 | t.Errorf("expected tiler to be called with output folder %v but got %v", ".\\abc", actual) 54 | } 55 | if actual := mockTiler.EightBit; actual != true { 56 | t.Errorf("expected tiler to be called with EightBit %v but got %v", true, actual) 57 | } 58 | if actual := mockTiler.GridSize; actual != 11.1 { 59 | t.Errorf("expected tiler to be called with GridSize %v but got %v", 11.1, actual) 60 | } 61 | if actual := mockTiler.PtsPerTile; actual != 1200 { 62 | t.Errorf("expected tiler to be called with PtsPerTile %v but got %v", 1200, actual) 63 | } 64 | if actual := mockTiler.Depth; actual != 13 { 65 | t.Errorf("expected tiler to be called with Depth %v but got %v", 13, actual) 66 | } 67 | if actual := mockTiler.Mutators[0].(*mutator.ZOffset).Offset; actual != -1 { 68 | t.Errorf("expected tiler to be called with ZOffset mutator with offset %v but got %v", -1, actual) 69 | } 70 | if actual := mockTiler.Mutators[1].(*mutator.Subsampler).Percentage; actual != 0.57 { 71 | t.Errorf("expected tiler to be called with Subsampler mutator with pct %v but got %v", 0.57, actual) 72 | } 73 | if actual := len(mockTiler.Mutators); actual != 2 { 74 | t.Errorf("expected 2 mutators but got %v", actual) 75 | } 76 | if actual := mockTiler.Version; actual != version.TilesetVersion_1_0 { 77 | t.Errorf("expected tiler to be called with Version %v but 
got %v", "1.0", actual) 78 | } 79 | } 80 | 81 | func TestMainProcessFolder(t *testing.T) { 82 | mockTiler := &tiler.MockTiler{} 83 | tilerProvider = func() (tiler.Tiler, error) { 84 | return mockTiler, nil 85 | } 86 | os.Args = []string{"gocesiumtiler", "folder", 87 | "-out", ".\\abc", 88 | "-epsg", "4979", 89 | "-resolution", "11.1", 90 | "-z-offset", "-1", 91 | "-depth", "13", 92 | "-min-points-per-tile", "1200", 93 | "-8-bit", 94 | "-v", "1.0", 95 | "myfolder"} 96 | main() 97 | if mockTiler.ProcessFolderCalled != true { 98 | t.Error("expected processFolder called but was not") 99 | } 100 | if actual := mockTiler.InputFolder; !reflect.DeepEqual(actual, "myfolder") { 101 | t.Errorf("expected tiler to be called with %v but got %v", "myfolder", actual) 102 | } 103 | if actual := mockTiler.SourceCRS; actual != "EPSG:4979" { 104 | t.Errorf("expected tiler to be called with epsg %v but got epsg %v", 4979, actual) 105 | } 106 | if actual := mockTiler.OutputFolder; actual != ".\\abc" { 107 | t.Errorf("expected tiler to be called with output folder %v but got %v", ".\\abc", actual) 108 | } 109 | if actual := mockTiler.EightBit; actual != true { 110 | t.Errorf("expected tiler to be called with EightBit %v but got %v", true, actual) 111 | } 112 | if actual := mockTiler.GridSize; actual != 11.1 { 113 | t.Errorf("expected tiler to be called with GridSize %v but got %v", 11.1, actual) 114 | } 115 | if actual := mockTiler.PtsPerTile; actual != 1200 { 116 | t.Errorf("expected tiler to be called with PtsPerTile %v but got %v", 1200, actual) 117 | } 118 | if actual := mockTiler.Depth; actual != 13 { 119 | t.Errorf("expected tiler to be called with Depth %v but got %v", 13, actual) 120 | } 121 | if actual := mockTiler.Mutators[0].(*mutator.ZOffset).Offset; actual != -1 { 122 | t.Errorf("expected tiler to be called with ZOffset mutator with offset %v but got %v", -1, actual) 123 | } 124 | if actual := len(mockTiler.Mutators); actual != 1 { 125 | t.Errorf("expected 1 mutator but got %v", actual) 126 | } 127 | if actual := mockTiler.Version; actual != version.TilesetVersion_1_0 { 128 | t.Errorf("expected tiler to be called with Version %v but got %v", "1.0", actual) 129 | } 130 | } 131 | 132 | func TestMainProcessFolderJoin(t *testing.T) { 133 | tmp, err := os.MkdirTemp(os.TempDir(), "tst") 134 | if err != nil { 135 | t.Fatalf("unexpected error %v", err) 136 | } 137 | t.Cleanup(func() { 138 | os.RemoveAll(tmp) 139 | }) 140 | 141 | utils.TouchFile(filepath.Join(tmp, "test0.las")) 142 | utils.TouchFile(filepath.Join(tmp, "test0.xyz")) 143 | utils.TouchFile(filepath.Join(tmp, "test1.LAS")) 144 | utils.TouchFile(filepath.Join(tmp, "test2.LAS")) 145 | 146 | mockTiler := &tiler.MockTiler{} 147 | tilerProvider = func() (tiler.Tiler, error) { 148 | return mockTiler, nil 149 | } 150 | os.Args = []string{"gocesiumtiler", "folder", 151 | "-out", ".\\abc", 152 | "-epsg", "4979", 153 | "-resolution", "11.1", 154 | "-z-offset", "-1", 155 | "-depth", "13", 156 | "-min-points-per-tile", "1200", 157 | "-8-bit", 158 | "-v", "1.1", 159 | "-join", 160 | tmp} 161 | main() 162 | if mockTiler.ProcessFolderCalled != false { 163 | t.Error("expected processFolder to not be called but it was") 164 | } 165 | if mockTiler.ProcessFilesCalled != true { 166 | t.Error("expected processFiles called but was not") 167 | } 168 | expected := []string{ 169 | filepath.Join(tmp, "test0.las"), 170 | filepath.Join(tmp, "test1.LAS"), 171 | filepath.Join(tmp, "test2.LAS"), 172 | } 173 | if actual := mockTiler.InputFiles; !reflect.DeepEqual(actual, 
expected) { 174 | t.Errorf("expected tiler to be called with %v but got %v", expected, actual) 175 | } 176 | if actual := mockTiler.SourceCRS; actual != "EPSG:4979" { 177 | t.Errorf("expected tiler to be called with epsg %v but got epsg %v", 4979, actual) 178 | } 179 | if actual := mockTiler.OutputFolder; actual != ".\\abc" { 180 | t.Errorf("expected tiler to be called with output folder %v but got %v", ".\\abc", actual) 181 | } 182 | if actual := mockTiler.EightBit; actual != true { 183 | t.Errorf("expected tiler to be called with EightBit %v but got %v", true, actual) 184 | } 185 | if actual := mockTiler.GridSize; actual != 11.1 { 186 | t.Errorf("expected tiler to be called with GridSize %v but got %v", 11.1, actual) 187 | } 188 | if actual := mockTiler.PtsPerTile; actual != 1200 { 189 | t.Errorf("expected tiler to be called with PtsPerTile %v but got %v", 1200, actual) 190 | } 191 | if actual := mockTiler.Depth; actual != 13 { 192 | t.Errorf("expected tiler to be called with Depth %v but got %v", 13, actual) 193 | } 194 | if actual := mockTiler.Mutators[0].(*mutator.ZOffset).Offset; actual != -1 { 195 | t.Errorf("expected tiler to be called with ZOffset mutator with offset %v but got %v", -1, actual) 196 | } 197 | if actual := len(mockTiler.Mutators); actual != 1 { 198 | t.Errorf("expected 1 mutator but got %v", actual) 199 | } 200 | if actual := mockTiler.Version; actual != version.TilesetVersion_1_1 { 201 | t.Errorf("expected tiler to be called with Version %v but got %v", "1.1", actual) 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | # Go Cesium Tiler - Development Setup Guide 2 | 3 | ``` 4 | _ _ _ 5 | __ _ ___ ___ ___ ___(_)_ _ _ __ ___ | |_(_) | ___ _ __ 6 | / _ |/ _ \ / __/ _ \/ __| | | | | '_ _ \| __| | |/ _ \ '__| 7 | | (_| | (_) | (_| __/\__ \ | |_| | | | | | | |_| | | __/ | 8 | \__, |\___/ \___\___||___/_|\__,_|_| |_| |_|\__|_|_|\___|_| 9 | __| | A Cesium Point Cloud tile generator written in golang 10 | |___/ 11 | ``` 12 | 13 | ## Introduction 14 | With the release of version 2, in particular from version 2.0.0-gamma, gocesiumtiler uses the Proj v9.5+ library to perform coordinate conversions. As a result, building the executable is more complex due to the need for `cgo` compiling, but also the need of statically building and linking Proj with the go executable. 15 | 16 | The build environment thus needs to be properly setup to enable the builds. 17 | 18 | ## Reproducible builds powered by Docker 19 | In order to streamline the build steps, reproducible builds are achieved through a Dockerfile. The repository contains three files: 20 | - `Dockerfile`: Contains all the build steps needed to build and test gocesiumtiler in both Windows and Linux. 21 | - `build.sh`: Kickstarts the docker build process injecting the right arguments. 22 | - `build.ps1`: Powershell scripts that works as `build.sh` but meant to be used with powershell under a windows environment. 23 | 24 | The Dockerfile is organized as a multi-stage build. 25 | 1. A base image is prepared, containing essential build tools. 26 | 2. A linux build image is created, where the required dependencies to build under linux are pulled and compiled. Proj is rebuilt from the sources with static linking targeting the Linux OS and then gocesiumtiler is compiled linking it to the static version of the Proj library. 27 | 3. 
Another linux build image is created, this one pulling the dependencies needed to cross-compile the gocesiumtiler executable under Windows. Proj is rebuilt again, this time targeting the Windows x86-64 architecture. 28 | 4. The build artifacts are copied in a final scratch image where they are ready to be copied out to the host. 29 | 30 | ## Local Development environment setup 31 | Two approaches to local development are possible, the first is using docker to run builds in a reproducible environment, the second is building the code locally without docker. 32 | 33 | Note that all the options mentioned statically link Proj to the final build executable, this ensures the system builds a single highly portable binary that embeds all required dependencies. 34 | 35 | ### Option 1. Install docker and use `build.sh` or `build.ps1` to build the code and run the tests. 36 | 37 | The benefit of using Dockerized builds is enabling reproducible builds. It recommended to always run a "dockerized" build before creating a PR. 38 | 39 | To build just run in a powershell console: 40 | ``` 41 | ./build.ps1 42 | ``` 43 | 44 | ### Option 2. Setup the local machine for local development without Docker. 45 | 46 | This approach is more challenging and the steps are different depending whether you are developing under Windows or Linux. In general the environment needs to be set up mimicking the steps described in the `Dockerfile`. 47 | 48 | The provided Dockerfile has been written to run builds in Linux, and as such it is also documenting how to setup a local Linux dev environment. 49 | For this reason, in the following we will focus mostly instead on the steps needed to setup a dev environment under Windows. 50 | 51 | ### **Setup a Windows development environment** 52 | The commands that follow are supposed to be executed from a Powershell console. 53 | 54 | **1. Install golang** 55 | 56 | **2. Install msys2** 57 | 58 | Msys2 will be used to install build tools like the Mingw64 compiler, CMake and Pkgconfig. 59 | Install it following the instructions here https://www.msys2.org/#installation or using Chocolatey. 60 | 61 | Make sure the `bin` folder is available on the `Path`. The `Path` should contain 62 | the following folder (please adapt them according to where your Msys2 installation is located): 63 | - `C:\msys64\usr\bin` 64 | 65 | **3. Use Msys2 package manager pacman to install the required build tools** 66 | 67 | ``` 68 | pacman -S --noconfirm mingw-w64-x86_64-pkgconf 69 | pacman -S --noconfirm mingw-w64-x86_64-gcc 70 | pacman -S --noconfirm mingw-w64-x86_64-cmake 71 | pacman -S --noconfirm mingw-w64-x86_64-sqlite3 72 | ``` 73 | 74 | Make sure that mingw64 executable is on the `Path` and if not add it manually, i.e. add : 75 | ``` 76 | C:\msys64\mingw64\bin 77 | ``` 78 | to the path. 79 | 80 | **4. Install `Vcpkg`, used to manage the dependencies** 81 | 82 | [vcpkg](https://vcpkg.io/en/) is a free C/C++ package manager that will greatly simplify the build setup. To install typically just clone the git repository in some folder, eg: 83 | ``` 84 | git clone https://github.com/Microsoft/vcpkg.git "C:\vcpkg" 85 | ``` 86 | 87 | And then run: 88 | ``` 89 | cd C:\vcpkg 90 | bootstrap-vcpkg.bat -disableMetrics 91 | ``` 92 | 93 | **5. Set the default Triplets for Vcpkg by creating / setting these environment variables** 94 | 95 | - `VCPKG_DEFAULT_TRIPLET`=`x64-mingw-static` 96 | - `VCPKG_DEFAULT_HOST_TRIPLET` = `x64-mingw-static` 97 | 98 | **6. 
Install the dependencies needed to build the Proj library (sqlite3 and tiff) via `vcpkg.exe`** 99 | 100 | ``` 101 | vcpkg.exe install sqlite3[core,tool] zlib --triplet=x64-mingw-static 102 | ``` 103 | 104 | Note, we are building a statically linked version of these libraries. 105 | 106 | **7. Clone Proj in some folder, eg `C:\proj`** 107 | 108 | ``` 109 | git clone https://github.com/OSGeo/PROJ.git "C:\proj" 110 | ``` 111 | 112 | If you want a specific version of Proj instead download and uncompress the archive from 113 | ``` 114 | https://download.osgeo.org/proj/$PROJ_VERSION.tar.gz 115 | ``` 116 | 117 | where `$PROJ_VERSION` is the name of the version you want, e.g. `proj-9.5.0` 118 | 119 | **8. Build PROJ with static linking** 120 | 121 | ``` 122 | cd C:\proj 123 | mkdir build 124 | cd build 125 | cmake -DCMAKE_TOOLCHAIN_FILE=C:\vcpkg\scripts\buildsystems\vcpkg.cmake ` 126 | -DVCPKG_TARGET_TRIPLET=x64-mingw-static ` 127 | -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc ` 128 | -DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ ` 129 | -DCMAKE_INSTALL_PREFIX=/usr/local/ ` 130 | -DCMAKE_BUILD_TYPE=Release ` 131 | -DBUILD_APPS=OFF ` 132 | -DBUILD_SHARED_LIBS=OFF ` 133 | -DENABLE_CURL=OFF ` 134 | -DENABLE_TIFF=ON ` 135 | -DEMBED_PROJ_DATA_PATH=OFF ` 136 | -DBUILD_TESTING=OFF .. 137 | cmake --build . --config Release -j $(nproc) 138 | cmake --build . --target install -j $(nproc) 139 | ``` 140 | 141 | Make sure `which cmake` points to the Msys2 - Mingw64 installation folder. If `$(nproc)` doesn't work just replace it with the number of CPUs on your system. 142 | 143 | --- 144 | 145 | **Note: Steps 1-8 are only required to be executed once, or when you want to upgrade the Proj version or one of its dependencies.** 146 | 147 | --- 148 | 149 | **9. Build gocesiumtiler** 150 | 151 | This could require some adaptations, and depending on your development configuration you might have to tune parameters like the linker search path or the `PKG_CONFIG_PATH`. 152 | 153 | The following is a general guide of a configuration that could work: 154 | ``` 155 | $env:PKG_CONFIG_PATH="C:\usr\local\lib\pkgconfig;C:\vcpkg\installed\x64-mingw-static\lib\pkgconfig"; ` 156 | $env:CC="x86_64-w64-mingw32-gcc"; ` 157 | $env:CGO_ENABLED=1; ` 158 | $env:CGO_LDFLAGS="-L/vcpkg/installed/x64-mingw-static/lib -g -O2 -static -lstdc++ -lsqlite3 -ltiff -lzlib -ljpeg -llzma -lm"; ` 159 | go build -o ./bin/gocesiumtiler.exe ./cmd/main.go 160 | ``` 161 | 162 | To run the tests similarly run: 163 | ``` 164 | $env:PKG_CONFIG_PATH="C:\usr\local\lib\pkgconfig;C:\vcpkg\installed\x64-mingw-static\lib\pkgconfig"; ` 165 | $env:CC="x86_64-w64-mingw32-gcc"; ` 166 | $env:CGO_ENABLED=1; ` 167 | $env:CGO_LDFLAGS="-L/vcpkg/installed/x64-mingw-static/lib -g -O2 -static -lstdc++ -lsqlite3 -ltiff -lzlib -ljpeg -llzma -lm"; ` 168 | go test -v ./... 169 | ``` 170 | 171 | The environment variables `PKG_CONFIG_PATH`, `CC`, `CGO_ENABLED`, `CGO_LDFLAGS` could also be stored permanently in the environment configuration so that the build and test commands become trivial. 172 | 173 | ``` 174 | go build -o ./bin/gocesiumtiler.exe ./cmd/main.go 175 | ``` 176 | 177 | and 178 | 179 | ``` 180 | go test v ./... 181 | ``` 182 | 183 | ### **Setup a Linux development environment** 184 | 185 | Please refer to the Dockerfile where the commands to setup a dev environment for Ubuntu-based development environment are listed in detail. 
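For reference, the dockerized build is ultimately a plain `docker build` of the multi-stage `Dockerfile`. A minimal sketch (assuming BuildKit is available; `./dist` is just an illustrative output folder, and `build.sh` / `build.ps1` wrap the exact arguments used by the project) is:

```
docker build --build-arg BUILD_ID=$(date +%s) -o ./dist .
```

Since the `final` stage is based on `scratch`, the exported folder contains only the Linux and Windows executables plus the PROJ data copied under `share`.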
-------------------------------------------------------------------------------- /share/triangulation.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "description": "Schema for triangulation based transformation", 4 | "type": "object", 5 | "properties": { 6 | "file_type": { 7 | "type": "string", 8 | "enum": [ 9 | "triangulation_file" 10 | ], 11 | "description": "File type. Always \"triangulation_file\"" 12 | }, 13 | "format_version": { 14 | "type": "string", 15 | "enum": [ 16 | "1.0", "1.1" 17 | ] 18 | }, 19 | "name": { 20 | "type": "string", 21 | "description": "A brief descriptive name of the triangulation" 22 | }, 23 | "version": { 24 | "type": "string", 25 | "description": "A string identifying the version of the triangulation. The format for specifying version will be defined by the agency responsible for the triangulation" 26 | }, 27 | "publication_date": { 28 | "$ref": "#/definitions/datetime", 29 | "description": "The date on which this version of the triangulation was published (or possibly the date on which it takes effect?)" 30 | }, 31 | "fallback_strategy": { 32 | "type": "string", 33 | "enum": [ 34 | "none", 35 | "nearest_side", 36 | "nearest_centroid" 37 | ] 38 | }, 39 | "license": { 40 | "type": "string", 41 | "description": "License under which the file is published" 42 | }, 43 | "description": { 44 | "type": "string", 45 | "description": "A text description of the file" 46 | }, 47 | "authority": { 48 | "type": "object", 49 | "description": "Basic information about the agency responsible for the data set", 50 | "properties": { 51 | "name": { 52 | "type": "string", 53 | "description": "The name of the agency" 54 | }, 55 | "url": { 56 | "type": "string", 57 | "description": "The url of the agency website", 58 | "format": "uri" 59 | }, 60 | "address": { 61 | "type": "string", 62 | "description": "The postal address of the agency" 63 | }, 64 | "email": { 65 | "type": "string", 66 | "description": "An email contact address for the agency", 67 | "format": "email" 68 | } 69 | }, 70 | "required": [ 71 | "name" 72 | ], 73 | "additionalProperties": false 74 | }, 75 | "links": { 76 | "type": "array", 77 | "description": "Links to related information", 78 | "items": { 79 | "type": "object", 80 | "properties": { 81 | "href": { 82 | "type": "string", 83 | "description": "The URL holding the information", 84 | "format": "uri" 85 | }, 86 | "rel": { 87 | "type": "string", 88 | "description": "The relationship to the dataset. Proposed relationships are:\n- \"about\": a web page for human consumption describing the model\n- \"source\": the authoritative source data from which the triangulation is built.\n- \"metadata\": ISO 19115 XML metadata regarding the triangulation." 89 | }, 90 | "type": { 91 | "type": "string", 92 | "description": "MIME type" 93 | }, 94 | "title": { 95 | "type": "string", 96 | "description": "Description of the link" 97 | } 98 | }, 99 | "required": [ 100 | "href" 101 | ], 102 | "additionalProperties": false 103 | } 104 | }, 105 | "extent": { 106 | "$ref": "#/definitions/extent", 107 | "description": "Defines the region within which the triangulation is defined. This should be a bounding box defined as an array of [west,south,east,north] coordinate values in a unspecified geographic CRS. 
This bounding box should be seen as approximate, given that triangulation may be defined with projected coordinates, and also because some triangulations may not cover the whole bounding box." 108 | }, 109 | "input_crs": { 110 | "$ref": "#/definitions/crs", 111 | "description": "String identifying the CRS of source coordinates in the vertices. Typically \"EPSG:XXXX\". If the transformation is for vertical component, this should be the code for a compound CRS (can be EPSG:XXXX+YYYY where XXXX is the code of the horizontal CRS and YYYY the code of the vertical CRS). For example, for the KKJ->ETRS89 transformation, this is EPSG:2393 (\"KKJ / Finland Uniform Coordinate System\"). The input coordinates are assumed to be passed in the \"normalized for visualisation\" / \"GIS friendly\" order, that is longitude, latitude for geographic coordinates and easting, northing for projected coordinates." 112 | }, 113 | "output_crs": { 114 | "$ref": "#/definitions/crs", 115 | "description": "String identifying the CRS of target coordinates in the vertices. Typically \"EPSG:XXXX\". If the transformation is for vertical component, this should be the code for a compound CRS (can be EPSG:XXXX+YYYY where XXXX is the code of the horizontal CRS and YYYY the code of the vertical CRS). For example, for the KKJ->ETRS89 transformation, this is EPSG:3067 (\"ETRS89 / TM35FIN(E,N)\"). The output coordinates will be returned in the \"normalized for visualisation\" / \"GIS friendly\" order, that is easting, that is longitude, latitude for geographic coordinates and easting, northing for projected coordinates." 116 | }, 117 | "transformed_components": { 118 | "type": "array", 119 | "description": "Specify which component of the coordinates are transformed. Either \"horizontal\", \"vertical\" or both", 120 | "minItems": 1, 121 | "maxItems": 2, 122 | "items": { 123 | "type": "string", 124 | "enum": [ 125 | "horizontal", 126 | "vertical" 127 | ] 128 | } 129 | }, 130 | "vertices_columns": { 131 | "type": "array", 132 | "description": "Specify the name of the columns of the rows in the \"vertices\" array. There must be exactly as many elements in \"vertices_columns\" as in a row of \"vertices\". The following names have a special meaning: \"source_x\", \"source_y\", \"target_x\", \"target_y\", \"source_z\", \"target_z\" and \"offset_z\". \"source_x\" and \"source_y\" are compulsory. \"source_x\" is for the source longitude (in degree) or easting. \"source_y\" is for the source latitude (in degree) or northing. \"target_x\" and \"target_y\" are compulsory when \"horizontal\" is specified in \"transformed_components\". (\"source_z\" and \"target_z\") or \"offset_z\" are compulsory when \"vertical\" is specified in \"transformed_components\".", 133 | "minItems": 3, 134 | "items": { 135 | "type": "string" 136 | } 137 | }, 138 | "triangles_columns": { 139 | "type": "array", 140 | "description": "Specify the name of the columns of the rows in the \"triangles\" array. There must be exactly as many elements in \"triangles_columns\" as in a row of \"triangles\". The following names have a special meaning: \"idx_vertex1\", \"idx_vertex2\", \"idx_vertex3\". 
They are compulsory.", 141 | "minItems": 3, 142 | "items": { 143 | "type": "string" 144 | } 145 | }, 146 | "vertices": { 147 | "type": "array", 148 | "description": "an array whose items are themselves arrays with as many columns as described in \"vertices_columns\"", 149 | "items": { 150 | "type": "array" 151 | } 152 | }, 153 | "triangles": { 154 | "type": "array", 155 | "description": "an array whose items are themselves arrays with as many columns as described in \"triangles_columns\". The value of the \"idx_vertexN\" columns must be indices (between 0 and len(\"vertices\"-1) of items of the \"vertices\" array", 156 | "items": { 157 | "type": "array" 158 | } 159 | } 160 | }, 161 | "required": [ 162 | "file_type", 163 | "format_version", 164 | "transformed_components", 165 | "vertices_columns", 166 | "triangles_columns", 167 | "vertices", 168 | "triangles" 169 | ], 170 | "additionalProperties": false, 171 | "definitions": { 172 | "crs": { 173 | "type": "string" 174 | }, 175 | "datetime": { 176 | "type": "string", 177 | "format": "date-time", 178 | "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" 179 | }, 180 | "extent": { 181 | "type": "object", 182 | "properties": { 183 | "type": { 184 | "type": "string", 185 | "enum": [ 186 | "bbox" 187 | ] 188 | }, 189 | "name" : { 190 | "type": "string", 191 | "description": "Name of the extent (e.g. \"Finland - mainland south of 66°N\")" 192 | }, 193 | "parameters": { 194 | "type": "object", 195 | "properties": { 196 | "bbox": { 197 | "type": "array", 198 | "minItems": 4, 199 | "maxItems": 4, 200 | "items": { 201 | "type": "number" 202 | } 203 | } 204 | } 205 | } 206 | }, 207 | "required": [ 208 | "type", 209 | "parameters" 210 | ], 211 | "additionalProperties": false 212 | } 213 | } 214 | } 215 | --------------------------------------------------------------------------------
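For reference, a minimal document satisfying this schema (hypothetical values chosen purely for illustration; the EPSG codes mirror the KKJ -> ETRS89 example cited in the property descriptions) could look like:

```
{
  "file_type": "triangulation_file",
  "format_version": "1.0",
  "input_crs": "EPSG:2393",
  "output_crs": "EPSG:3067",
  "transformed_components": ["horizontal"],
  "vertices_columns": ["source_x", "source_y", "target_x", "target_y"],
  "triangles_columns": ["idx_vertex1", "idx_vertex2", "idx_vertex3"],
  "vertices": [[3500000.0, 6700000.0, 500000.0, 6697000.0], [3501000.0, 6700000.0, 501000.0, 6697000.0], [3500000.0, 6701000.0, 500000.0, 6698000.0]],
  "triangles": [[0, 1, 2]]
}
```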