changeset 959:6ab012d0f0c2

Started writing an importer job for sounding results.
author Sascha L. Teichmann <sascha.teichmann@intevation.de>
date Tue, 16 Oct 2018 18:20:50 +0200
parents 2818ad6c7d32
children e23ae2c83427
files pkg/imports/sr.go pkg/octree/vertex.go
diffstat 2 files changed, 225 insertions(+), 2 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pkg/imports/sr.go	Tue Oct 16 18:20:50 2018 +0200
@@ -0,0 +1,199 @@
+package imports
+
+import (
+	"archive/zip"
+	"bufio"
+	"context"
+	"database/sql"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"io"
+	"log"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"time"
+
+	"gemma.intevation.de/gemma/pkg/octree"
+)
+
// SoundingResult is an import job for an uploaded sounding result:
// a ZIP archive ("upload.zip") containing a meta.json file and an
// *.xyz point file, to be loaded into the database.
type SoundingResult struct {
	who string // user on whose behalf the import runs; exposed via Who().
	dir string // scratch directory holding upload.zip; removed by CleanUp().
}
+
+const SoundingResultDateFormat = "2006-01-02"
+
+type SoundingResultDate struct{ time.Time }
+
+type SoundingResultMeta struct {
+	Date           SoundingResultDate `json:"date"`
+	Bottleneck     string             `json:"bottleneck"`
+	EPSG           uint               `json:"epsg"`
+	DepthReference string             `json:"depth-reference"`
+}
+
+func (srd *SoundingResultDate) UnmarshalJSON(data []byte) error {
+	var s string
+	if err := json.Unmarshal(data, &s); err != nil {
+		return err
+	}
+	d, err := time.Parse(SoundingResultDateFormat, s)
+	if err == nil {
+		*srd = SoundingResultDate{d}
+	}
+	return err
+}
+
// Who returns the user on whose behalf this import job runs.
func (sr *SoundingResult) Who() string {
	return sr.who
}

// CleanUp removes the scratch directory of this import job,
// including the uploaded ZIP archive.
func (sr *SoundingResult) CleanUp() error {
	return os.RemoveAll(sr.dir)
}
+
+func find(needle string, haystack []*zip.File) *zip.File {
+	needle = strings.ToLower(needle)
+	for _, straw := range haystack {
+		if strings.HasSuffix(strings.ToLower(straw.Name), needle) {
+			return straw
+		}
+	}
+	return nil
+}
+
+func loadMeta(f *zip.File) (*SoundingResultMeta, error) {
+	r, err := f.Open()
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+	var m SoundingResultMeta
+	err = json.NewDecoder(r).Decode(&m)
+	return &m, err
+}
+
+func (m *SoundingResultMeta) validate(conn *sql.Conn) error {
+
+	var b bool
+	err := conn.QueryRowContext(context.Background(),
+		`SELECT true FROM internal.depth_references WHERE depth_reference = $1`,
+		m.DepthReference).Scan(&b)
+	switch {
+	case err == sql.ErrNoRows:
+		return fmt.Errorf("Unknown depth reference '%s'\n", m.DepthReference)
+	case err != nil:
+		return err
+	case !b:
+		return errors.New("Unexpected depth reference")
+	}
+
+	err = conn.QueryRowContext(context.Background(),
+		`SELECT true FROM waterway.bottlenecks WHERE bottleneck_id = $1`,
+		m.Bottleneck).Scan(&b)
+	switch {
+	case err == sql.ErrNoRows:
+		return fmt.Errorf("Unknown bottleneck '%s'\n", m.Bottleneck)
+	case err != nil:
+		return err
+	case !b:
+		return errors.New("Unexpected bottleneck")
+	}
+
+	return nil
+}
+
+func loadXYZReader(r io.Reader) (octree.MultiPointZ, error) {
+	mpz := make(octree.MultiPointZ, 0, 250000)
+	s := bufio.NewScanner(r)
+
+	for line := 1; s.Scan(); line++ {
+		text := s.Text()
+		var p octree.Vertex
+		// fmt.Sscanf(text, "%f,%f,%f") is 4 times slower.
+		idx := strings.IndexByte(text, ',')
+		if idx == -1 {
+			log.Printf("format error in line %d\n", line)
+			continue
+		}
+		var err error
+		if p.X, err = strconv.ParseFloat(text[:idx], 64); err != nil {
+			log.Printf("format error in line %d: %v\n", line, err)
+			continue
+		}
+		text = text[idx+1:]
+		if idx = strings.IndexByte(text, ','); idx == -1 {
+			log.Printf("format error in line %d\n", line)
+			continue
+		}
+		if p.Y, err = strconv.ParseFloat(text[:idx], 64); err != nil {
+			log.Printf("format error in line %d: %v\n", line, err)
+			continue
+		}
+		text = text[idx+1:]
+		if p.Z, err = strconv.ParseFloat(text, 64); err != nil {
+			log.Printf("format error in line %d: %v\n", line, err)
+			continue
+		}
+		mpz = append(mpz, p)
+	}
+
+	if err := s.Err(); err != nil {
+		return nil, err
+	}
+
+	return mpz, nil
+}
+
+func loadXYZ(f *zip.File) (octree.MultiPointZ, error) {
+	r, err := f.Open()
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+	return loadXYZReader(r)
+}
+
+func (sr *SoundingResult) Do(conn *sql.Conn) error {
+
+	z, err := zip.OpenReader(filepath.Join(sr.dir, "upload.zip"))
+	if err != nil {
+		return err
+	}
+	defer z.Close()
+
+	mf := find("meta.json", z.File)
+	if mf == nil {
+		return errors.New("Cannot find 'meta.json'")
+	}
+
+	m, err := loadMeta(mf)
+	if err != nil {
+		return err
+	}
+
+	if err := m.validate(conn); err != nil {
+		return err
+	}
+
+	xyzf := find(".xyz", z.File)
+	if xyzf == nil {
+		return errors.New("Cannot find any *.xyz file")
+	}
+
+	xyz, err := loadXYZ(xyzf)
+	if err != nil {
+		return err
+	}
+
+	if len(xyz) == 0 {
+		return errors.New("XYZ does not contain any vertices.")
+	}
+
+	// TODO: Implement more.
+
+	return nil
+}
--- a/pkg/octree/vertex.go	Tue Oct 16 14:59:32 2018 +0200
+++ b/pkg/octree/vertex.go	Tue Oct 16 18:20:50 2018 +0200
@@ -20,6 +20,7 @@
 
 	Line [2]Vertex
 
+	MultiPointZ      []Vertex
 	LineStringZ      []Vertex
 	MultiLineStringZ []LineStringZ
 
@@ -38,10 +39,13 @@
 )
 
 const (
-	wkbNDR              byte   = 1
+	wkbNDR byte = 1
+
 	wkbLineString       uint32 = 2
+	wkbMultiLineString  uint32 = 5
+	wkbPointZ           uint32 = 1000 + 1
 	wkbLineStringZ      uint32 = 1000 + 2
-	wkbMultiLineString  uint32 = 5
+	wkbMultiPointZ      uint32 = 1000 + 4
 	wkbMultiLineStringZ uint32 = 1000 + 5
 )
 
@@ -773,3 +777,23 @@
 
 	return out
 }
+
+func (mpz MultiPointZ) AsWKB() []byte {
+	size := 1 + 4 + 4 + len(mpz)*(1+4+3*8)
+
+	buf := bytes.NewBuffer(make([]byte, 0, size))
+
+	binary.Write(buf, binary.LittleEndian, wkbNDR)
+	binary.Write(buf, binary.LittleEndian, wkbMultiPointZ)
+	binary.Write(buf, binary.LittleEndian, uint32(len(mpz)))
+
+	for _, p := range mpz {
+		binary.Write(buf, binary.LittleEndian, wkbNDR)
+		binary.Write(buf, binary.LittleEndian, wkbPointZ)
+		binary.Write(buf, binary.LittleEndian, math.Float64bits(p.X))
+		binary.Write(buf, binary.LittleEndian, math.Float64bits(p.Y))
+		binary.Write(buf, binary.LittleEndian, math.Float64bits(p.Z))
+	}
+
+	return buf.Bytes()
+}