changeset 3754:ac12e135d4b8 infiniscroll

merge with default
author Thomas Junk <thomas.junk@intevation.de>
date Tue, 25 Jun 2019 08:58:35 +0200
parents dbdef91a3ff3 (current diff) f20bd1aee549 (diff)
children 7ca3b288ac5e
files client/package.json
diffstat 21 files changed, 452 insertions(+), 392 deletions(-) [+]
line wrap: on
line diff
--- a/.hgtags	Thu Jun 20 17:21:09 2019 +0200
+++ b/.hgtags	Tue Jun 25 08:58:35 2019 +0200
@@ -6,3 +6,4 @@
 83f9a131dfb69f3ea1f430728a0831ccf96316d7 v2.1.0
 4d69b79e9df6e2b646ac60f75f6655bb3c64f6c6 v2.1.0
 4d7b481e1d392cd68c48a4ad19fc2865110e724e v3-uat2
+d4a5d11e23a043026a747c626d133b45047d17df v3
--- a/client/package.json	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/package.json	Tue Jun 25 08:58:35 2019 +0200
@@ -1,6 +1,6 @@
 {
   "name": "gemmajs",
-  "version": "3.0.0-dev",
+  "version": "3.0.1-dev",
   "license": "AGPL-3.0-or-later",
   "repository": {
     "type": "hg",
--- a/client/src/components/Search.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/Search.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -187,6 +187,7 @@
     ]),
     ...mapState("imports", ["startDate", "endDate"]),
     ...mapGetters("imports", ["filters"]),
+    ...mapGetters("map", ["openLayersMap"]),
     searchQuery: {
       get() {
         return this.$store.state.application.searchQuery;
@@ -299,17 +300,48 @@
       this.searchQueryIsDirty = false;
     },
     moveToSearchResult(resultEntry) {
+      let zoom = 16;
+      if (resultEntry.type === "bottleneck") {
+        this.openLayersMap()
+          .getLayer("BOTTLENECKS")
+          .setVisible(true);
+      }
+      if (resultEntry.type === "rhm") {
+        this.openLayersMap()
+          .getLayer("DISTANCEMARKSAXIS")
+          .setVisible(true);
+      }
+      if (resultEntry.type === "gauge") {
+        this.openLayersMap()
+          .getLayer("GAUGES")
+          .setVisible(true);
+      }
+      if (resultEntry.type === "stretch") {
+        this.openLayersMap()
+          .getLayer("STRETCHES")
+          .setVisible(true);
+      }
+      if (resultEntry.type === "section") {
+        this.openLayersMap()
+          .getLayer("SECTIONS")
+          .setVisible(true);
+      }
+      if (resultEntry.type === "city") zoom = 13;
+
       if (resultEntry.geom.type == "Point") {
-        let zoom = 11;
-        if (resultEntry.type === "bottleneck") zoom = 17;
-        if (resultEntry.type === "rhm") zoom = 15;
-        if (resultEntry.type === "city") zoom = 13;
-        if (resultEntry.type === "gauge") zoom = 15;
         this.$store.dispatch("map/moveMap", {
           coordinates: resultEntry.geom.coordinates,
           zoom,
           preventZoomOut: true
         });
+      } else if (resultEntry.geom.type == "Polygon") {
+        const boundingBox = [
+          Math.min(...resultEntry.geom.coordinates[0].map(c => c[0])),
+          Math.min(...resultEntry.geom.coordinates[0].map(c => c[1])),
+          Math.max(...resultEntry.geom.coordinates[0].map(c => c[0])),
+          Math.max(...resultEntry.geom.coordinates[0].map(c => c[1]))
+        ];
+        this.$store.dispatch("map/moveToBoundingBox", { boundingBox, zoom });
       }
       // this.searchQuery = ""; // clear search query again
       this.toggleSearchbar();
--- a/client/src/components/fairway/AvailableFairwayDepth.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/fairway/AvailableFairwayDepth.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -97,16 +97,12 @@
       loading: false,
       width: 1000,
       height: 600,
-      paddingRight: 100,
-      spaceBetween: 80,
       labelPaddingBottom: 10,
       scalePaddingLeft: 50,
       paddingTop: 10,
       diagram: null,
       yScale: null,
-      barsWidth: 60,
       dimensions: null,
-      ldcoffset: 3,
       pdf: {
         doc: null,
         width: null,
@@ -251,6 +247,18 @@
     featureName() {
       if (this.selectedFairwayAvailabilityFeature == null) return "";
       return this.selectedFairwayAvailabilityFeature.properties.name;
+    },
+    widthPerItem() {
+      return Math.min(
+        (this.dimensions.width - this.scalePaddingLeft) / this.fwData.length,
+        180
+      );
+    },
+    spaceBetween() {
+      return this.widthPerItem * 0.2;
+    },
+    ldcOffset() {
+      return this.widthPerItem * 0.1;
     }
   },
   methods: {
@@ -504,7 +512,7 @@
         .enter()
         .append("g")
         .attr("transform", (d, i) => {
-          const dx = this.paddingRight + i * this.spaceBetween;
+          const dx = this.scalePaddingLeft + i * this.widthPerItem;
           return `translate(${dx})`;
         });
       this.drawSingleBars(everyBar);
@@ -548,8 +556,8 @@
         .attr("height", d => {
           return this.yScale(0) - this.yScale(hoursInDays(d.height));
         })
-        .attr("x", this.ldcoffset)
-        .attr("width", this.barsWidth - this.ldcoffset)
+        .attr("x", this.ldcOffset + this.spaceBetween)
+        .attr("width", this.widthPerItem - this.ldcOffset - this.spaceBetween)
         .attr("fill", (d, i) => {
           return this.$options.COLORS.REST[i];
         });
@@ -584,8 +592,8 @@
         })
         .attr("y", this.yScale(0))
         .attr("height", height)
-        .attr("x", -this.ldcoffset)
-        .attr("width", this.barsWidth - this.ldcoffset)
+        .attr("x", this.spaceBetween)
+        .attr("width", this.widthPerItem - this.spaceBetween)
         .attr("transform", d => `translate(0 ${-1 * height(d)})`)
         .attr("fill", this.$options.COLORS.LDC)
         .attr("id", "ldc");
@@ -618,9 +626,9 @@
           //dy gives offset of svg on page
         })
         .attr("y", this.yScale(0))
-        .attr("x", this.ldcoffset)
         .attr("height", height)
-        .attr("width", this.barsWidth - this.ldcoffset)
+        .attr("x", this.ldcOffset + this.spaceBetween)
+        .attr("width", this.widthPerItem - this.ldcOffset - this.spaceBetween)
         .attr("transform", d => `translate(0 ${-1 * height(d)})`)
         .attr("fill", this.$options.COLORS.HIGHEST);
     },
@@ -628,7 +636,10 @@
       everyBar
         .append("text")
         .text(d => d.label)
-        .attr("y", this.dimensions.mainHeight - this.labelPaddingBottom);
+        .attr("y", this.dimensions.mainHeight - this.labelPaddingBottom)
+        .attr("x", this.widthPerItem / 2)
+        .attr("text-anchor", "middle")
+        .attr("font-size", "smaller");
     },
     drawScaleLabel() {
       const center = this.dimensions.mainHeight / 2;
--- a/client/src/components/fairway/AvailableFairwayDepthLNWL.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/fairway/AvailableFairwayDepthLNWL.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -511,6 +511,7 @@
         .append("text")
         .text(date)
         .attr("text-anchor", "middle")
+        .attr("font-size", "smaller")
         .attr(
           "transform",
           `translate(${this.scalePaddingLeft +
--- a/client/src/components/gauge/HydrologicalConditions.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/gauge/HydrologicalConditions.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -543,7 +543,10 @@
       updaters.push(this.drawWaterlevelLineChart("q75"));
       updaters.push(this.drawWaterlevelLineChart("mean", this.yearWaterlevels));
       updaters.push(this.drawNowLines());
-      this.drawRefLines(refWaterLevels); // static, doesn't need an updater
+
+      if (refWaterLevels) {
+        this.drawRefLines(refWaterLevels); // static, doesn't need an updater
+      }
 
       // INTERACTIONS
 
@@ -673,23 +676,31 @@
       return { width, mainHeight, navHeight, mainMargin, navMargin };
     },
     getExtent(refWaterLevels) {
-      const waterlevelsRelevantForExtent = [];
+      const waterlevelValues = [];
       this.longtermWaterlevels.forEach(wl => {
-        waterlevelsRelevantForExtent.push(wl.min, wl.max);
+        waterlevelValues.push(wl.min, wl.max);
       });
-      waterlevelsRelevantForExtent.push(
-        refWaterLevels.HDC + (refWaterLevels.HDC - refWaterLevels.LDC) / 8,
-        Math.max(
-          refWaterLevels.LDC - (refWaterLevels.HDC - refWaterLevels.LDC) / 4,
-          0
-        )
-      );
+      if (refWaterLevels) {
+        waterlevelValues.push(
+          refWaterLevels.HDC + (refWaterLevels.HDC - refWaterLevels.LDC) / 8,
+          Math.max(
+            refWaterLevels.LDC - (refWaterLevels.HDC - refWaterLevels.LDC) / 4,
+            0
+          )
+        );
+      } else {
+        let delta = d3.max(waterlevelValues) - d3.min(waterlevelValues);
+        waterlevelValues.push(
+          d3.max(waterlevelValues) + delta * 0.1,
+          d3.min(waterlevelValues) - delta * 0.1
+        );
+      }
       return {
         // set min/max values for the date axis
         date: [startOfYear(new Date()), endOfYear(new Date())],
         // set min/max values for the waterlevel axis
         // including HDC (+ 1/8 HDC-LDC) and LDC (- 1/4 HDC-LDC)
-        waterlevel: d3.extent(waterlevelsRelevantForExtent)
+        waterlevel: d3.extent(waterlevelValues)
       };
     },
     getScale() {
@@ -740,16 +751,20 @@
       };
     },
     drawNowLines() {
+      const now = new Date();
+      const nowCoords = [
+        { x: now, y: this.extent.waterlevel[0] },
+        { x: now, y: this.extent.waterlevel[1] }
+      ];
       const nowLine = d3
         .line()
         .x(d => this.scale.x(d.x))
         .y(d => this.scale.y(d.y));
-
       const nowLabel = selection => {
         selection.attr(
           "transform",
-          `translate(${this.scale.x(new Date())}, ${this.scale.y(
-            this.extent.waterlevel[1] - 16
+          `translate(${this.scale.x(now)}, ${this.scale.y(
+            this.extent.waterlevel[1]
           )})`
         );
       };
@@ -757,10 +772,7 @@
       // draw in main
       this.diagram
         .append("path")
-        .datum([
-          { x: new Date(), y: this.extent.waterlevel[0] },
-          { x: new Date(), y: this.extent.waterlevel[1] - 20 }
-        ])
+        .datum(nowCoords)
         .attr("class", "now-line")
         .attr("d", nowLine);
       this.diagram // label
@@ -773,10 +785,7 @@
       // draw in nav
       this.navigation
         .append("path")
-        .datum([
-          { x: new Date(), y: this.extent.waterlevel[0] },
-          { x: new Date(), y: this.extent.waterlevel[1] - 20 }
-        ])
+        .datum(nowCoords)
         .attr("class", "now-line")
         .attr(
           "d",
--- a/client/src/components/gauge/Waterlevel.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/gauge/Waterlevel.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -565,7 +565,9 @@
 
       // static, don't need updater
       this.drawNavigationChart();
-      this.drawRefLines(refWaterLevels);
+      if (refWaterLevels) {
+        this.drawRefLines(refWaterLevels);
+      }
 
       updaters.push(this.drawNashSutcliffe(72));
       updaters.push(this.drawNashSutcliffe(48));
@@ -730,6 +732,23 @@
       return { width, mainHeight, navHeight, mainMargin, navMargin };
     },
     getExtent(refWaterLevels) {
+      let waterlevelValues = [...this.waterlevels.map(wl => wl.waterlevel)];
+      if (refWaterLevels) {
+        waterlevelValues.push(
+          refWaterLevels.HDC + (refWaterLevels.HDC - refWaterLevels.LDC) / 8,
+          Math.max(
+            refWaterLevels.LDC - (refWaterLevels.HDC - refWaterLevels.LDC) / 4,
+            0
+          )
+        );
+      } else {
+        let delta = d3.max(waterlevelValues) - d3.min(waterlevelValues);
+        waterlevelValues.push(
+          d3.max(waterlevelValues) + delta * 0.1,
+          d3.min(waterlevelValues) - delta * 0.1
+        );
+      }
+
       return {
         // set min/max values for the date axis
         date: [
@@ -738,24 +757,8 @@
         ],
         // set min/max values for the waterlevel axis
         // including HDC (+ 1/8 HDC-LDC) and LDC (- 1/4 HDC-LDC)
-        waterlevel: d3.extent(
-          [
-            ...this.waterlevels,
-            {
-              waterlevel:
-                refWaterLevels.HDC +
-                (refWaterLevels.HDC - refWaterLevels.LDC) / 8
-            },
-            {
-              waterlevel: Math.max(
-                refWaterLevels.LDC -
-                  (refWaterLevels.HDC - refWaterLevels.LDC) / 4,
-                0
-              )
-            }
-          ],
-          d => d.waterlevel
-        )
+        // or, if no refWaterlevels exist, +-10% of delta between min and max wl
+        waterlevel: d3.extent(waterlevelValues)
       };
     },
     getScale() {
@@ -859,16 +862,20 @@
         );
     },
     drawNowLines() {
+      const now = new Date();
+      const nowCoords = [
+        { x: now, y: this.extent.waterlevel[0] },
+        { x: now, y: this.extent.waterlevel[1] }
+      ];
       const nowLine = d3
         .line()
         .x(d => this.scale.x(d.x))
         .y(d => this.scale.y(d.y));
-
       const nowLabel = selection => {
         selection.attr(
           "transform",
-          `translate(${this.scale.x(new Date())}, ${this.scale.y(
-            this.extent.waterlevel[1] - 16
+          `translate(${this.scale.x(now)}, ${this.scale.y(
+            this.extent.waterlevel[1]
           )})`
         );
       };
@@ -876,10 +883,7 @@
       // draw in main
       this.diagram
         .append("path")
-        .datum([
-          { x: new Date(), y: this.extent.waterlevel[0] },
-          { x: new Date(), y: this.extent.waterlevel[1] - 20 }
-        ])
+        .datum(nowCoords)
         .attr("class", "now-line")
         .attr("d", nowLine);
       this.diagram // label
@@ -892,10 +896,7 @@
       // draw in nav
       this.navigation
         .append("path")
-        .datum([
-          { x: new Date(), y: this.extent.waterlevel[0] },
-          { x: new Date(), y: this.extent.waterlevel[1] - 20 }
-        ])
+        .datum(nowCoords)
         .attr("class", "now-line")
         .attr(
           "d",
--- a/client/src/components/importoverview/ImportOverview.vue	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/importoverview/ImportOverview.vue	Tue Jun 25 08:58:35 2019 +0200
@@ -414,6 +414,8 @@
                 this.$store.commit("imports/setReviewed", []);
                 this.$store.dispatch("map/refreshLayers");
                 this.$store.dispatch("imports/loadStagingNotifications");
+                this.$store.dispatch("imports/loadStretches");
+                this.$store.dispatch("imports/loadSections");
                 const messages = response.data
                   .map(x => {
                     if (x.message) return x.message;
--- a/client/src/components/map/styles.js	Thu Jun 20 17:21:09 2019 +0200
+++ b/client/src/components/map/styles.js	Tue Jun 25 08:58:35 2019 +0200
@@ -319,10 +319,15 @@
       if (waterlevel) {
         text += "\n(" + waterlevel + " cm)";
         let refWaterlevels = JSON.parse(feature.get("reference_water_levels"));
-        if (waterlevel < refWaterlevels.LDC) iconColor = "brown";
-        if (waterlevel > refWaterlevels.LDC && waterlevel < refWaterlevels.HDC)
-          iconColor = "blue";
-        if (waterlevel > refWaterlevels.HDC) iconColor = "red";
+        if (refWaterlevels) {
+          if (waterlevel < refWaterlevels.LDC) iconColor = "brown";
+          if (
+            waterlevel > refWaterlevels.LDC &&
+            waterlevel < refWaterlevels.HDC
+          )
+            iconColor = "blue";
+          if (waterlevel > refWaterlevels.HDC) iconColor = "red";
+        }
       }
 
       return [
--- a/pkg/common/zip.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/common/zip.go	Tue Jun 25 08:58:35 2019 +0200
@@ -21,6 +21,9 @@
 // FindInZIP scans a ZIP file directory for a file that ends with
 // case insensitive string. Returns only the first match.
 func FindInZIP(z *zip.ReadCloser, needle string) *zip.File {
+	if z == nil {
+		return nil
+	}
 	needle = strings.ToLower(needle)
 	for _, straw := range z.File {
 		if strings.HasSuffix(strings.ToLower(straw.Name), needle) {
--- a/pkg/controllers/gauges.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/controllers/gauges.go	Tue Jun 25 08:58:35 2019 +0200
@@ -663,6 +663,7 @@
 		return
 	}
 
+	// TODO: FIXME The filter is not correct for predictions!?
 	filters := filterAnd{
 		buildFilterTerm(
 			"location = ($%d::char(2), $%d::char(3), $%d::char(5), $%d::char(5), $%d::int)",
@@ -677,7 +678,7 @@
 			buildFilterTerm(
 				`date_issue = (
                  SELECT max(date_issue)
-                 FROM waterway.gauge_measurements gm
+                 FROM waterway.gauge_predictions gm
                  WHERE location = ($%d::char(2), $%d::char(3), $%d::char(5), $%d::char(5), $%d::int))`,
 				isrs.CountryCode,
 				isrs.LoCode,
--- a/pkg/controllers/pwreset.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/controllers/pwreset.go	Tue Jun 25 08:58:35 2019 +0200
@@ -288,15 +288,11 @@
 	return
 }
 
-func passwordReset(
-	_ interface{},
-	req *http.Request,
-	_ *sql.Conn,
-) (jr JSONResult, err error) {
+func passwordReset(rw http.ResponseWriter, req *http.Request) {
 
 	hash := mux.Vars(req)["hash"]
-	if _, err = hex.DecodeString(hash); err != nil {
-		err = JSONError{http.StatusBadRequest, "Invalid hash"}
+	if _, err := hex.DecodeString(hash); err != nil {
+		http.Error(rw, "invalid hash", http.StatusBadRequest)
 		return
 	}
 
@@ -304,7 +300,7 @@
 
 	ctx := req.Context()
 
-	if err = auth.RunAs(
+	if err := auth.RunAs(
 		ctx, pwResetRole, func(conn *sql.Conn) error {
 			err := conn.QueryRowContext(ctx, findRequestSQL, hash).Scan(&email, &user)
 			switch {
@@ -324,12 +320,18 @@
 			_, err = conn.ExecContext(ctx, deleteRequestSQL, hash)
 			return err
 		}); err == nil {
-		body := changedMessageBody(useHTTPS(req), user, password, host(req))
-		if err = misc.SendMail(email, "Password Reset Done", body); err == nil {
-			jr.Result = &struct {
-				SendTo string `json:"send-to"`
-			}{email}
+		https := useHTTPS(req)
+		server := host(req)
+		body := changedMessageBody(https, user, password, server)
+		if err = misc.SendMail(email, "Password Reset Done", body); err != nil {
+			log.Printf("error: %v\n", err)
+			http.Error(
+				rw,
+				http.StatusText(http.StatusInternalServerError),
+				http.StatusInternalServerError)
+			return
 		}
+		var url = https + "://" + server
+		http.Redirect(rw, req, url, http.StatusSeeOther)
 	}
-	return
 }
--- a/pkg/controllers/routes.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/controllers/routes.go	Tue Jun 25 08:58:35 2019 +0200
@@ -104,10 +104,8 @@
 		NoConn: true,
 	}).Methods(http.MethodPost)
 
-	api.Handle("/users/passwordreset/{hash}", &JSONHandler{
-		Handle: passwordReset,
-		NoConn: true,
-	}).Methods(http.MethodGet)
+	api.HandleFunc("/users/passwordreset/{hash}", passwordReset).
+		Methods(http.MethodGet)
 
 	// Print templates
 	api.Handle("/templates", any(&JSONHandler{
--- a/pkg/controllers/search.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/controllers/search.go	Tue Jun 25 08:58:35 2019 +0200
@@ -18,20 +18,12 @@
 	"database/sql"
 	"net/http"
 	"regexp"
-	"strconv"
 	"strings"
 
 	"gemma.intevation.de/gemma/pkg/models"
 )
 
 const (
-	searchHectometreSQL = `SELECT COALESCE(json_agg(r),'[]')
-FROM (SELECT (location_code).hectometre || ' rhm' AS name,
-			replace(concat(location_code), ',','') AS locationcode,
-             ST_AsGeoJSON(geom)::json AS geom, 'rhm' AS type
-      FROM waterway.distance_marks_virtual
-      WHERE (location_code).hectometre = $1) r`
-
 	searchMostSQL = `SELECT search_most($1)::text`
 
 	listBottlenecksSQL = `
@@ -66,37 +58,11 @@
 	}
 
 	var result string
-
-	m := rkmRegex.FindStringSubmatch(s.SearchString)
-	if len(m) != 0 {
-		// Handle search for river kilometre:
-		var hectometre int
-		if hectometre, err = strconv.Atoi(m[1]); err != nil {
-			return
-		}
-
-		hectometre *= 10
-		if m[3] != "" {
-			var h int
-			if h, err = strconv.Atoi(m[3]); err != nil {
-				return
-			}
-			hectometre += h
-		}
-
-		err = db.QueryRowContext(
-			req.Context(),
-			searchHectometreSQL,
-			hectometre,
-		).Scan(&result)
-	} else {
-		// Hande search for bottlencks:
-		err = db.QueryRowContext(
-			req.Context(),
-			searchMostSQL,
-			s.SearchString,
-		).Scan(&result)
-	}
+	err = db.QueryRowContext(
+		req.Context(),
+		searchMostSQL,
+		s.SearchString,
+	).Scan(&result)
 
 	if err != nil {
 		return
--- a/pkg/imports/gm.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/imports/gm.go	Tue Jun 25 08:58:35 2019 +0200
@@ -363,6 +363,7 @@
 	for _, msg := range result {
 		for _, wrm := range msg.Wrm {
 			curr := string(*wrm.Geo_object.Id)
+			curr = strings.TrimSpace(curr)
 			currIsrs, err := models.IsrsFromString(curr)
 			if err != nil {
 				feedback.Warn("Invalid ISRS code %v", err)
@@ -370,7 +371,7 @@
 			}
 			feedback.Info("Found measurements/predictions for %s", curr)
 			if !isKnown(curr) {
-				feedback.Warn("Cannot import data for %s", curr)
+				feedback.Warn("Cannot find gauge %q for import", curr)
 				continue
 			}
 
@@ -385,6 +386,11 @@
 			newM, newP := 0, 0
 			for _, measure := range wrm.Measure {
 				var unit string
+				if *measure.Measure_code != nts.Measure_code_enumWAL {
+					feedback.Warn("Ignored message with measure_code %s",
+						*measure.Measure_code)
+					continue
+				}
 				if measure.Unit == nil {
 					feedback.Info("'Unit' not specified. Assuming 'cm'")
 					unit = "cm"
@@ -399,17 +405,18 @@
 				convert(measure.Value_min)
 				convert(measure.Value_max)
 
-				if *measure.Measure_code != nts.Measure_code_enumWAL {
-					feedback.Warn("Ignored message with measure_code %s",
-						*measure.Measure_code)
-					continue
-				}
-
 				var dummy int
 				if measure.Predicted {
-					var confInterval pgtype.Numrange
+					confInterval := pgtype.Numrange{
+						Lower:     pgtype.Numeric{Status: pgtype.Null},
+						Upper:     pgtype.Numeric{Status: pgtype.Null},
+						LowerType: pgtype.Inclusive,
+						UpperType: pgtype.Inclusive,
+						Status:    pgtype.Null,
+					}
 					if measure.Value_min != nil && measure.Value_max != nil {
-						var valueMin, valueMax pgtype.Numeric
+						valueMin := pgtype.Numeric{Status: pgtype.Null}
+						valueMax := pgtype.Numeric{Status: pgtype.Null}
 						valueMin.Set(measure.Value_min)
 						valueMax.Set(measure.Value_max)
 						confInterval = pgtype.Numrange{
--- a/pkg/imports/sr.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/imports/sr.go	Tue Jun 25 08:58:35 2019 +0200
@@ -68,7 +68,6 @@
 )
 
 const (
-	tooLongEdge          = 50.0
 	pointsPerSquareMeter = 2
 )
 
@@ -221,11 +220,17 @@
 
 	start := time.Now()
 
-	z, err := zip.OpenReader(filepath.Join(sr.Dir, "sr.zip"))
+	zpath := filepath.Join(sr.Dir, "sr.zip")
+
+	z, err := zip.OpenReader(zpath)
 	if err != nil {
-		return nil, err
+		feedback.Warn("Expected ZIP file: %v", err)
+		feedback.Warn("Falling back to TXT file mode.")
+		z = nil
 	}
-	defer z.Close()
+	if z != nil {
+		defer z.Close()
+	}
 
 	feedback.Info("Looking for 'meta.json'")
 
@@ -272,18 +277,23 @@
 		return nil, common.ToError(err)
 	}
 
-	var xyzf *zip.File
-	for _, ext := range []string{".xyz", ".txt"} {
-		feedback.Info("Looking for '*%s'", ext)
-		if xyzf = common.FindInZIP(z, ext); xyzf != nil {
-			break
+	var xyz octree.MultiPointZ
+
+	if z != nil { // Scanning ZIP file for *.xyz file.
+		var xyzf *zip.File
+		for _, ext := range []string{".xyz", ".txt"} {
+			feedback.Info("Looking for '*%s'", ext)
+			if xyzf = common.FindInZIP(z, ext); xyzf != nil {
+				break
+			}
 		}
+		if xyzf == nil {
+			return nil, errors.New("Cannot find any *.xyz or *.txt file")
+		}
+		xyz, err = loadXYZ(xyzf, feedback, xform)
+	} else { // TXT file mode
+		xyz, err = loadXYZFile(zpath, feedback, xform)
 	}
-	if xyzf == nil {
-		return nil, errors.New("Cannot find any *.xyz or *.txt file")
-	}
-
-	xyz, err := loadXYZ(xyzf, feedback, xform)
 	if err != nil {
 		return nil, err
 	}
@@ -307,20 +317,22 @@
 	var summary interface{}
 
 	if sr.isSingleBeam() {
-		summary, err = sr.singleBeamScan(
+		summary, err = sr.processScan(
 			ctx,
 			tx,
 			feedback,
+			true,
 			importID,
 			m,
 			xyz,
 			boundary,
 		)
 	} else {
-		summary, err = sr.multiBeamScan(
+		summary, err = sr.processScan(
 			ctx,
 			tx,
 			feedback,
+			false,
 			importID,
 			m,
 			xyz,
@@ -343,24 +355,30 @@
 	return summary, nil
 }
 
-func (sr *SoundingResult) singleBeamScan(
+func (sr *SoundingResult) processScan(
 	ctx context.Context,
 	tx *sql.Tx,
 	feedback Feedback,
+	singleBeam bool,
 	importID int64,
 	m *models.SoundingResultMeta,
 	xyz octree.MultiPointZ,
 	boundary polygonSlice,
 ) (interface{}, error) {
 
-	feedback.Info("Processing as single beam scan.")
+	if singleBeam {
+		feedback.Info("Processing as single beam scan.")
+	} else {
+		feedback.Info("Processing as multi beam scan.")
+	}
+
 	feedback.Info("Reproject XYZ data.")
 
 	start := time.Now()
 
 	xyzWKB := xyz.AsWKB()
 	var reproj []byte
-	var epsg uint
+	var epsg uint32
 
 	if err := tx.QueryRowContext(
 		ctx,
@@ -394,10 +412,12 @@
 		removed                 map[int32]struct{}
 		polygonArea             float64
 		clippingPolygonWKB      []byte
+		tin                     *octree.Tin
 	)
 
 	if boundary == nil {
+		feedback.Info("No boundary given. Calculate from XYZ data.")
+		tooLongEdge := tri.EstimateTooLong()
 		feedback.Info("Eliminate triangles with edges longer than %.2f meters.", tooLongEdge)
 
 		var polygon octree.LineStringZ
@@ -451,7 +471,7 @@
 		}
 
 		tin := tri.Tin()
-		tin.EPSG = uint32(epsg)
+		tin.EPSG = epsg
 
 		var str octree.STRTree
 		str.Build(tin)
@@ -459,68 +479,72 @@
 		removed = str.Clip(&clippingPolygon)
 	}
 
-	// Build the first mesh to generate random points on.
+	if singleBeam {
+
+		// Build the first mesh to generate random points on.
+
+		feedback.Info("Build virtual DEM based on original XYZ data.")
+
+		start = time.Now()
 
-	feedback.Info("Build virtual DEM based on original XYZ data.")
+		var tree *octree.Tree
+		{
+			builder := octree.NewBuilder(tri.Tin())
+			builder.Build(removed)
+			tree = builder.Tree()
+		}
+
+		feedback.Info("Building took %v", time.Since(start))
 
-	start = time.Now()
+		feedback.Info("Boundary area: %.2fm²", polygonArea)
+
+		numPoints := int(math.Ceil(polygonArea * pointsPerSquareMeter))
+
+		feedback.Info("Generate %d random points for an average density of ~%d points/m².",
+			numPoints, pointsPerSquareMeter)
+
+		start = time.Now()
+
+		generated := make(octree.LineStringZ, 0, numPoints+clippingPolygon.NumVertices(0))
 
-	var tree *octree.Tree
-	{
-		builder := octree.NewBuilder(tri.Tin())
-		builder.Build(removed)
-		tree = builder.Tree()
-	}
+		tree.GenerateRandomVertices(numPoints, func(vertices []octree.Vertex) {
+			generated = append(generated, vertices...)
+		})
+
+		feedback.Info("Generating %d points took %v.", len(generated), time.Since(start))
 
-	feedback.Info("Building took %v", time.Since(start))
-
-	feedback.Info("Boundary area: %.2fm²", polygonArea)
+		// Add the boundary to new point cloud.
+		dupes := map[[2]float64]struct{}{}
+		clippingPolygon.Vertices(0, func(x, y float64) {
+			key := [2]float64{x, y}
+			if _, found := dupes[key]; found {
+				return
+			}
+			dupes[key] = struct{}{}
+			if z, ok := tree.Value(x, y); ok {
+				generated = append(generated, octree.Vertex{X: x, Y: y, Z: z})
+			}
+		})
 
-	numPoints := int(math.Ceil(polygonArea * pointsPerSquareMeter))
+		feedback.Info("Triangulate new point cloud.")
+		xyz = octree.MultiPointZ(generated)
+		start = time.Now()
 
-	feedback.Info("Generate %d random points for an average density of ~%d points/m².",
-		numPoints, pointsPerSquareMeter)
+		tri, err = octree.Triangulate(xyz)
+		if err != nil {
+			return nil, err
+		}
+		feedback.Info("Second triangulation took %v.", time.Since(start))
+		feedback.Info("Number triangles: %d.", len(tri.Triangles)/3)
+		feedback.Info("Clipping triangles from new mesh.")
+
+	} else { // multi beam
+		// Nothing special
+	}
 
 	start = time.Now()
-
-	generated := make(octree.LineStringZ, 0, numPoints+clippingPolygon.NumVertices(0))
-
-	tree.GenerateRandomVertices(numPoints, func(vertices []octree.Vertex) {
-		generated = append(generated, vertices...)
-	})
-
-	feedback.Info("Generating %d points took %v.", len(generated), time.Since(start))
-
-	// Add the boundary to new point cloud.
-	dupes := map[[2]float64]struct{}{}
-	clippingPolygon.Vertices(0, func(x, y float64) {
-		key := [2]float64{x, y}
-		if _, found := dupes[key]; found {
-			return
-		}
-		dupes[key] = struct{}{}
-		if z, ok := tree.Value(x, y); ok {
-			generated = append(generated, octree.Vertex{X: x, Y: y, Z: z})
-		}
-	})
-
-	feedback.Info("Triangulate new point cloud.")
-
-	start = time.Now()
-
-	xyz = octree.MultiPointZ(generated)
-
-	tri, err = octree.Triangulate(xyz)
-	if err != nil {
-		return nil, err
-	}
-	feedback.Info("Second triangulation took %v.", time.Since(start))
-	feedback.Info("Number triangles: %d.", len(tri.Triangles)/3)
-	feedback.Info("Clipping triangles from new mesh.")
-
-	start = time.Now()
-	tin := tri.Tin()
-	tin.EPSG = uint32(epsg)
+	tin = tri.Tin()
+	tin.EPSG = epsg
 
 	var str octree.STRTree
 	str.Build(tin)
@@ -533,8 +557,6 @@
 	feedback.Info("Clipping STR tree took %v.", time.Since(start))
 	feedback.Info("Number of triangles to clip %d.", len(removed))
 
-	start = time.Now()
-
 	feedback.Info("Build final octree index")
 
 	builder := octree.NewBuilder(tin)
@@ -619,160 +641,6 @@
 	return &summary, nil
 }
 
-func (sr *SoundingResult) multiBeamScan(
-	ctx context.Context,
-	tx *sql.Tx,
-	feedback Feedback,
-	importID int64,
-	m *models.SoundingResultMeta,
-	xyz octree.MultiPointZ,
-	boundary polygonSlice,
-) (interface{}, error) {
-
-	feedback.Info("Processing as multi beam scan.")
-	var (
-		id       int64
-		epsg     uint32
-		lat, lon float64
-	)
-	start := time.Now()
-
-	var hull []byte
-
-	xyzWKB := xyz.AsWKB()
-
-	err := tx.QueryRowContext(
-		ctx,
-		insertHullSQL,
-		m.Bottleneck,
-		m.Date.Time,
-		m.DepthReference,
-		xyzWKB,
-		boundary.asWKB(),
-		m.EPSG,
-	).Scan(
-		&id,
-		&lat,
-		&lon,
-		&epsg,
-		&hull,
-	)
-	xyz, boundary = nil, nil // not need from now on.
-	feedback.Info("Calculating hull took %s.", time.Since(start))
-	switch {
-	case err == sql.ErrNoRows:
-		return nil, fmt.Errorf(
-			"No matching bottleneck of given name or time available: %v", err)
-	case err != nil:
-		return nil, err
-	}
-	feedback.Info("Best suited UTM EPSG: %d", epsg)
-
-	start = time.Now()
-
-	var clippingPolygon octree.Polygon
-
-	if err := clippingPolygon.FromWKB(hull); err != nil {
-		return nil, err
-	}
-	clippingPolygon.Indexify()
-	feedback.Info("Building clipping polygon took %v.", time.Since(start))
-
-	start = time.Now()
-
-	var reproj []byte
-
-	if err = tx.QueryRowContext(
-		ctx,
-		reprojectPointsSQL,
-		xyzWKB,
-		m.EPSG,
-		epsg,
-	).Scan(&reproj); err != nil {
-		return nil, err
-	}
-
-	if err := xyz.FromWKB(reproj); err != nil {
-		return nil, err
-	}
-
-	feedback.Info("Reprojecting points took %v.", time.Since(start))
-	feedback.Info("Number of reprojected points: %d", len(xyz))
-
-	start = time.Now()
-
-	tri, err := octree.Triangulate(xyz)
-	if err != nil {
-		return nil, err
-	}
-	feedback.Info("Triangulation took %v.", time.Since(start))
-
-	start = time.Now()
-
-	tin := tri.Tin()
-
-	var str octree.STRTree
-	str.Build(tin)
-	feedback.Info("Building STR tree took %v", time.Since(start))
-
-	start = time.Now()
-
-	removed := str.Clip(&clippingPolygon)
-	feedback.Info("Clipping STR tree took %v.", time.Since(start))
-	feedback.Info("Number of triangles to clip %d.", len(removed))
-
-	start = time.Now()
-
-	tin.EPSG = epsg
-
-	builder := octree.NewBuilder(tin)
-	builder.Build(removed)
-	octreeIndex, err := builder.Bytes()
-	if err != nil {
-		return nil, err
-	}
-	feedback.Info("Building octree took %v.", time.Since(start))
-
-	start = time.Now()
-	h := sha1.New()
-	h.Write(octreeIndex)
-	checksum := hex.EncodeToString(h.Sum(nil))
-	_, err = tx.ExecContext(ctx, insertOctreeSQL, id, checksum, octreeIndex)
-	if err != nil {
-		return nil, err
-	}
-	feedback.Info("Storing octree index took %s.", time.Since(start))
-
-	tree := builder.Tree()
-
-	start = time.Now()
-	err = generateContours(ctx, tx, tree, id)
-	if err != nil {
-		return nil, err
-	}
-	feedback.Info("Generating and storing contour lines took %s.",
-		time.Since(start))
-
-	// Store for potential later removal.
-	if err = track(ctx, tx, importID, "waterway.sounding_results", id); err != nil {
-		return nil, err
-	}
-
-	summary := struct {
-		Bottleneck string      `json:"bottleneck"`
-		Date       models.Date `json:"date"`
-		Lat        float64     `json:"lat"`
-		Lon        float64     `json:"lon"`
-	}{
-		Bottleneck: m.Bottleneck,
-		Date:       m.Date,
-		Lat:        lat,
-		Lon:        lon,
-	}
-
-	return &summary, nil
-}
-
 // CleanUp removes the folder containing the ZIP file with the
 // the sounding result import.
 func (sr *SoundingResult) CleanUp() error {
@@ -901,6 +769,15 @@
 	return loadXYZReader(r, feedback, xform)
 }
 
+func loadXYZFile(f string, feedback Feedback, xform vertexTransform) (octree.MultiPointZ, error) {
+	r, err := os.Open(f)
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+	return loadXYZReader(r, feedback, xform)
+}
+
 func loadBoundary(z *zip.ReadCloser) (polygonSlice, error) {
 	shpF := common.FindInZIP(z, ".shp")
 	if shpF == nil {
--- a/pkg/octree/triangulation.go	Thu Jun 20 17:21:09 2019 +0200
+++ b/pkg/octree/triangulation.go	Tue Jun 25 08:58:35 2019 +0200
@@ -23,6 +23,8 @@
 	"fmt"
 	"log"
 	"math"
+
+	"gonum.org/v1/gonum/stat"
 )
 
 type Triangulation struct {
@@ -39,19 +41,54 @@
 	return &Triangulation{points, t.convexHull(), t.triangles, t.halfedges}, err
 }
 
+func (t *Triangulation) EstimateTooLong() float64 {
+
+	num := len(t.Triangles) / 3
+
+	lengths := make([]float64, 0, num)
+
+	points := t.Points
+
+tris:
+	for i := 0; i < num; i++ {
+		idx := i * 3
+		var max float64
+		vs := t.Triangles[idx : idx+3]
+		for j, vj := range vs {
+			if t.Halfedges[idx+j] < 0 {
+				continue tris
+			}
+			k := (j + 1) % 3
+			if l := points[vj].Distance2D(points[vs[k]]); l > max {
+				max = l
+			}
+		}
+		lengths = append(lengths, max)
+	}
+
+	std := stat.StdDev(lengths, nil)
+	return 3.5 * std
+}
+
 func (t *Triangulation) ConcaveHull(tooLong float64) (LineStringZ, map[int32]struct{}) {
 
+	if tooLong <= 0 {
+		tooLong = t.EstimateTooLong()
+	}
+
 	tooLong *= tooLong
 
 	var candidates []int32
 
+	points := t.Points
+
 	for i, num := 0, len(t.Triangles)/3; i < num; i++ {
 		idx := i * 3
 		var max float64
 		vs := t.Triangles[idx : idx+3]
 		for j, vj := range vs {
 			k := (j + 1) % 3
-			if l := t.Points[vj].SquaredDistance2D(t.Points[vs[k]]); l > max {
+			if l := points[vj].SquaredDistance2D(points[vs[k]]); l > max {
 				max = l
 			}
 		}
@@ -188,12 +225,12 @@
 	curr := rings[0]
 
 	polygon := LineStringZ{
-		t.Points[curr.a],
-		t.Points[curr.b],
+		points[curr.a],
+		points[curr.b],
 	}
 
 	for curr = curr.next; curr != rings[0]; curr = curr.next {
-		polygon = append(polygon, t.Points[curr.b])
+		polygon = append(polygon, points[curr.b])
 	}
 
 	polygon = append(polygon, t.Points[rings[0].a])
--- a/schema/gemma.sql	Thu Jun 20 17:21:09 2019 +0200
+++ b/schema/gemma.sql	Tue Jun 25 08:58:35 2019 +0200
@@ -178,6 +178,33 @@
 $$
 LANGUAGE plpgsql;
 
+-- Constraint trigger: sounding results must intersect with the area
+-- of the bottleneck they belong to.
+CREATE OR REPLACE FUNCTION check_sr_in_bn_area() RETURNS trigger
+LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NOT st_intersects((SELECT area
+                          FROM waterway.bottlenecks
+                          WHERE (bottleneck_id, validity)
+                              =(NEW.bottleneck_id, NEW.bottleneck_validity)),
+                         NEW.area)
+    THEN
+        RAISE EXCEPTION
+            'new row for relation "%" violates constraint trigger "%"',
+                TG_TABLE_NAME, TG_NAME
+            USING
+                DETAIL = 'Failing row area has no intersection with bottleneck.',
+                ERRCODE = 23514,
+                SCHEMA = TG_TABLE_SCHEMA,
+                TABLE = TG_TABLE_NAME,
+                CONSTRAINT = TG_NAME;
+    END IF;
+    RETURN NEW;
+END;
+$$;
+
+
 --
 -- GEMMA data
 --
@@ -663,6 +690,9 @@
         octree_index bytea,
         staging_done boolean NOT NULL DEFAULT false
     )
+    CREATE CONSTRAINT TRIGGER sounding_results_in_bn_area
+        AFTER INSERT OR UPDATE ON sounding_results
+        FOR EACH ROW EXECUTE FUNCTION check_sr_in_bn_area()
 
     CREATE TABLE sounding_results_contour_lines (
         sounding_result_id int NOT NULL REFERENCES sounding_results
--- a/schema/isrs.sql	Thu Jun 20 17:21:09 2019 +0200
+++ b/schema/isrs.sql	Tue Jun 25 08:58:35 2019 +0200
@@ -154,6 +154,7 @@
 DECLARE
     isrs_len CONSTANT int = 20;
 BEGIN
+    isrs_text = upper(isrs_text);
     IF char_length(isrs_text) <> isrs_len
     THEN
         RAISE 'ISRS location code must be % characters long', isrs_len
--- a/schema/isrs_functions.sql	Thu Jun 20 17:21:09 2019 +0200
+++ b/schema/isrs_functions.sql	Tue Jun 25 08:58:35 2019 +0200
@@ -210,3 +210,16 @@
     $$
     LANGUAGE plpgsql
     STABLE PARALLEL SAFE;
+
+
+-- Check if a given string looks like an ISRS code
+CREATE OR REPLACE FUNCTION is_ISRSstring(str text) RETURNS boolean
+AS $$
+BEGIN
+  str = upper(str);
+  RETURN (SELECT str SIMILAR TO '[A-Z]{2}[A-Z0-9]{13}[0-9]{5}')
+         AND is_country(substring(str from 1 for 2));
+END;
+    $$
+    LANGUAGE plpgsql
+    IMMUTABLE PARALLEL SAFE;
--- a/schema/search_functions.sql	Thu Jun 20 17:21:09 2019 +0200
+++ b/schema/search_functions.sql	Tue Jun 25 08:58:35 2019 +0200
@@ -4,15 +4,44 @@
 -- SPDX-License-Identifier: AGPL-3.0-or-later
 -- License-Filename: LICENSES/AGPL-3.0.txt
 
--- Copyright (C) 2018 by via donau
+-- Copyright (C) 2018,2019 by via donau
 --   – Österreichische Wasserstraßen-Gesellschaft mbH
 -- Software engineering by Intevation GmbH
 
 -- Author(s):
 --  * Sascha Wilde <wilde@intevation.de>
 
+CREATE OR REPLACE FUNCTION search_hectometre(search_string text) RETURNS jsonb
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
+  AS $$
+DECLARE
+  _result jsonb;
+BEGIN
+  IF search_string SIMILAR TO '[0-9]+' THEN
+    SELECT COALESCE(json_agg(r),'[]')
+      INTO _result
+      FROM (SELECT (location_code).hectometre || ' rhm' AS name,
+                        isrs_asText(location_code) AS locationcode,
+               ST_AsGeoJSON(geom)::json AS geom, 'rhm' AS type
+        FROM waterway.distance_marks_virtual
+        WHERE (location_code).hectometre = search_string::int) r;
+  ELSIF is_ISRSstring(search_string) THEN
+    SELECT COALESCE(json_agg(r),'[]')
+      INTO _result
+      FROM (SELECT (location_code).hectometre || ' rhm' AS name,
+                        isrs_asText(location_code) AS locationcode,
+               ST_AsGeoJSON(geom)::json AS geom, 'rhm' AS type
+        FROM waterway.distance_marks_virtual
+        WHERE location_code = isrs_fromText(search_string)) r;
+  ELSE
+    _result='[]';
+  END IF;
+  RETURN _result;
+END;
+$$;
+
 CREATE OR REPLACE FUNCTION search_bottlenecks(search_string text) RETURNS jsonb
-  LANGUAGE plpgsql
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
   AS $$
 DECLARE
   _result jsonb;
@@ -20,7 +49,7 @@
   SELECT COALESCE(json_agg(r),'[]')
     INTO _result
     FROM (SELECT objnam AS name,
-                 ST_AsGeoJSON(ST_Centroid(area))::json AS geom,
+                 ST_AsGeoJSON(ST_Envelope(area::geometry))::json AS geom,
                  'bottleneck' AS type
             FROM waterway.bottlenecks
             WHERE objnam ILIKE '%' || search_string || '%'
@@ -30,7 +59,7 @@
 $$;
 
 CREATE OR REPLACE FUNCTION search_cities(search_string text) RETURNS jsonb
-  LANGUAGE plpgsql
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
   AS $$
 DECLARE
   _result jsonb;
@@ -52,25 +81,38 @@
 $$;
 
 CREATE OR REPLACE FUNCTION search_gauges(search_string text) RETURNS jsonb
-  LANGUAGE plpgsql
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
   AS $$
 DECLARE
   _result jsonb;
 BEGIN
-  SELECT COALESCE(json_agg(r),'[]')
-    INTO _result
-    FROM (SELECT objname AS name,
-                 ST_AsGeoJSON(geom)::json AS geom,
-                 'gauge' AS type
-            FROM waterway.gauges
-            WHERE objname ILIKE '%' || search_string || '%'
-          ORDER BY name) r;
+  IF is_ISRSstring(search_string) THEN
+    SELECT COALESCE(json_agg(r),'[]')
+      INTO _result
+      FROM (
+         SELECT objname AS name,
+                   ST_AsGeoJSON(geom)::json AS geom,
+                   'gauge' AS type
+              FROM waterway.gauges
+              WHERE NOT erased AND location = isrs_fromText(search_string)
+            ORDER BY name) r;
+  ELSE
+    SELECT COALESCE(json_agg(r),'[]')
+      INTO _result
+      FROM (
+         SELECT objname AS name,
+                   ST_AsGeoJSON(geom)::json AS geom,
+                   'gauge' AS type
+              FROM waterway.gauges
+              WHERE NOT erased AND objname ILIKE '%' || search_string || '%'
+            ORDER BY name) r;
+  END IF;
   RETURN _result;
 END;
 $$;
 
 CREATE OR REPLACE FUNCTION search_sections(search_string text) RETURNS jsonb
-  LANGUAGE plpgsql
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
   AS $$
 DECLARE
   _result jsonb;
@@ -78,7 +120,7 @@
   SELECT COALESCE(json_agg(r),'[]')
     INTO _result
     FROM (SELECT objnam AS name,
-                 ST_AsGeoJSON(ST_Centroid(area))::json AS geom,
+                 ST_AsGeoJSON(ST_Envelope(area::geometry))::json AS geom,
                  'section' AS type
             FROM waterway.sections
             WHERE objnam ILIKE '%' || search_string || '%'
@@ -88,13 +130,34 @@
 END;
 $$;
 
+CREATE OR REPLACE FUNCTION search_stretches(search_string text) RETURNS jsonb
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
+  AS $$
+DECLARE
+  _result jsonb;
+BEGIN
+  SELECT COALESCE(json_agg(r),'[]')
+    INTO _result
+    FROM (SELECT objnam AS name,
+                 ST_AsGeoJSON(ST_Envelope(area::geometry))::json AS geom,
+                 'stretch' AS type
+            FROM waterway.stretches
+            WHERE objnam ILIKE '%' || search_string || '%'
+               OR nobjnam ILIKE '%' || search_string || '%'
+          ORDER BY name) r;
+  RETURN _result;
+END;
+$$;
+
 CREATE OR REPLACE FUNCTION search_most(search_string text) RETURNS jsonb
-  LANGUAGE plpgsql
+  LANGUAGE plpgsql STABLE PARALLEL SAFE
   AS $$
 BEGIN
-  RETURN search_bottlenecks(search_string)
+  RETURN search_hectometre(search_string)
+         || search_bottlenecks(search_string)
          || search_gauges(search_string)
          || search_sections(search_string)
+         || search_stretches(search_string)
          || search_cities(search_string);
 END;
 $$;