changeset 4815:69657aab46ec

translation: merge translations from weblate
author Fadi Abbud <fadi.abbud@intevation.de>
date Wed, 30 Oct 2019 11:31:12 +0100
parents ad2ad7bae4a6 (diff) ef6a2b9cec43 (current diff)
children 1d35d78019ca
diffstat 73 files changed, 1943 insertions(+), 1078 deletions(-)
--- a/.hgtags	Wed Oct 23 12:37:01 2019 +0200
+++ b/.hgtags	Wed Oct 30 11:31:12 2019 +0100
@@ -16,3 +16,9 @@
 b5619087e3e909645eeab9e3f198667692895382 v4-preview20190918
 8a6c410f6f03ca8f50022c11f17d0bb2b86215f5 v4-preview20190930
 a92239475590146dc6ad99fc4a3f4ced857c73f6 v4
+415c1993c7fe1500d053269761f09aa5ef7b937c v4.1
+415c1993c7fe1500d053269761f09aa5ef7b937c v4.1
+340b375cf3a13a7d74327c560c7ab0aeef2813c0 v4.1
+eb310f7e53b0cdf3f54ffc79dbf546d2275a9259 v4.2
+eb310f7e53b0cdf3f54ffc79dbf546d2275a9259 v4.2
+fc082c611b8bdab3e2139dd88a6892dc806878e7 v4.2
--- a/client/.env	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/.env	Wed Oct 30 11:31:12 2019 +0100
@@ -1,5 +1,6 @@
 #Name of the application, e.g. displayed at login time
 VUE_APP_TITLE=Waterway Monitoring system
+VUE_APP_WINDOWTITLE=
 
 #Backend URL
 VUE_APP_API_URL=/api/
@@ -14,4 +15,4 @@
 VUE_APP_SILENCE_TRANSLATIONWARNINGS =
 
 #Url of user manual
-VUE_APP_USER_MANUAL_URL=
\ No newline at end of file
+VUE_APP_USER_MANUAL_URL=
--- a/client/package.json	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/package.json	Wed Oct 30 11:31:12 2019 +0100
@@ -1,6 +1,6 @@
 {
   "name": "gemmajs",
-  "version": "4.1.0-dev",
+  "version": "4.3.0-dev",
   "license": "AGPL-3.0-or-later",
   "repository": {
     "type": "hg",
--- a/client/public/index.html	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/public/index.html	Wed Oct 30 11:31:12 2019 +0100
@@ -16,7 +16,7 @@
     <meta http-equiv="X-UA-Compatible" content="IE=edge" />
     <meta name="viewport" content="width=device-width,initial-scale=1.0" />
     <link rel="icon" href="<%= BASE_URL %>favicon.ico" />
-    <title>gemmajs</title>
+    <title><%= process.env.VUE_APP_WINDOWTITLE || VUE_APP_TITLE %></title>
     <style>
       html {
         height: 100%;
--- a/client/src/assets/application.scss	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/assets/application.scss	Wed Oct 30 11:31:12 2019 +0100
@@ -20,7 +20,7 @@
 $icon-width: 2rem;
 $large-offset: 2rem;
 $offset: 1rem;
-$sidebar-width: 16rem;
+$sidebar-width: 13rem;
 $slight-transparent: 0.96;
 $small-offset: 0.5rem;
 $smaller: 0.9rem;
@@ -173,8 +173,8 @@
 }
 
 .btn-xs {
-  padding: .3rem 0.4rem;
-  font-size: .75rem;
+  padding: 0.3rem 0.4rem;
+  font-size: 0.75rem;
   line-height: 1;
 }
 
@@ -223,7 +223,7 @@
   height: 50% !important;
 }
 
-.custom-control-input:checked~.custom-control-label::before {
-    border-color: $color-info;
-    background-color: $color-info;
+.custom-control-input:checked ~ .custom-control-label::before {
+  border-color: $color-info;
+  background-color: $color-info;
 }
--- a/client/src/components/App.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/App.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -91,7 +91,7 @@
     },
     searchContainer() {
       return [
-        "ml-2",
+        "ml-1",
         {
           wide: this.showSearchbar
         }
--- a/client/src/components/Contextbox.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/Contextbox.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -80,7 +80,7 @@
   background: #fff;
 }
 .contextbox > div:last-child {
-  width: 668px;
+  width: 795px;
 }
 
 .contextboxcollapsed {
@@ -89,7 +89,7 @@
 }
 
 .contextboxextended {
-  max-width: 668px;
+  max-width: 795px;
 }
 
 .close-contextbox {
--- a/client/src/components/Pdftool.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/Pdftool.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -208,12 +208,7 @@
     },
     filename() {
       let filename = "map";
-      if (
-        this.bottleneckForPrint &&
-        this.openLayersMap()
-          .getLayer("BOTTLENECKISOLINE")
-          .getVisible()
-      ) {
+      if (this.bottleneckForPrint) {
         // TODO: Check if the view contains the selected bottleneck
         // to avoid including bottleneck info in pdf in case view has changed to another location
         filename = `BN-${sanitize(this.bottleneckForPrint).replace(/ /g, "-")}`;
--- a/client/src/components/Search.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/Search.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -85,7 +85,7 @@
 <style lang="scss" scoped>
 .searchcontainer {
   opacity: 0.96;
-  width: 668px;
+  width: 795px;
 }
 
 .searchcontainer .searchbar {
@@ -94,7 +94,7 @@
 }
 
 .searchgroup {
-  width: 635px;
+  width: 763px;
   overflow: hidden;
 }
 
--- a/client/src/components/fairway/AvailableFairwayDepth.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/fairway/AvailableFairwayDepth.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -37,10 +37,18 @@
           </button>
           <a
             :href="dataLink"
-            :download="csvFileName"
+            :download="`${fileName}.csv`"
             class="mt-2 btn btn-sm btn-info w-100"
             ><translate>Download CSV</translate></a
           >
+          <a
+            @click="downloadImage('AFDpng', title)"
+            id="AFDpng"
+            class="btn btn-sm btn-info text-white d-block w-100 mt-2"
+            :download="`${fileName}.png`"
+          >
+            <translate>Download Image</translate>
+          </a>
         </div>
         <div class="btn-group-toggle w-100 mt-2">
           <label
@@ -205,6 +213,13 @@
     dataLink() {
       return `data:text/csv;charset=utf-8, ${encodeURIComponent(this.csv)}`;
     },
+    fileName() {
+      if (!this.frequencyD) return;
+      return this.downloadFilename(
+        this.$gettext("FairwayAvailability"),
+        this.featureName
+      );
+    },
     csvFileName() {
       if (!this.frequencyD) return;
       return (
@@ -240,6 +255,83 @@
     }
   },
   methods: {
+    addLegendToCanvas(ctx, { width, height }) {
+      let x = width / 20,
+        y = height - 35;
+      ctx.font = "14px sans-serif";
+      ctx.textAlign = "start";
+      if (this.legend[3]) {
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.LDC;
+        ctx.strokeStyle = this.$options.COLORS.LDC;
+        ctx.rect(x, y, 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[0], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.HIGHEST;
+        ctx.strokeStyle = this.$options.COLORS.HIGHEST;
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[1], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.REST[1];
+        ctx.strokeStyle = this.$options.COLORS.REST[1];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[2], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.REST[0];
+        ctx.strokeStyle = this.$options.COLORS.REST[0];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[3], x + 40, y + 13);
+        ctx.closePath();
+      } else {
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.LDC;
+        ctx.strokeStyle = this.$options.COLORS.LDC;
+        ctx.rect(x, y, 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[0], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.HIGHEST;
+        ctx.strokeStyle = this.$options.COLORS.HIGHEST;
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[1], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.COLORS.REST[0];
+        ctx.strokeStyle = this.$options.COLORS.REST[0];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legend[2], x + 40, y + 13);
+        ctx.closePath();
+      }
+    },
     initDiagramValues() {
       this.selectedFairwayAvailabilityFeatureD = this.selectedFairwayAvailabilityFeature;
       this.fromDate = this.from;
@@ -284,12 +376,7 @@
         templateData: this.templateData,
         diagramTitle: title
       });
-      this.pdf.doc.save(
-        this.downloadFilename(
-          this.$gettext("FairwayAvailability"),
-          this.featureName
-        ) + ".pdf"
-      );
+      this.pdf.doc.save(this.fileName + ".pdf");
     },
     addDiagramLegend(position, offset, color) {
       let x = offset.x,
@@ -397,6 +484,12 @@
         .attr("width", "100%")
         .attr("height", "100%");
       diagram.append("g");
+      diagram
+        .append("g")
+        .append("rect")
+        .attr("width", "100%")
+        .attr("height", "100%")
+        .attr("fill", "#ffffff");
       const yScale = d3
         .scaleLinear()
         .domain(this.frequencyToRange)
--- a/client/src/components/fairway/AvailableFairwayDepthLNWL.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/fairway/AvailableFairwayDepthLNWL.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -37,10 +37,18 @@
           </button>
           <a
             :href="dataLink"
-            :download="csvFileName"
+            :download="`${fileName}.csv`"
             class="mt-2 btn btn-sm btn-info w-100"
             ><translate>Download CSV</translate></a
           >
+          <a
+            @click="downloadImage('AFDvsLNWLpng', title)"
+            id="AFDvsLNWLpng"
+            class=" btn btn-sm btn-info text-white d-block w-100 mt-2"
+            :download="`${fileName}.png`"
+          >
+            <translate>Download Image</translate>
+          </a>
         </div>
         <div class="btn-group-toggle w-100 mt-2">
           <label
@@ -204,13 +212,11 @@
     dataLink() {
       return `data:text/csv;charset=utf-8, ${encodeURIComponent(this.csv)}`;
     },
-    csvFileName() {
+    fileName() {
       if (!this.frequencyD) return;
-      return (
-        this.downloadFilename(
-          this.$gettext("FairwayAvailabilityVsLNWL"),
-          this.featureName
-        ) + ".csv"
+      return this.downloadFilename(
+        this.$gettext("FairwayAvailabilityVsLNWL"),
+        this.featureName
       );
     },
     availability() {
@@ -230,6 +236,83 @@
     }
   },
   methods: {
+    addLegendToCanvas(ctx, { width, height }) {
+      let x = width / 20,
+        y = height - 35;
+      ctx.font = "14px sans-serif";
+      ctx.textAlign = "start";
+      if (this.legendLNWL[3]) {
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.LWNLCOLORS.LDC;
+        ctx.strokeStyle = this.$options.LWNLCOLORS.LDC;
+        ctx.rect(x, y, 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[0], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.AFDCOLORS[2];
+        ctx.strokeStyle = this.$options.AFDCOLORS[2];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[1], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.AFDCOLORS[1];
+        ctx.strokeStyle = this.$options.AFDCOLORS[1];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[2], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.AFDCOLORS[0];
+        ctx.strokeStyle = this.$options.AFDCOLORS[0];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[2], x + 40, y + 13);
+        ctx.closePath();
+      } else {
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.LWNLCOLORS.LDC;
+        ctx.strokeStyle = this.$options.LWNLCOLORS.LDC;
+        ctx.rect(x, y, 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[0], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.AFDCOLORS[2];
+        ctx.strokeStyle = this.$options.AFDCOLORS[2];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[1], x + 40, y + 13);
+        ctx.closePath();
+
+        ctx.beginPath();
+        ctx.fillStyle = this.$options.AFDCOLORS[0];
+        ctx.strokeStyle = this.$options.AFDCOLORS[0];
+        ctx.rect(x, (y += 25), 35, 20);
+        ctx.fill();
+        ctx.stroke();
+        ctx.fillStyle = "black";
+        ctx.fillText(this.legendLNWL[2], x + 40, y + 13);
+        ctx.closePath();
+      }
+    },
     initDiagramValues() {
       this.selectedFairwayAvailabilityFeatureD = this.selectedFairwayAvailabilityFeature;
       this.fromDate = this.from;
@@ -395,6 +478,12 @@
         .attr("width", "100%")
         .attr("height", "100%");
       diagram = diagram.append("g");
+      diagram
+        .append("g")
+        .append("rect")
+        .attr("width", "100%")
+        .attr("height", "100%")
+        .attr("fill", "#ffffff");
       const yScale = d3
         .scaleLinear()
         .domain([0, 100])
--- a/client/src/components/fairway/Fairwayprofile.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/fairway/Fairwayprofile.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -90,10 +90,17 @@
           >
             <translate>Export to PDF</translate>
           </button>
+          <a
+            @click="downloadImage('fairwaypng', title)"
+            id="fairwaypng"
+            class="btn btn-sm btn-info text-white d-block w-100 mt-2"
+            :download="`${fileName}.png`"
+          >
+            <translate>Export as Image</translate>
+          </a>
         </div>
       </DiagramLegend>
       <div
-        ref="pdfContainer"
         id="pdfContainer"
         class="d-flex flex-fill justify-content-center align-items-center diagram-container position-relative"
       >
@@ -242,7 +249,14 @@
         : this.bottleneck.get("gm_waterlevel");
     },
     refWaterlevel() {
+      if (!this.selectedSurvey) return 0;
       return this.selectedSurvey.waterlevel_value;
+    },
+    fileName() {
+      return this.downloadFilename(
+        this.$gettext("Fairwayprofile"),
+        this.selectedBottleneck
+      );
     }
   },
   watch: {
@@ -279,6 +293,77 @@
     }
   },
   methods: {
+    addLegendToCanvas(ctx, { width, height }) {
+      let x = width / 12,
+        y = height - 55;
+      ctx.font = "12px sans-serif";
+      ctx.textAlign = "start";
+
+      ctx.beginPath();
+      ctx.fillStyle = "#5995ff";
+      ctx.strokeStyle = "#5995ff";
+      ctx.arc(x, y, 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Water"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = this.getLayerStyle(1).fillColor;
+      ctx.strokeStyle = this.getLayerStyle(1).strokeColor;
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.setLineDash([0.8], 0);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Fairway (LOS 1)"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = this.getLayerStyle(2).fillColor;
+      ctx.strokeStyle = this.getLayerStyle(2).strokeColor;
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.setLineDash([1.8], 0);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Fairway (LOS 2)"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = this.getLayerStyle(3).fillColor;
+      ctx.strokeStyle = this.getLayerStyle(3).strokeColor;
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.setLineDash([]);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Fairway (LOS 3)"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "#4a2e06";
+      ctx.strokeStyle = "black";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.setLineDash([]);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Sediment"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "rgba(74, 47, 6, 0.6)";
+      ctx.strokeStyle = "#943007";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.setLineDash([]);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Sediment (compare)"), x + 14, y + 5);
+      ctx.closePath();
+    },
     close() {
       this.$store.commit(
         "application/paneSetup",
@@ -331,12 +416,7 @@
         templateData: this.templateData,
         diagramTitle: fairwayInfo
       });
-      this.pdf.doc.save(
-        this.downloadFilename(
-          this.$gettext("Fairwayprofile"),
-          this.selectedBottleneck
-        ) + ".pdf"
-      );
+      this.pdf.doc.save(this.fileName + ".pdf");
     },
 
     // Diagram legend
@@ -364,8 +444,6 @@
       const toRGB = s => {
         let [, r, g, b] = s.match(/.*?(\d+), (\d+), (\d+), .*/);
         const toHex = n => {
-          console.log(n);
-          console.log(parseInt(n));
           let val = parseInt(n).toString(16);
           if (val.length === 1) return `0${val}`;
           return val;
@@ -436,6 +514,12 @@
       let svg = d3.select(element).append("svg");
       svg.attr("width", "100%");
       svg.attr("height", "100%");
+      svg
+        .append("g")
+        .append("rect")
+        .attr("width", "100%")
+        .attr("height", "100%")
+        .attr("fill", "#ffffff");
       const width = dimensions.width;
       const height = dimensions.mainHeight;
       const offsetY = 15;
@@ -485,11 +569,10 @@
       for (let data of this.fairwayData) {
         data.coordinates.forEach(coordinates => {
           const [startPoint, endPoint, depth] = coordinates;
+          const referenceDepth =
+            this.maxAlt * 1.1 + (this.waterlevel - this.refWaterlevel) / 100;
           let customdepth =
-            this.depth < this.maxAlt * 1.1
-              ? this.depth
-              : this.maxAlt * 1.1 +
-                (this.waterlevel - this.refWaterlevel) / 100;
+            this.depth < referenceDepth ? this.depth : referenceDepth;
           let fairwayArea = d3
             .area()
             .x(function(d) {
@@ -499,6 +582,12 @@
             .y1(function(d) {
               return yScaleRight(d.y);
             });
+          let strokColor = this.getLayerStyle(data.los).strokeColor;
+          // Convert stroke value to rgb() and opacity to pass them separately
+          let [r, g, b, opacity] = strokColor
+            .substring(5, strokColor.length - 1)
+            .split(",");
+          let rgb = `rgb(${r}, ${g}, ${b})`;
           graph
             .append("path")
             .datum([
@@ -506,7 +595,8 @@
               { x: endPoint, y: this.useCustomDepth ? customdepth : depth }
             ])
             .attr("fill", `${this.getLayerStyle(data.los).fillColor}`)
-            .attr("stroke", `${this.getLayerStyle(data.los).strokeColor}`)
+            .attr("stroke", rgb)
+            .attr("stroke-opacity", opacity)
             .attr("stroke-dasharray", this.getLayerStyle(data.los).strokeDash)
             .attr("d", fairwayArea)
             .attr("transform", `translate(0 ${-offsetY})`);
--- a/client/src/components/gauge/HydrologicalConditions.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/gauge/HydrologicalConditions.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -75,10 +75,21 @@
             { disabled: !longtermWaterlevels.length }
           ]"
           :href="csvLink"
-          :download="csvFileName"
+          :download="`${fileName}.csv`"
         >
           <translate>Export as CSV</translate>
         </a>
+        <a
+          @click="downloadImage('hydrologicalpng', title)"
+          id="hydrologicalpng"
+          :class="[
+            'btn btn-sm btn-info text-white d-block w-100 mt-2',
+            { disabled: !longtermWaterlevels.length }
+          ]"
+          :download="`${fileName}.png`"
+        >
+          <translate>Export as Image</translate>
+        </a>
       </DiagramLegend>
       <div
         class="d-flex flex-fill justify-content-center align-items-center"
@@ -107,7 +118,7 @@
  * Markus Kottländer <markus.kottlaender@intevation.de>
  * Fadi Abbud <fadi.abbud@intevation.de>
  */
-
+import app from "@/main";
 import { mapState, mapGetters } from "vuex";
 import * as d3 from "d3";
 import debounce from "debounce";
@@ -163,13 +174,11 @@
     csvLink() {
       return "data:text/csv;charset=utf-8," + encodeURIComponent(this.csvData);
     },
-    csvFileName() {
+    fileName() {
       if (!this.selectedGaugeD || !this.longtermIntervalD) return;
-      return (
-        this.downloadFilename(
-          this.$gettext("HydrologicalCondition"),
-          this.selectedGaugeD.properties.objname
-        ) + ".csv"
+      return this.downloadFilename(
+        this.$gettext("HydrologicalCondition"),
+        this.selectedGaugeD.properties.objname
       );
     },
     csvData() {
@@ -213,6 +222,61 @@
     }
   },
   methods: {
+    addLegendToCanvas(ctx, { width, height }) {
+      let x = width / 12,
+        y = height - 25;
+      ctx.font = "12px sans-serif";
+      ctx.textAlign = "start";
+      ctx.beginPath();
+      ctx.fillStyle = "red";
+      ctx.strokeStyle = "red";
+      ctx.arc(x, y, 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.yearCompare, x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "orange";
+      ctx.strokeStyle = "orange";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Q25%"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "black";
+      ctx.strokeStyle = "black";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Median"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "purple";
+      ctx.strokeStyle = "purple";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Q75%"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "lightsteelblue";
+      ctx.strokeStyle = "lightsteelblue";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Long-term Amplitude"), x + 14, y + 5);
+      ctx.closePath();
+    },
     initialDiagramValues() {
       this.selectedGaugeD = this.selectedGauge;
       this.longtermIntervalD = this.longtermInterval;
@@ -236,12 +300,7 @@
         templateData: this.templateData,
         diagramTitle: diagramTitle
       });
-      this.pdf.doc.save(
-        this.downloadFilename(
-          this.$gettext("HydrologicalCondition"),
-          this.selectedGaugeD.properties.objname
-        ) + ".pdf"
-      );
+      this.pdf.doc.save(this.fileName + ".pdf");
     },
     applyChange() {
       if (this.form.template.hasOwnProperty("properties")) {
@@ -374,6 +433,13 @@
         .attr("width", "100%")
         .attr("height", "100%");
 
+      svg
+        .append("g")
+        .append("rect")
+        .attr("width", "100%")
+        .attr("height", "100%")
+        .attr("fill", "#ffffff");
+
       // create container for main diagram
       const diagram = svg
         .append("g")
@@ -615,8 +681,8 @@
           ? d3.timeFormat("%H:%M")
           : d3.timeMonth(date) < date
           ? d3.timeWeek(date) < date
-            ? d3.timeFormat("%b %d")
-            : d3.timeFormat("%b %d")
+            ? d3.timeFormat(app.$gettext("%a %d"))
+            : d3.timeFormat(app.$gettext("%b %d"))
           : d3.timeFormat("%B"))(date);
       };
       return {
--- a/client/src/components/gauge/Waterlevel.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/gauge/Waterlevel.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -60,11 +60,21 @@
               { disabled: !waterlevels.length }
             ]"
             :href="csvLink"
-            :download="csvFileName"
+            :download="`${fileName}.csv`"
           >
             <translate>Export as CSV</translate>
           </a>
-
+          <a
+            @click="downloadImage('waterlevelpng', title)"
+            id="waterlevelpng"
+            :class="[
+              'btn btn-sm btn-info text-white d-block w-100 mt-2',
+              { disabled: !waterlevels.length }
+            ]"
+            :download="`${fileName}.png`"
+          >
+            <translate>Export as Image</translate>
+          </a>
           <!--
           <button
             @click="downloadSVG"
@@ -117,6 +127,7 @@
  * * Fadi Abbud <fadi.abbud@intevation.de>
  */
 
+import app from "@/main";
 import { mapState, mapGetters } from "vuex";
 import * as d3Base from "d3";
 import { lineChunked } from "d3-line-chunked";
@@ -127,6 +138,7 @@
 import { displayError } from "@/lib/errors";
 import { defaultDiagramTemplate } from "@/lib/DefaultDiagramTemplate";
 import { localeDateString } from "@/lib/datelocalization";
+
 // we should load only d3 modules we need but for now we'll go with the lazy way
 // https://www.giacomodebidda.com/how-to-import-d3-plugins-with-webpack/
 // d3-line-chunked plugin: https://github.com/pbeshai/d3-line-chunked
@@ -182,13 +194,10 @@
         "data:text/csv;charset=utf-8," + encodeURIComponent(this.waterlevelsCSV)
       );
     },
-    csvFileName() {
-      if (!this.dateFromD || !this.dateToD) return "";
-      return (
-        this.downloadFilename(
-          this.$gettext("Waterlevel"),
-          this.selectedGauge.properties.objname
-        ) + ".csv"
+    fileName() {
+      return this.downloadFilename(
+        this.$gettext("Waterlevel"),
+        this.selectedGauge.properties.objname
       );
     },
     hasPredictions() {
@@ -208,6 +217,49 @@
     }
   },
   methods: {
+    addLegendToCanvas(ctx, { width, height }) {
+      let x = width / 10,
+        y = height - 25;
+      ctx.font = "12px sans-serif";
+      ctx.textAlign = "start";
+
+      ctx.beginPath();
+      ctx.fillStyle = "steelblue";
+      ctx.strokeStyle = "white";
+      ctx.arc(x, y, 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Waterlevel"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "#90b4d2";
+      ctx.strokeStyle = "#90b4d2";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.closePath();
+      ctx.beginPath();
+      ctx.fillStyle = "#4682B4";
+      ctx.strokeStyle = "#4682B4";
+      ctx.arc(x, y, 2, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("prediction"), x + 14, y + 5);
+      ctx.closePath();
+
+      ctx.beginPath();
+      ctx.fillStyle = "rgba(0, 255, 0, 0.1)";
+      ctx.strokeStyle = "rgba(0, 255, 0, 0.1)";
+      ctx.arc(x, (y += 20), 8, 0, 2 * Math.PI);
+      ctx.fill();
+      ctx.stroke();
+      ctx.fillStyle = "black";
+      ctx.fillText(this.$gettext("Navigable Range"), x + 14, y + 5);
+      ctx.closePath();
+    },
     initialDiagramValues() {
       this.dateFromD = this.dateFrom;
       this.dateToD = this.dateTo;
@@ -239,12 +291,7 @@
         templateData: this.templateData,
         diagramTitle: diagramTitle
       });
-      this.pdf.doc.save(
-        this.downloadFilename(
-          this.$gettext("Waterlevel"),
-          this.selectedGauge.properties.objname
-        ) + ".pdf"
-      );
+      this.pdf.doc.save(this.fileName + ".pdf");
     },
     applyChange() {
       if (this.form.template.hasOwnProperty("properties")) {
@@ -365,8 +412,8 @@
           ? d3.timeFormat("%H:%M")
           : d3.timeMonth(date) < date
           ? d3.timeWeek(date) < date
-            ? d3.timeFormat("%a %d")
-            : d3.timeFormat("%b %d")
+            ? d3.timeFormat(app.$gettext("%a %d"))
+            : d3.timeFormat(app.$gettext("%b %d"))
           : d3.timeYear(date) < date
           ? d3.timeFormat("%B")
           : d3.timeFormat("%Y"))(date);
@@ -399,6 +446,14 @@
         .append("svg")
         .attr("width", "100%")
         .attr("height", "100%");
+      // add white background in the size of the svg
+      // to solve alpha-channel problem when using canvg to export image
+      svg
+        .append("g")
+        .append("rect")
+        .attr("width", "100%")
+        .attr("height", "100%")
+        .attr("fill", "#ffffff");
 
       // create container for main diagram
       const diagram = svg
--- a/client/src/components/identify/formatter.js	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/identify/formatter.js	Wed Oct 30 11:31:12 2019 +0100
@@ -14,7 +14,7 @@
         p.key === "gm_measuredate") &&
       p.val !== null
     ) {
-      p.val = filters.dateTime(p.val);
+      p.val = filters.surveyDate(p.val);
     }
 
     // remove certain props
--- a/client/src/components/importoverview/ImportOverview.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/importoverview/ImportOverview.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -84,6 +84,7 @@
           { id: 'country', title: `${countryLabel}`, width: '55px' },
           { id: 'signer', title: `${signerLabel}`, width: '80px' },
           { id: 'state', title: `${statusLabel}`, width: '72px' },
+          { id: 'changed', title: `${changedLabel}`, width: '138px' },
           { id: 'warnings', icon: 'exclamation-triangle', width: '44px' }
         ]"
       />
@@ -135,6 +136,7 @@
 import { sortTable } from "@/lib/mixins";
 import { HTTP } from "@/lib/http";
 import app from "@/main";
+import { saveAs } from "file-saver";
 import {
   startOfDay,
   startOfHour,
@@ -190,6 +192,9 @@
     enqueuedLabel() {
       return this.$gettext("Enqueued");
     },
+    changedLabel() {
+      return this.$gettext("Changed");
+    },
     ownerLabel() {
       return this.$gettext("Owner");
     },
@@ -252,7 +257,7 @@
               }
             });
             HTTP.get(
-              `/imports?from=${encodeURIComponent(
+              `/imports/export?from=${encodeURIComponent(
                 format(startOfDay(new Date(from)), "YYYY-MM-DDTHH:mm:ssZ")
               )}&to=${encodeURIComponent(
                 format(endOfDay(new Date(to)), "YYYY-MM-DDTHH:mm:ssZ")
@@ -262,31 +267,16 @@
               }
             )
               .then(response => {
-                const { imports } = response.data;
+                const imports = response.data;
                 app.$snotify.clear();
                 if (!imports) return;
-                const csvLink =
-                  "data:text/csv;charset=utf-8," +
-                  encodeURIComponent(
-                    "id, kind, enqueued, user, country, signer, state, warnings\n" +
-                      imports
-                        .map(el => {
-                          return ` ${el.id}, ${el.kind}, ${
-                            el.enqueued
-                          }, ${el.user || " "}, ${this.userCountries[el.user] ||
-                            " "}, ${el.signer || " "}, ${el.state ||
-                            " "}, ${el.warnings || " "}`;
-                        })
-                        .join("\n")
-                  );
-                let element = document.createElement("a");
-                element.setAttribute("download", "log.txt");
-                element.setAttribute("href", csvLink);
-                document.querySelector("body").appendChild(element);
-                element.click();
-                document.querySelector("body").removeChild(element);
+                const csvFile = new Blob([new TextEncoder().encode(imports)], {
+                  type: "text/csv"
+                });
+                saveAs(csvFile, "log.csv");
               })
               .catch(error => {
+                console.log(error);
                 const { status, data } = error.response;
                 app.$snotify.clear();
                 displayError({
--- a/client/src/components/importoverview/LogEntry.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/importoverview/LogEntry.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -27,6 +27,9 @@
     <div style="width: 72px;" :class="stateStyle">
       {{ entry.state }}
     </div>
+    <div style="width: 138px;" class="table-cell center">
+      {{ entry.changed | dateTime }}
+    </div>
     <div style="width: 44px;" class="table-cell center">
       <font-awesome-icon
         v-if="entry.warnings || entry.errors"
--- a/client/src/components/layers/Layerselect.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/layers/Layerselect.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -137,7 +137,7 @@
     },
     loadLegendImage(layer, storeTarget) {
       HTTP.get(
-        `/internal/wms?REQUEST=GetLegendGraphic&VERSION=1.3.0&FORMAT=image/png&WIDTH=20&HEIGHT=20&LAYER=${layer}&legend_options=columns:4;fontAntiAliasing:true&SCALE=5000`,
+        `/internal/wms?REQUEST=GetLegendGraphic&VERSION=1.3.0&FORMAT=image/png&WIDTH=20&HEIGHT=20&LAYER=${layer}&legend_options=columns:4;fontAntiAliasing:true&SCALE=4000`,
         {
           headers: {
             Accept: "image/png",
--- a/client/src/components/layers/LegendElement.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/layers/LegendElement.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -150,6 +150,7 @@
         controls: [],
         interactions: [],
         view: new View({
+          enableRotation: false,
           center: [0, 0],
           zoom: 3,
           projection: "EPSG:4326"
--- a/client/src/components/map/Map.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/map/Map.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -104,6 +104,7 @@
       } else {
         this.map.setView(
           new View({
+            enableRotation: false,
             center: [this.extent.lon, this.extent.lat],
             minZoom: 5, // restrict zooming out to ~size of Europe for width 1000px
             zoom: this.extent.zoom,
@@ -229,6 +230,7 @@
         this.$store.commit(
           "map/syncedView",
           new View({
+            enableRotation: false,
             center: [this.extent.lon, this.extent.lat],
             minZoom: 5, // restrict zooming out to ~size of Europe for width 1000px
             zoom: this.extent.zoom,
--- a/client/src/components/sections/Sections.vue	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/sections/Sections.vue	Wed Oct 30 11:31:12 2019 +0100
@@ -48,6 +48,10 @@
                 />
               </button>
               <button
+                v-if="
+                  userCountry === 'global' ||
+                    section.properties.country === userCountry
+                "
                 class="btn btn-xs btn-dark"
                 @click="deleteSection(section)"
               >
@@ -114,6 +118,8 @@
     };
   },
   computed: {
+    ...mapState("user", ["user"]),
+    ...mapGetters("usermanagement", ["userCountries"]),
     ...mapState("application", ["searchQuery"]),
     ...mapGetters("map", ["openLayersMap"]),
     ...mapState("imports", ["sections"]),
@@ -131,6 +137,9 @@
     },
     reviewTooltip() {
       return this.$gettext("Review pending import");
+    },
+    userCountry() {
+      return this.userCountries[this.user];
     }
   },
   methods: {
--- a/client/src/components/systemconfiguration/defaults.js	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/components/systemconfiguration/defaults.js	Wed Oct 30 11:31:12 2019 +0100
@@ -7,9 +7,9 @@
   gm_forecast_vs_reality_nsc_24h: "-12.5",
   gm_forecast_vs_reality_nsc_72h: "-12.5",
   morphology_classbreaks:
-    "1:#ff00dd,1.5,1.7,1.9,2.1,2.3,2.5:#f25f20,2.7,2.9,3.1:#f7e40e,3.3,3.5,4:#8ad51a,4.5,5,5.5,6,6.5,7:#1414ff",
+    "-2.5:#5A3300,-2,-1.5,-1,-0.5,0:#D8B050,0.5:#FF0000,1,1.5,2:#FF9999,2.5:#A6B8FA,3,3.5,4,4.5,5,6:#0101FF,7:#88DD55,8,9,10,11,12:#146E33",
   morphology_classbreaks_compare:
-    "-2:#06b100,-1.9,-1.8,-1.7,-1.6,-1.5,-1.4,-1.3,-1.2,-1.1,-1:#1cc68e,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,-0.1,0:#c2c2c2,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1:#fff01a,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2:#f80012",
+    "-2:#06b100,-1.8,-1.6,-1.4,-1.2,-1:#1cc68e,-0.8,-0.6,-0.4,-0.2,0:#c2c2c2,0.2,0.4,0.6,0.8,1:#fff01a,1.2,1.4,1.6,1.8,2:#f80012",
   ecdis_wms_url: "https://service.d4d-portal.info/wms/",
   ecdis_wms_params: '{"LAYERS": "d4d", "VERSION": "1.1.1", "TILED": true}',
   feature_colours_bottlenecks_stroke: "#fa28ff",
--- a/client/src/lib/mixins.js	Wed Oct 23 12:37:01 2019 +0200
+++ b/client/src/lib/mixins.js	Wed Oct 30 11:31:12 2019 +0100
@@ -20,6 +20,7 @@
 import { HTTP } from "@/lib/http";
 import * as d3 from "d3";
 import sanitize from "sanitize-filename";
+import canvg from "canvg";
 
 /*eslint no-unused-vars: ["error", { "varsIgnorePattern": "[debugSVG|_]" }]*/
 const debugSVG = ({ svg, svgWidth, svgHeight }) => {
@@ -167,6 +168,54 @@
     ...mapState("user", ["user"])
   },
   methods: {
+    downloadImage(elementName, title) {
+      const offScreen = document.querySelector("#offScreen");
+      const DPI = 96;
+      const svgWidth = this.millimeter2pixels(428, DPI);
+      const svgHeight = this.millimeter2pixels(119, DPI);
+      offScreen.style.width = `${svgWidth}px`;
+      offScreen.style.height = `${svgHeight}px`;
+      let zoomLevel = this.zoomStore;
+      const layout = this.getPrintLayout(svgHeight, svgWidth);
+      this.renderTo({
+        element: offScreen,
+        dimensions: this.getDimensions({
+          svgWidth: svgWidth,
+          svgHeight: svgHeight,
+          ...layout
+        }),
+        zoomLevel // passing the zoom level to draw the diagram on pdf at this point
+      });
+      const diagramContainer = document.getElementById("offScreen");
+      const { clientHeight, clientWidth } = diagramContainer;
+      diagramContainer.querySelector("svg").setAttribute("width", clientWidth);
+      diagramContainer
+        .querySelector("svg")
+        .setAttribute("height", clientHeight + 180);
+      const svg = diagramContainer.querySelector("svg").outerHTML;
+      const canvas = document.createElement("canvas");
+      canvg(canvas, svg, { offsetY: 70 });
+      const ctx = canvas.getContext("2d");
+      // Draw white rectangular and place the title on it
+      ctx.beginPath();
+      ctx.fillStyle = "#ffffff";
+      ctx.fillRect(0, 0, clientWidth, 70);
+      ctx.stroke();
+      ctx.fillStyle = "steelblue";
+      ctx.font = "500 30px sans-serif";
+      ctx.textAlign = "center";
+      ctx.fillText(title, clientWidth / 2, 35);
+      ctx.closePath();
+      // Add diagramlegend
+      this.addLegendToCanvas(ctx, {
+        height: clientHeight + 100,
+        width: clientWidth
+      });
+
+      const imgData = canvas.toDataURL("image/png");
+      document.getElementById(elementName).setAttribute("href", imgData);
+      offScreen.removeChild(offScreen.firstChild);
+    },
     addDiagram(position, offset, width, height) {
       let x = offset.x,
         y = offset.y;
--- a/cmd/srsimplify/main.go	Wed Oct 23 12:37:01 2019 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-// This is Free Software under GNU Affero General Public License v >= 3.0
-// without warranty, see README.md and license for details.
-//
-// SPDX-License-Identifier: AGPL-3.0-or-later
-// License-Filename: LICENSES/AGPL-3.0.txt
-//
-// Copyright (C) 2019 by via donau
-//   – Österreichische Wasserstraßen-Gesellschaft mbH
-// Software engineering by Intevation GmbH
-//
-// Author(s):
-//  * Sascha L. Teichmann <sascha.teichmann@intevation.de>
-
-package main
-
-import (
-	"bufio"
-	"flag"
-	"fmt"
-	"io"
-	"log"
-	"os"
-	"strconv"
-	"strings"
-
-	"gemma.intevation.de/gemma/pkg/octree"
-)
-
-func loadXYZ(r io.Reader) (octree.MultiPointZ, error) {
-
-	scanner := bufio.NewScanner(r)
-
-	points := make(octree.MultiPointZ, 0, 2000000)
-
-	var x, y, z float64
-	var err error
-
-	for scanner.Scan() {
-		line := strings.TrimSpace(scanner.Text())
-		if len(line) == 0 || strings.HasPrefix(line, "#") {
-			continue
-		}
-		parts := strings.SplitN(line, " ", 3)
-		if len(parts) != 3 {
-			continue
-		}
-
-		if x, err = strconv.ParseFloat(parts[0], 64); err != nil {
-			return nil, err
-		}
-		if y, err = strconv.ParseFloat(parts[1], 64); err != nil {
-			return nil, err
-		}
-		if z, err = strconv.ParseFloat(parts[2], 64); err != nil {
-			return nil, err
-		}
-		points = append(points, octree.Vertex{X: x, Y: y, Z: z})
-	}
-
-	return points, nil
-}
-
-func storeXYZ(points octree.MultiPointZ, w io.Writer) error {
-	out := bufio.NewWriter(w)
-	for i := range points {
-		fmt.Fprintf(out, "%.5f,%.5f,%.5f\n",
-			points[i].X, points[i].Y, points[i].Z)
-	}
-	return out.Flush()
-}
-
-func main() {
-
-	var tolerance float64
-
-	flag.Float64Var(&tolerance, "t", 0.1, "accepted tolerance (shorthand)")
-	flag.Float64Var(&tolerance, "tolerance", 0.1, "accepted tolerance")
-
-	flag.Parse()
-
-	points, err := loadXYZ(os.Stdin)
-	if err != nil {
-		log.Fatalf("err: %v\n", err)
-	}
-
-	points = points.Simplify(tolerance)
-
-	if err := storeXYZ(points, os.Stdout); err != nil {
-		log.Fatalf("err: %v\n", err)
-	}
-}
--- a/pkg/controllers/diff.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/diff.go	Wed Oct 30 11:31:12 2019 +0100
@@ -21,7 +21,6 @@
 	"log"
 	"net/http"
 	"runtime"
-	"sync"
 	"time"
 
 	"gemma.intevation.de/gemma/pkg/auth"
@@ -84,21 +83,30 @@
   $2,
   ST_Transform(
     ST_Multi(
-      ST_CollectionExtract(
-        ST_MakeValid(
-          ST_Multi(
-             ST_Collectionextract(
-                ST_SimplifyPreserveTopology(ST_GeomFromWKB($4, $3::integer), $5),
-                3
-             )
-            )
-        ),
+      ST_Collectionextract(
+        ST_SimplifyPreserveTopology(ST_GeomFromWKB($4, $3::integer), $5),
         3
       )
     ),
     4326
   )
 `
+	commonDiffBBoxSQL = `
+WITH joined AS (
+  SELECT
+    sr.area      AS area,
+    sr.date_info AS date_info
+  FROM waterway.sounding_results sr
+  WHERE sr.bottleneck_id = $1
+),
+bbox AS (
+  SELECT ST_Extent(ST_intersection(
+    (SELECT ST_Transform(area::geometry, $2::int) FROM joined WHERE date_info = $3::date),
+    (SELECT ST_Transform(area::geometry, $2::int) FROM joined WHERE date_info = $4::date)
+  )) AS area
+)
+SELECT ST_XMin(area), ST_YMin(area), ST_XMax(area), ST_YMax(area) FROM bbox
+`
 )
 
 type (
@@ -169,6 +177,38 @@
 			dci.Minuend.Format(common.DateFormat))
 	}
 
+	epsg := minuendTree.EPSG()
+
+	var box octree.Box2D
+
+	switch err := conn.QueryRowContext(
+		ctx,
+		commonDiffBBoxSQL,
+		dci.Bottleneck,
+		epsg,
+		dci.Minuend.Time,
+		dci.Subtrahend.Time,
+	).Scan(&box.X1, &box.Y1, &box.X2, &box.Y2); {
+	case err == sql.ErrNoRows:
+		return 0, errors.New("No such intersection")
+	case err != nil:
+		return 0, err
+	}
+
+	if box.Empty() {
+		return 0, errors.New("Intersection is empty")
+	}
+
+	log.Printf("info: bbox of intersection: (%.2f, %.2f) - (%.2f, %.2f)\n",
+		box.X1, box.Y1, box.X2, box.Y2)
+
+	start = time.Now()
+	raster := octree.NewRaster(box, isoCellSize)
+	raster.Rasterize(minuendTree.Value)
+	log.Printf("info: rasterizing minuend took %v\n", time.Since(start))
+
+	minuendTree = nil
+
 	start = time.Now()
 
 	subtrahendTree, err := octree.FromCache(
@@ -187,85 +227,15 @@
 	}
 
 	// We need a slow path implementation for this.
-	epsg := minuendTree.EPSG()
 	if epsg != subtrahendTree.EPSG() {
 		return 0, errors.New("Calculating differences between two different " +
 			"EPSG code meshes are not supported, yet.")
 	}
 
 	start = time.Now()
-	points := minuendTree.Diff(subtrahendTree)
+	raster.Diff(subtrahendTree.Value)
 	log.Printf("info: A - B took %v\n", time.Since(start))
-
-	minuendTree, subtrahendTree = nil, nil
-
-	// The Triangulation and the loading of the clipping
-	// polygon can be done concurrently.
-
-	jobs := make(chan func())
-
-	wg := new(sync.WaitGroup)
-	for i := 0; i < 2; i++ {
-		wg.Add(1)
-		go func() {
-			defer wg.Done()
-			for job := range jobs {
-				job()
-			}
-		}()
-	}
-
-	var (
-		tri     *octree.Triangulation
-		triErr  error
-		clip    *octree.Polygon
-		clipErr error
-	)
-
-	jobs <- func() {
-		start := time.Now()
-		tri, triErr = points.Triangulate()
-		log.Printf("info: triangulation took %v\n", time.Since(start))
-	}
-
-	jobs <- func() {
-		start := time.Now()
-		clip, clipErr = octree.LoadClippingPolygon(
-			ctx, conn,
-			epsg,
-			dci.Bottleneck,
-			dci.Minuend.Time,
-			dci.Subtrahend.Time)
-		log.Printf("info: loading clipping polygon took %v\n", time.Since(start))
-	}
-	close(jobs)
-	wg.Wait()
-
-	switch {
-	case triErr != nil && clipErr != nil:
-		return 0, fmt.Errorf("%v %v", triErr, clipErr)
-	case triErr != nil:
-		return 0, triErr
-	case clipErr != nil:
-		return 0, clipErr
-	}
-
-	start = time.Now()
-	tin := tri.Tin()
-	removed := tin.Clip(clip)
-	clip = nil
-	log.Printf("info: clipping TIN took %v\n", time.Since(start))
-
-	log.Printf("info: Number of triangles to clip: %d\n", len(removed))
-
-	start = time.Now()
-	var tree octree.STRTree
-
-	tree.BuildWithout(tin, removed)
-
-	log.Printf("info: Building final mesh took: %v\n", time.Since(start))
-
-	start = time.Now()
+	subtrahendTree = nil
 
 	// XXX: Maybe we should start this transaction earlier!?
 	var tx *sql.Tx
@@ -274,6 +244,13 @@
 	}
 	defer tx.Rollback()
 
+	zMin, zMax, ok := raster.ZExtent()
+	if !ok {
+		return 0, errors.New("Scans do not have common points")
+	}
+
+	log.Printf("info: z range: %.3f - %.3f\n", zMin, zMax)
+
 	var heights []float64
 
 	heights, err = octree.LoadClassBreaks(
@@ -282,13 +259,12 @@
 	if err != nil {
 		log.Printf("warn: Loading class breaks failed: %v\n", err)
 		err = nil
-		heights = octree.SampleDiffHeights(tin.Min.Z, tin.Max.Z, contourStep)
+		heights = octree.SampleDiffHeights(zMin, zMax, contourStep)
 	} else {
-		heights = octree.ExtrapolateClassBreaks(heights, tin.Min.Z, tin.Max.Z)
-		// heights = octree.InBetweenClassBreaks(heights, 0.05, 2)
+		heights = octree.ExtrapolateClassBreaks(heights, zMin, zMax)
 	}
 
-	log.Printf("info: z range: %.3f - %.3f\n", tin.Min.Z, tin.Max.Z)
+	heights = common.DedupFloat64s(heights)
 
 	log.Printf("info: num heights: %d\n", len(heights))
 
@@ -310,9 +286,9 @@
 		return 0, err
 	}
 
-	heights = common.DedupFloat64s(heights)
+	areas := raster.Trace(heights)
 
-	areas := octree.TraceAreas(heights, isoCellSize, tin.Min, tin.Max, tree.Value)
+	raster = nil
 
 	var size int
 
@@ -335,7 +311,7 @@
 	log.Printf("info: Transferred WKB size: %.2fMB.\n",
 		float64(size)/(1024*1024))
 
-	log.Printf("info: calculating and storing iso lines took %v\n",
+	log.Printf("info: calculating and storing iso areas took %v\n",
 		time.Since(start))
 
 	if err = tx.Commit(); err != nil {
--- a/pkg/controllers/gauges.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/gauges.go	Wed Oct 30 11:31:12 2019 +0100
@@ -60,16 +60,9 @@
     water_level
   FROM waterway.gauge_predictions
 ) AS gmp
-WHERE
-  location = (
-    $1::char(2),
-    $2::char(3),
-    $3::char(5),
-    $4::char(5),
-    $5::int
-  ) AND
-  measure_date BETWEEN
-    $6::timestamp - '72hours'::interval AND $6::timestamp
+WHERE location = ($1, $2, $3, $4, $5)::isrs
+  AND measure_date BETWEEN
+    $6::timestamptz - '72hours'::interval AND $6::timestamptz
 ORDER BY measure_date, date_issue
 `
 	selectWaterlevelsSQL = `
--- a/pkg/controllers/importqueue.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/importqueue.go	Wed Oct 30 11:31:12 2019 +0100
@@ -16,6 +16,7 @@
 import (
 	"context"
 	"database/sql"
+	"encoding/csv"
 	"encoding/json"
 	"fmt"
 	"log"
@@ -45,6 +46,7 @@
   imports.id AS id,
   state::varchar,
   enqueued,
+  changed,
   kind,
   username,
   signer,
@@ -56,6 +58,23 @@
 FROM import.imports
 WHERE
 `
+	selectExportSQL = `
+SELECT
+  imports.id AS id,
+  state::varchar,
+  enqueued,
+  changed,
+  kind,
+  username,
+  (SELECT country FROM users.list_users lu
+    WHERE lu.username = import.imports.username) AS country,
+  signer,
+  EXISTS(SELECT 1 FROM import.import_logs
+    WHERE kind = 'warn'::log_type and import_id = imports.id) AS has_warnings,
+  data
+FROM import.imports
+WHERE
+`
 	selectEnqueuedSQL = `
 SELECT enqueued FROM import.imports
 WHERE
@@ -88,7 +107,7 @@
 	args []interface{}
 }
 
-func buildFilters(req *http.Request) (*filledStmt, *filledStmt, *filledStmt, error) {
+func buildFilters(projection string, req *http.Request) (*filledStmt, *filledStmt, *filledStmt, error) {
 
 	var l, a, b filterAnd
 
@@ -174,12 +193,16 @@
 
 	var counting bool
 
-	switch count := strings.ToLower(req.FormValue("count")); count {
-	case "1", "t", "true":
-		counting = true
-		fl.stmt.WriteString(selectImportsCountSQL)
-	default:
-		fl.stmt.WriteString(selectImportsSQL)
+	if projection != "" {
+		fl.stmt.WriteString(projection)
+	} else {
+		switch count := strings.ToLower(req.FormValue("count")); count {
+		case "1", "t", "true":
+			counting = true
+			fl.stmt.WriteString(selectImportsCountSQL)
+		default:
+			fl.stmt.WriteString(selectImportsSQL)
+		}
 	}
 
 	if len(l) == 0 {
@@ -230,11 +253,131 @@
 	return &models.ImportTime{Time: when.UTC()}
 }
 
+func exportImports(rw http.ResponseWriter, req *http.Request) {
+
+	list, _, _, err := buildFilters(selectExportSQL, req)
+	if err != nil {
+		http.Error(rw, "error: "+err.Error(), http.StatusBadRequest)
+		return
+	}
+
+	rw.Header().Add("Content-Type", "text/csv")
+	out := csv.NewWriter(rw)
+
+	record := []string{
+		"#id",
+		"#kind",
+		"#enqueued",
+		"#changed",
+		"#user",
+		"#country",
+		"#signer",
+		"#state",
+		"#warnings",
+		"#source",
+	}
+
+	if err := out.Write(record); err != nil {
+		// Too late for HTTP status message.
+		log.Printf("error: %v\n", err)
+		return
+	}
+
+	conn := mw.GetDBConn(req)
+	ctx := req.Context()
+	var rows *sql.Rows
+	if rows, err = conn.QueryContext(ctx, list.stmt.String(), list.args...); err != nil {
+		log.Printf("error: %v\n", err)
+		return
+	}
+	defer rows.Close()
+
+	stringString := func(s sql.NullString) string {
+		if s.Valid {
+			return s.String
+		}
+		return ""
+	}
+
+	// Extract some meta infos from the import.
+	type Description interface {
+		Description() (string, error)
+	}
+
+	for rows.Next() {
+		var (
+			id          int64
+			state       string
+			enqueued    time.Time
+			changed     time.Time
+			kind        string
+			user        string
+			country     string
+			signer      sql.NullString
+			warnings    bool
+			data        string
+			description string
+		)
+		if err = rows.Scan(
+			&id,
+			&state,
+			&enqueued,
+			&changed,
+			&kind,
+			&user,
+			&country,
+			&signer,
+			&warnings,
+			&data,
+		); err != nil {
+			return
+		}
+
+		// Do some introspection on the job to be more verbose.
+		if jc := imports.FindJobCreator(imports.JobKind(kind)); jc != nil {
+			job := jc.Create()
+			if err := common.FromJSONString(data, job); err != nil {
+				log.Printf("error: %v\n", err)
+			} else if desc, ok := job.(Description); ok {
+				if description, err = desc.Description(); err != nil {
+					log.Printf("error: %v\n", err)
+				}
+			}
+		}
+
+		record[0] = strconv.FormatInt(id, 10)
+		record[1] = kind
+		record[2] = enqueued.UTC().Format(common.TimeFormat)
+		record[3] = changed.UTC().Format(common.TimeFormat)
+		record[4] = user
+		record[5] = country
+		record[6] = stringString(signer)
+		record[7] = state
+		record[8] = strconv.FormatBool(warnings)
+		record[9] = strings.Replace(description, ",", "|", -1)
+
+		if err := out.Write(record); err != nil {
+			log.Printf("error: %v\n", err)
+			return
+		}
+	}
+
+	out.Flush()
+	if err := out.Error(); err != nil {
+		log.Printf("error: %v\n", err)
+	}
+
+	if err = rows.Err(); err != nil {
+		log.Printf("error: %v\n", err)
+		return
+	}
+}
+
 func listImports(req *http.Request) (jr mw.JSONResult, err error) {
 
 	var list, before, after *filledStmt
 
-	if list, before, after, err = buildFilters(req); err != nil {
+	if list, before, after, err = buildFilters("", req); err != nil {
 		return
 	}
 
@@ -273,10 +416,12 @@
 	for rows.Next() {
 		var it models.Import
 		var enqueued time.Time
+		var changed time.Time
 		if err = rows.Scan(
 			&it.ID,
 			&it.State,
 			&enqueued,
+			&changed,
 			&it.Kind,
 			&it.User,
 			&signer,
@@ -290,6 +435,7 @@
 			it.Signer = signer.String
 		}
 		it.Enqueued = models.ImportTime{Time: enqueued.UTC()}
+		it.Changed = models.ImportTime{Time: changed.UTC()}
 		imports = append(imports, &it)
 	}
 
@@ -446,6 +592,7 @@
 	reviewSQL = `
 UPDATE import.imports SET
   state = $1::import_state,
+  changed = CURRENT_TIMESTAMP,
   signer = $2
 WHERE id = $3`
 
--- a/pkg/controllers/routes.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/routes.go	Wed Oct 30 11:31:12 2019 +0100
@@ -39,6 +39,16 @@
 		any           = auth.EnsureRole("sys_admin", "waterway_admin", "waterway_user")
 	)
 
+	// Password resets.
+	api.Handle("/users/passwordreset", &mw.JSONHandler{
+		Input:  func(*http.Request) interface{} { return new(models.PWResetUser) },
+		Handle: passwordResetRequest,
+		NoConn: true,
+	}).Methods(http.MethodPost)
+
+	api.HandleFunc("/users/passwordreset/{hash}", passwordReset).
+		Methods(http.MethodGet)
+
 	// User management.
 	api.Handle("/users", any(&mw.JSONHandler{
 		Handle: listUsers,
@@ -88,16 +98,6 @@
 		Handle: setSystemSettings,
 	})).Methods(http.MethodPut)
 
-	// Password resets.
-	api.Handle("/users/passwordreset", &mw.JSONHandler{
-		Input:  func(*http.Request) interface{} { return new(models.PWResetUser) },
-		Handle: passwordResetRequest,
-		NoConn: true,
-	}).Methods(http.MethodPost)
-
-	api.HandleFunc("/users/passwordreset/{hash}", passwordReset).
-		Methods(http.MethodGet)
-
 	// Print templates
 	api.Handle("/templates", any(&mw.JSONHandler{
 		Handle: listPrintTemplates,
@@ -285,12 +285,13 @@
 		})).Methods(http.MethodGet)
 
 	// Import queue
-	lsImports := waterwayAdmin(&mw.JSONHandler{
+
+	api.Handle("/imports", waterwayAdmin(&mw.JSONHandler{
 		Handle: listImports,
-	})
+	})).Methods(http.MethodGet)
 
-	api.Handle("/imports", lsImports).
-		Methods(http.MethodGet)
+	api.Handle("/imports/export", waterwayAdmin(
+		mw.DBConn(http.HandlerFunc(exportImports)))).Methods(http.MethodGet)
 
 	api.Handle("/imports/{id:[0-9]+}", waterwayAdmin(&mw.JSONHandler{
 		Handle: importLogs,
--- a/pkg/controllers/stretches.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/stretches.go	Wed Oct 30 11:31:12 2019 +0100
@@ -10,6 +10,7 @@
 //
 // Author(s):
 //  * Sascha L. Teichmann <sascha.teichmann@intevation.de>
+//  * Sascha Wilde <wilde@intevation.de>
 
 package controllers
 
@@ -83,6 +84,39 @@
 	return false
 }
 
+func maxDuration(a time.Duration, b time.Duration) time.Duration {
+	if a > b {
+		return a
+	}
+	return b
+}
+
+func sumClassesTo(breaks []time.Duration, to int) time.Duration {
+	var result time.Duration
+	for i := 0; i <= to; i++ {
+		result += breaks[i]
+	}
+	return result
+}
+
+func aggregateClasses(
+	new []time.Duration,
+	agg []time.Duration,
+) []time.Duration {
+	newAgg := make([]time.Duration, len(agg))
+
+	for i := 0; i < len(new)-1; i++ {
+		oldSum := sumClassesTo(agg, i)
+		newSum := sumClassesTo(new, i)
+		newAgg[i] = maxDuration(newSum, oldSum) - sumClassesTo(newAgg, i-1)
+	}
+	// adjust highest class so the sum of all classes in agg
+	// matches the original sum of all classes in new.
+	newAgg[len(new)-1] =
+		sumClassesTo(new, len(new)-1) - sumClassesTo(newAgg, len(new)-2)
+	return newAgg
+}
+
 func loadFullStretchBottleneck(
 	ctx context.Context,
 	conn *sql.Conn,
@@ -311,12 +345,8 @@
 					if ldc == nil {
 						ldc, breaks = l, b
 					} else {
-						for i, v := range l {
-							ldc[i] += v
-						}
-						for i, v := range b {
-							breaks[i] += v
-						}
+						ldc = aggregateClasses(l, ldc)
+						breaks = aggregateClasses(b, breaks)
 					}
 				}
 
@@ -381,9 +411,6 @@
 		return
 	}
 
-	// Normalize to look like as we have only one bottleneck.
-	scale := 1 / float64(len(loaded))
-
 	empty := fmt.Sprintf("%.3f", 0.0)
 	for i := range record[1:] {
 		record[i+1] = empty
@@ -391,12 +418,6 @@
 
 	for _, r := range results {
 		// Round to full days
-		for i, v := range r.ldc {
-			r.ldc[i] = time.Duration(float64(v) * scale)
-		}
-		for i, v := range r.breaks {
-			r.breaks[i] = time.Duration(float64(v) * scale)
-		}
 		ldcRounded := common.RoundToFullDays(r.ldc)
 		rangesRounded := common.RoundToFullDays(r.breaks)
 
@@ -588,16 +609,12 @@
 					if ldc == nil {
 						ldc, breaks = l, b
 					} else {
-						for i, v := range l {
-							ldc[i] += v
-						}
-						for i, v := range b {
-							breaks[i] += v
-						}
+						ldc = aggregateClasses(l, ldc)
+						breaks = aggregateClasses(b, breaks)
 					}
 				}
 
-				duration := res.to.Sub(res.from) * time.Duration(len(loaded))
+				duration := res.to.Sub(res.from)
 
 				res.ldc = durationsToPercentage(duration, ldc)
 				res.breaks = durationsToPercentage(duration, breaks)
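
The new aggregation replaces the former element-wise summation of the per-bottleneck LDC/break durations: for each class the maximum of the cumulative sums wins, and the highest class is then adjusted so the aggregate total equals the total of the newest bottleneck. A self-contained sketch with invented durations (not taken from the code above) illustrates the effect:

package main

import (
	"fmt"
	"time"
)

func maxDuration(a, b time.Duration) time.Duration {
	if a > b {
		return a
	}
	return b
}

func sumClassesTo(breaks []time.Duration, to int) time.Duration {
	var result time.Duration
	for i := 0; i <= to; i++ {
		result += breaks[i]
	}
	return result
}

// aggregateClasses mirrors the helper added above: per class it keeps the
// maximum of the cumulative durations seen so far and adjusts the highest
// class so the total equals the total of the newest per-bottleneck values.
func aggregateClasses(new, agg []time.Duration) []time.Duration {
	newAgg := make([]time.Duration, len(agg))
	for i := 0; i < len(new)-1; i++ {
		newAgg[i] = maxDuration(sumClassesTo(new, i), sumClassesTo(agg, i)) -
			sumClassesTo(newAgg, i-1)
	}
	newAgg[len(new)-1] =
		sumClassesTo(new, len(new)-1) - sumClassesTo(newAgg, len(new)-2)
	return newAgg
}

func main() {
	day := 24 * time.Hour
	a := []time.Duration{10 * day, 20 * day, 30 * day} // cumulative: 10, 30, 60 days
	b := []time.Duration{25 * day, 5 * day, 30 * day}  // cumulative: 25, 30, 60 days

	// Per class the cumulative maximum wins: 25, 30, 60 days,
	// i.e. the aggregate becomes [25 5 30] days.
	fmt.Println(aggregateClasses(a, b))
}
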
--- a/pkg/controllers/surveys.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/controllers/surveys.go	Wed Oct 30 11:31:12 2019 +0100
@@ -28,7 +28,7 @@
 
 const (
 	listSurveysSQL = `
-SELECT
+SELECT DISTINCT
   s.bottleneck_id,
   s.date_info::text,
   s.depth_reference,
@@ -41,7 +41,7 @@
   LEFT JOIN waterway.gauges_reference_water_levels AS r
     ON s.depth_reference = r.depth_reference
       AND g.location = r.location AND g.validity = r.validity
-WHERE b.objnam = $1 AND s.date_info::timestamptz <@ b.validity`
+WHERE b.objnam = $1`
 )
 
 func listSurveys(req *http.Request) (jr mw.JSONResult, err error) {
--- a/pkg/imports/bn.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/bn.go	Wed Oct 30 11:31:12 2019 +0100
@@ -159,6 +159,14 @@
 )`
 )
 
+func (bn *Bottleneck) Description() (string, error) {
+
+	var descs []string
+
+	descs = append(descs, bn.URL)
+	return strings.Join(descs, "|"), nil
+}
+
 type bnJobCreator struct{}
 
 func init() {
--- a/pkg/imports/dma.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/dma.go	Wed Oct 30 11:31:12 2019 +0100
@@ -41,6 +41,10 @@
 	Password string `json:"password,omitempty"`
 }
 
+func (dma *DistanceMarksAshore) Description() (string, error) {
+	return dma.URL + "|" + dma.FeatureType, nil
+}
+
 // DMAJobKind is the import queue type identifier.
 const DMAJobKind JobKind = "dma"
 
--- a/pkg/imports/dmv.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/dmv.go	Wed Oct 30 11:31:12 2019 +0100
@@ -36,6 +36,10 @@
 	Insecure bool `json:"insecure"`
 }
 
+func (dmv *DistanceMarksVirtual) Description() (string, error) {
+	return dmv.URL, nil
+}
+
 // DMVJobKind is the import queue type identifier.
 const DMVJobKind JobKind = "dmv"
 
--- a/pkg/imports/dsr.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/dsr.go	Wed Oct 30 11:31:12 2019 +0100
@@ -1,4 +1,3 @@
-// This is Free Software under GNU Affero General Public License v >= 3.0
 // without warranty, see README.md and license for details.
 //
 // SPDX-License-Identifier: AGPL-3.0-or-later
@@ -19,6 +18,7 @@
 	"database/sql"
 	"errors"
 
+	"gemma.intevation.de/gemma/pkg/common"
 	"gemma.intevation.de/gemma/pkg/models"
 )
 
@@ -28,6 +28,10 @@
 	Date         models.Date `json:"date-info"`
 }
 
+func (dsr *DeleteSoundingResult) Description() (string, error) {
+	return dsr.BottleneckId + "|" + dsr.Date.Format(common.DateFormat), nil
+}
+
 // DSRJobKind is the import queue type identifier.
 const DSRJobKind JobKind = "dsr"
 
--- a/pkg/imports/fa.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/fa.go	Wed Oct 30 11:31:12 2019 +0100
@@ -160,6 +160,10 @@
 )ON CONFLICT ON CONSTRAINT fa_reference_values_pkey DO NOTHING`
 )
 
+func (fa *FairwayAvailability) Description() (string, error) {
+	return fa.URL, nil
+}
+
 type faJobCreator struct{}
 
 func init() {
--- a/pkg/imports/fd.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/fd.go	Wed Oct 30 11:31:12 2019 +0100
@@ -19,6 +19,7 @@
 	"encoding/json"
 	"fmt"
 	"io"
+	"strings"
 	"time"
 
 	"gemma.intevation.de/gemma/pkg/common"
@@ -46,6 +47,14 @@
 	Password string `json:"password,omitempty"`
 }
 
+func (fd *FairwayDimension) Description() (string, error) {
+	return strings.Join([]string{
+		fd.URL,
+		fd.FeatureType,
+		fmt.Sprintf("LOS%d", fd.LOS),
+	}, "|"), nil
+}
+
 type fdTime struct{ time.Time }
 
 var guessFDTime = common.TimeParser([]string{
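
The Description methods added to the import job types in this changeset all build a short identifier by joining the job's key parameters with "|". As a rough illustration with invented values (the URL and feature type below are hypothetical), a FairwayDimension job would describe itself like this:

package main

import (
	"fmt"
	"strings"
)

// Hypothetical stand-in for imports.FairwayDimension, only to show the
// "value|value|value" shape of the Description strings.
type fairwayDimension struct {
	URL         string
	FeatureType string
	LOS         int
}

func (fd *fairwayDimension) Description() (string, error) {
	return strings.Join([]string{
		fd.URL,
		fd.FeatureType,
		fmt.Sprintf("LOS%d", fd.LOS),
	}, "|"), nil
}

func main() {
	fd := &fairwayDimension{
		URL:         "https://service.example.com/wfs", // made-up endpoint
		FeatureType: "fairway_dimensions",              // made-up feature type
		LOS:         3,
	}
	d, _ := fd.Description()
	fmt.Println(d) // https://service.example.com/wfs|fairway_dimensions|LOS3
}
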
--- a/pkg/imports/gm.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/gm.go	Wed Oct 30 11:31:12 2019 +0100
@@ -40,6 +40,10 @@
 	Insecure bool `json:"insecure"`
 }
 
+func (gm *GaugeMeasurement) Description() (string, error) {
+	return gm.URL, nil
+}
+
 // GMJobKind is the import queue type identifier.
 const GMJobKind JobKind = "gm"
 
@@ -386,6 +390,13 @@
 				} else {
 					unit = string(*measure.Unit)
 				}
+
+				if measure.Value == nil {
+					feedback.Warn("Missing mandatory value at %s. Ignored (bad service)",
+						measure.Measuredate.Format(time.RFC3339))
+					continue
+				}
+
 				convert, err := rescale(unit)
 				if err != nil {
 					return nil, err
@@ -449,11 +460,6 @@
 						newP++
 					}
 				} else {
-					if measure.Value == nil {
-						feedback.Info("Missing value at %s. Ignored",
-							measure.Measuredate.Format(time.RFC3339))
-						continue
-					}
 					err = insertGMStmt.QueryRowContext(
 						ctx,
 						currIsrs.CountryCode,
--- a/pkg/imports/isr.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/isr.go	Wed Oct 30 11:31:12 2019 +0100
@@ -106,7 +106,7 @@
 	return ids, nil
 }
 
-// Do executes the actual refreshing of the iso lines.
+// Do executes the actual refreshing of the iso areas.
 func (isr *IsoRefresh) Do(
 	ctx context.Context,
 	importID int64,
@@ -115,7 +115,7 @@
 ) (interface{}, error) {
 
 	start := time.Now()
-	feedback.Info("Regenerating contour lines for sounding results " +
+	feedback.Info("Regenerating iso areas for sounding results " +
 		"after configuration change")
 	defer func() {
 		feedback.Info(
@@ -184,7 +184,17 @@
 		}
 
 		// Calculate and store the iso areas.
-		areas := octree.TraceAreas(hs, isoCellSize, tree.Min(), tree.Max(), tree.Value)
+		box := octree.Box2D{
+			X1: tree.Min().X,
+			Y1: tree.Min().Y,
+			X2: tree.Max().X,
+			Y2: tree.Max().Y,
+		}
+
+		raster := octree.NewRaster(box, isoCellSize)
+		raster.Rasterize(tree.Value)
+		areas := raster.Trace(hs)
+
 		for i, a := range areas {
 			if len(a) == 0 {
 				continue
--- a/pkg/imports/queue.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/queue.go	Wed Oct 30 11:31:12 2019 +0100
@@ -138,7 +138,9 @@
 	queueUser = "sys_admin"
 
 	reEnqueueRunningSQL = `
-UPDATE import.imports SET state = 'queued'::import_state
+UPDATE import.imports SET
+  state = 'queued'::import_state,
+  changed = CURRENT_TIMESTAMP
 WHERE state = 'running'::import_state`
 
 	insertJobSQL = `
@@ -180,12 +182,15 @@
 LIMIT 1`
 
 	updateStateSQL = `
-UPDATE import.imports SET state = $1::import_state
+UPDATE import.imports SET
+  state = $1::import_state,
+  changed = CURRENT_TIMESTAMP
 WHERE id = $2`
 
 	updateStateSummarySQL = `
 UPDATE import.imports SET
    state = $1::import_state,
+   changed = CURRENT_TIMESTAMP,
    summary = $2
 WHERE id = $3`
 
--- a/pkg/imports/sec.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/sec.go	Wed Oct 30 11:31:12 2019 +0100
@@ -16,6 +16,7 @@
 import (
 	"context"
 	"database/sql"
+	"strings"
 	"time"
 
 	"gemma.intevation.de/gemma/pkg/models"
@@ -33,6 +34,15 @@
 	Date      models.Date `json:"date-info"`
 }
 
+func (sec *Section) Description() (string, error) {
+	return strings.Join([]string{
+		sec.Name,
+		sec.ObjNam,
+		sec.From.String(),
+		sec.To.String(),
+	}, "|"), nil
+}
+
 // SECJobKind is the import queue type identifier.
 const SECJobKind JobKind = "sec"
 
--- a/pkg/imports/sr.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/sr.go	Wed Oct 30 11:31:12 2019 +0100
@@ -71,9 +71,16 @@
 )
 
 const (
+	// multiBeamThreshold is the point density in points/m² above
+	// which the data is assumed to be an already interpolated
+	// point cloud rather than a raw single beam scan.
+	multiBeamThreshold = 1.0 / 5.0
+)
+
+const (
 	// pointsPerSquareMeter is the average number of points
 	// when generating an artificial height model for single beam scans.
-	pointsPerSquareMeter = 2
+	pointsPerSquareMeter = 1.0
 )
 
 const (
@@ -174,15 +181,8 @@
   $2,
   ST_Transform(
     ST_Multi(
-      ST_CollectionExtract(
-        ST_MakeValid(
-          ST_Multi(
-             ST_Collectionextract(
-                ST_SimplifyPreserveTopology(ST_GeomFromWKB($4, $3::integer), $5),
-                3
-             )
-            )
-        ),
+      ST_Collectionextract(
+        ST_SimplifyPreserveTopology(ST_GeomFromWKB($4, $3::integer), $5),
         3
       )
     ),
@@ -215,6 +215,22 @@
 `
 )
 
+func (sr *SoundingResult) Description() (string, error) {
+
+	var descs []string
+
+	if sr.Bottleneck != nil {
+		descs = append(descs, *sr.Bottleneck)
+	}
+	if sr.Date != nil {
+		descs = append(descs, sr.Date.Format(common.DateFormat))
+	}
+	if sr.NegateZ != nil && *sr.NegateZ {
+		descs = append(descs, "negateZ")
+	}
+	return strings.Join(descs, "|"), nil
+}
+
 func (sr *SoundingResult) singleBeam() bool {
 	return sr.SingleBeam != nil && *sr.SingleBeam
 }
@@ -495,6 +511,17 @@
 
 	if sr.singleBeam() {
 
+		origDensity := float64(len(xyz)) / polygonArea
+
+		feedback.Info("Boundary area: %.2fm²", polygonArea)
+		feedback.Info("Original point density: %.2f points/m²", origDensity)
+
+		if origDensity > multiBeamThreshold {
+			feedback.Warn("The density is greater than %.2f points/m².", multiBeamThreshold)
+			feedback.Warn("It is assumed that the data is already interpolated.")
+			goto multibeam
+		}
+
 		// Build the first mesh to generate random points on.
 
 		feedback.Info("Build virtual DEM based on original XYZ data.")
@@ -507,11 +534,9 @@
 
 		feedback.Info("Building took %v", time.Since(start))
 
-		feedback.Info("Boundary area: %.2fm²", polygonArea)
-
 		numPoints := int(math.Ceil(polygonArea * pointsPerSquareMeter))
 
-		feedback.Info("Generate %d random points for an average density of ~%d points/m².",
+		feedback.Info("Generate %d random points for an average density of ~%.2f points/m².",
 			numPoints, pointsPerSquareMeter)
 
 		start = time.Now()
@@ -554,6 +579,8 @@
 		feedback.Info("Clipping triangles from new mesh.")
 	}
 
+multibeam:
+
 	start = time.Now()
 	tin = tri.Tin()
 	tin.EPSG = epsg
@@ -853,12 +880,6 @@
 		}
 	} else {
 		heights = octree.ExtrapolateClassBreaks(heights, minZ, maxZ)
-		// We set steps for InBetweenClassBreaks to 1, so it
-		// becomes a null operation.  The extra class breaks
-		// were considered unexpected and confusing by the
-		// users.  Once we get filled polygones the visual will
-		// be considerably different anyway. -- sw
-		// heights = octree.InBetweenClassBreaks(heights, 0.05, 1)
 	}
 
 	/*
@@ -888,7 +909,16 @@
 			time.Since(total))
 	}()
 
-	areas := octree.TraceAreas(heights, isoCellSize, tree.Min(), tree.Max(), tree.Value)
+	box := octree.Box2D{
+		X1: tree.Min().X,
+		Y1: tree.Min().Y,
+		X2: tree.Max().X,
+		Y2: tree.Max().Y,
+	}
+
+	raster := octree.NewRaster(box, isoCellSize)
+	raster.Rasterize(tree.Value)
+	areas := raster.Trace(heights)
 
 	return storeAreas(
 		ctx, tx, feedback,
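
multiBeamThreshold above works out to 1.0/5.0 = 0.2 points/m², i.e. anything denser than one point per 5 m² is treated as already interpolated and the single beam DEM generation is skipped. A minimal sketch of just that decision, with an invented boundary area and point count:

package main

import "fmt"

// Mirrors the density check above; 0.2 points/m² is one point per 5 m².
const multiBeamThreshold = 1.0 / 5.0

func main() {
	polygonArea := 125000.0 // m², invented bottleneck boundary area
	points := 50000         // invented number of XYZ input points

	origDensity := float64(points) / polygonArea
	fmt.Printf("density: %.2f points/m²\n", origDensity) // 0.40

	if origDensity > multiBeamThreshold {
		fmt.Println("treat as multi beam (already interpolated)")
	} else {
		fmt.Println("single beam: interpolate a virtual DEM first")
	}
}
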
--- a/pkg/imports/wa.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/wa.go	Wed Oct 30 11:31:12 2019 +0100
@@ -45,6 +45,10 @@
 	Password string `json:"password,omitempty"`
 }
 
+func (wa *WaterwayArea) Description() (string, error) {
+	return wa.URL + "|" + wa.FeatureType, nil
+}
+
 // WAJobKind is the import queue type identifier.
 const WAJobKind JobKind = "wa"
 
--- a/pkg/imports/wg.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/wg.go	Wed Oct 30 11:31:12 2019 +0100
@@ -40,6 +40,10 @@
 	Insecure bool `json:"insecure"`
 }
 
+func (wg *WaterwayGauge) Description() (string, error) {
+	return wg.URL, nil
+}
+
 // WGJobKind is the unique name of this import job type.
 const WGJobKind JobKind = "wg"
 
--- a/pkg/imports/wp.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/wp.go	Wed Oct 30 11:31:12 2019 +0100
@@ -59,6 +59,10 @@
 	Password string `json:"password,omitempty"`
 }
 
+func (wp *WaterwayProfiles) Description() (string, error) {
+	return wp.URL + "|" + wp.FeatureType, nil
+}
+
 // WPJobKind is the unique name of this import job type.
 const WPJobKind JobKind = "wp"
 
--- a/pkg/imports/wx.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/imports/wx.go	Wed Oct 30 11:31:12 2019 +0100
@@ -44,6 +44,10 @@
 	Password string `json:"password,omitempty"`
 }
 
+func (wx *WaterwayAxis) Description() (string, error) {
+	return wx.URL + "|" + wx.FeatureType, nil
+}
+
 // WXJobKind is the import queue type identifier.
 const WXJobKind JobKind = "wx"
 
--- a/pkg/models/import.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/models/import.go	Wed Oct 30 11:31:12 2019 +0100
@@ -28,6 +28,7 @@
 		ID       int64      `json:"id"`
 		State    string     `json:"state"`
 		Enqueued ImportTime `json:"enqueued"`
+		Changed  ImportTime `json:"changed"`
 		Kind     string     `json:"kind"`
 		User     string     `json:"user"`
 		Signer   string     `json:"signer,omitempty"`
--- a/pkg/octree/areas.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/octree/areas.go	Wed Oct 30 11:31:12 2019 +0100
@@ -14,16 +14,10 @@
 package octree
 
 import (
-	"log"
-	"math"
 	"runtime"
 	"sync"
-	"time"
-
-	"github.com/fogleman/contourmap"
 
 	"gemma.intevation.de/gemma/pkg/common"
-	"gemma.intevation.de/gemma/pkg/wkb"
 )
 
 func GenerateRandomVertices(
@@ -92,151 +86,3 @@
 	close(out)
 	<-done
 }
-
-func TraceAreas(
-	heights []float64,
-	cellSize float64,
-	min, max Vertex,
-	eval func(float64, float64) (float64, bool),
-) []wkb.MultiPolygonGeom {
-
-	width := max.X - min.X
-	height := max.Y - min.Y
-
-	log.Printf("info: Width/Height: %.2f / %.2f\n", width, height)
-
-	xcells := int(math.Ceil(width / cellSize))
-	ycells := int(math.Ceil(height / cellSize))
-
-	log.Printf("info: Raster size: (%d, %d)\n", xcells, ycells)
-
-	start := time.Now()
-
-	// Add border for closing
-	raster := make([]float64, (xcells+2)*(ycells+2))
-
-	// prefill for no data
-	const nodata = -math.MaxFloat64
-	for i := range raster {
-		raster[i] = nodata
-	}
-
-	// rasterize the height model
-
-	var wg sync.WaitGroup
-
-	rows := make(chan int)
-
-	rasterRow := func() {
-		defer wg.Done()
-		quat := 0.25 * cellSize
-		for i := range rows {
-			pos := (i+1)*(xcells+2) + 1
-			row := raster[pos : pos+xcells]
-			py := min.Y + float64(i)*cellSize + cellSize/2
-			px := min.X + cellSize/2
-			y1 := py - quat
-			y2 := py + quat
-			for j := range row {
-				var n int
-				var sum float64
-
-				if v, ok := eval(px-quat, y1); ok {
-					sum = v
-					n = 1
-				}
-				if v, ok := eval(px-quat, y2); ok {
-					sum += v
-					n++
-				}
-				if v, ok := eval(px+quat, y1); ok {
-					sum += v
-					n++
-				}
-				if v, ok := eval(px+quat, y2); ok {
-					sum += v
-					n++
-				}
-
-				if n > 0 {
-					row[j] = sum / float64(n)
-				}
-				px += cellSize
-			}
-		}
-	}
-
-	for n := runtime.NumCPU(); n >= 1; n-- {
-		wg.Add(1)
-		go rasterRow()
-	}
-
-	for i := 0; i < ycells; i++ {
-		rows <- i
-	}
-	close(rows)
-
-	wg.Wait()
-	log.Printf("info: Rastering took %v\n", time.Since(start))
-
-	start = time.Now()
-
-	tracer := contourmap.FromFloat64s(xcells+2, ycells+2, raster)
-
-	areas := make([]wkb.MultiPolygonGeom, len(heights))
-
-	// TODO: Check if this correct!
-	reprojX := common.Linear(0.5, min.X, 1.5, min.X+cellSize)
-	reprojY := common.Linear(0.5, min.Y, 1.5, min.Y+cellSize)
-
-	cnts := make(chan int)
-
-	doContours := func() {
-		defer wg.Done()
-		for hIdx := range cnts {
-			c := tracer.Contours(heights[hIdx])
-
-			if len(c) == 0 {
-				continue
-			}
-
-			// We need to bring it back to the
-			// none raster coordinate system.
-			a := make(wkb.MultiPolygonGeom, len(c))
-
-			for i, pl := range c {
-				shell := make(wkb.LinearRingGeom, len(pl))
-				for j, pt := range pl {
-					shell[j] = wkb.PointGeom{
-						X: reprojX(pt.X),
-						Y: reprojY(pt.Y),
-					}
-				}
-				/*
-					if !shell.CCW() {
-						log.Println("not ccw")
-						shell.Reverse()
-					}
-				*/
-				a[i] = wkb.PolygonGeom{shell}
-			}
-
-			areas[hIdx] = a
-		}
-	}
-
-	for n := runtime.NumCPU(); n >= 1; n-- {
-		wg.Add(1)
-		go doContours()
-	}
-
-	for i := range heights {
-		cnts <- i
-	}
-	close(cnts)
-
-	wg.Wait()
-	log.Printf("info: Tracing areas took %v\n", time.Since(start))
-
-	return areas
-}
--- a/pkg/octree/classbreaks.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/octree/classbreaks.go	Wed Oct 30 11:31:12 2019 +0100
@@ -98,6 +98,10 @@
 	return ParseClassBreaks(config.String)
 }
 
+func round(v float64) float64 {
+	return math.Round(v*10000) / 10000
+}
+
 func ExtrapolateClassBreaks(cbs []float64, min, max float64) []float64 {
 	if min > max {
 		min, max = max, min
@@ -129,7 +133,7 @@
 			break
 		}
 		m := make([]float64, len(n)+1)
-		m[0] = n[0] - diff
+		m[0] = round(n[0] - diff)
 		copy(m[1:], n)
 		n = m
 	}
@@ -139,40 +143,8 @@
 		if diff == 0 {
 			break
 		}
-		n = append(n, n[len(n)-1]+diff)
+		n = append(n, round(n[len(n)-1]+diff))
 	}
 
 	return n
 }
-
-func InBetweenClassBreaks(cbs []float64, min float64, steps int) []float64 {
-	if len(cbs) < 2 || steps < 2 {
-		return cbs
-	}
-
-	out := make([]float64, 1, len(cbs)*steps)
-
-	out[0] = cbs[0]
-
-	_1steps := 1 / float64(steps)
-
-	for i := 1; i < len(cbs); i++ {
-		last, curr := cbs[i-1], cbs[i]
-
-		// Gap already too small -> proceed with next gap.
-		diff := curr - last
-		if math.Abs(diff) < min {
-			out = append(out, curr)
-			continue
-		}
-
-		delta := diff * _1steps
-		for p := last + delta; p < curr; p += delta {
-			out = append(out, p)
-		}
-
-		out = append(out, curr)
-	}
-
-	return out
-}
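
The new round helper keeps the extrapolated class breaks at four decimal places. A quick check with invented values:

package main

import (
	"fmt"
	"math"
)

// Same rounding as the round helper above: four decimal places.
func round(v float64) float64 {
	return math.Round(v*10000) / 10000
}

func main() {
	fmt.Println(round(1.23456))       // 1.2346
	fmt.Println(round(2.0 - 1.0/3.0)) // 1.6667
}
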
--- a/pkg/octree/polygon.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/octree/polygon.go	Wed Oct 30 11:31:12 2019 +0100
@@ -15,13 +15,10 @@
 
 import (
 	"bytes"
-	"context"
-	"database/sql"
 	"encoding/binary"
 	"fmt"
 	"log"
 	"math"
-	"time"
 
 	"github.com/tidwall/rtree"
 
@@ -42,56 +39,11 @@
 )
 
 const (
-	clippingPolygonSQL = `
-WITH joined AS (
-  SELECT
-    sr.area      AS area,
-    sr.date_info AS date_info
-  FROM waterway.sounding_results sr
-  WHERE sr.bottleneck_id = $1
-)
-SELECT ST_AsBinary(
-  ST_Buffer(ST_intersection(
-    (SELECT ST_Transform(area::geometry, $2::int) FROM joined WHERE date_info = $3::date),
-    (SELECT ST_Transform(area::geometry, $2::int) FROM joined WHERE date_info = $4::date)
-  ), 0.1)
-  ) AS area
-`
-)
-
-const (
 	IntersectionInside IntersectionType = iota
 	IntersectionOutSide
 	IntersectionOverlaps
 )
 
-func LoadClippingPolygon(
-	ctx context.Context,
-	conn *sql.Conn,
-	epsg uint32,
-	bottleneck string,
-	first, second time.Time,
-) (*Polygon, error) {
-
-	var clip []byte
-
-	if err := conn.QueryRowContext(
-		ctx, clippingPolygonSQL,
-		bottleneck,
-		epsg,
-		first, second,
-	).Scan(&clip); err != nil {
-		return nil, err
-	}
-
-	var polygon Polygon
-	if err := polygon.FromWKB(clip); err != nil {
-		return nil, err
-	}
-	polygon.Indexify()
-	return &polygon, nil
-}
-
 func (ls lineSegment) Rect(interface{}) ([]float64, []float64) {
 
 	var min, max [2]float64
@@ -281,19 +233,18 @@
 
 	// No intersection -> check inside or outside
 	// if an abritrary point  is inside or not.
-	point := []float64{box.X1, box.Y1}
 
 	// Check holes first: inside a hole means outside.
 	if len(p.rings) > 1 {
 		for _, hole := range p.rings[1:] {
-			if hole.contains(point) {
+			if contains(hole, box.X1, box.Y1) {
 				return IntersectionOutSide
 			}
 		}
 	}
 
 	// Check shell
-	if p.rings[0].contains(point) {
+	if contains(p.rings[0], box.X1, box.Y1) {
 		return IntersectionInside
 	}
 	return IntersectionOutSide
@@ -324,87 +275,110 @@
 	}
 	// No intersection -> check inside or outside
 	// if an arbitrary point is inside or not.
-	point := []float64{t[0].X, t[0].Y}
+	pX, pY := t[0].X, t[0].Y
 
 	// Check holes first: inside a hole means outside.
 	if len(p.rings) > 1 {
 		for _, hole := range p.rings[1:] {
-			if hole.contains(point) {
+			if contains(hole, pX, pY) {
 				return IntersectionOutSide
 			}
 		}
 	}
 
 	// Check shell
-	if p.rings[0].contains(point) {
+	if contains(p.rings[0], pX, pY) {
 		return IntersectionInside
 	}
 	return IntersectionOutSide
 }
 
-func (rng ring) isClosed() bool { return (len(rng) / 2) >= 3 }
+func (rng ring) length() int {
+	return len(rng) / 2
+}
+
+func (rng ring) point(i int) (float64, float64) {
+	i *= 2
+	return rng[i], rng[i+1]
+}
 
-func (rng ring) contains(point []float64) bool {
-	if !rng.isClosed() {
+type segments interface {
+	length() int
+	point(int) (float64, float64)
+}
+
+func contains(s segments, pX, pY float64) bool {
+
+	n := s.length()
+	if n < 3 {
 		return false
 	}
 
-	end := len(rng)/2 - 1
-
-	contains := intersectsWithRaycast(point, rng[:2], rng[end*2:end*2+2])
+	sX, sY := s.point(0)
+	eX, eY := s.point(n - 1)
 
-	for i := 2; i < len(rng); i += 2 {
-		if intersectsWithRaycast(point, rng[i-2:i], rng[i:i+2]) {
-			contains = !contains
-		}
+	const eps = 0.0000001
+
+	if math.Abs(sX-eX) > eps || math.Abs(sY-eY) > eps {
+		// It's not closed!
+		return false
 	}
 
-	return contains
+	var inside bool
+
+	for i := 1; i < n; i++ {
+		eX, eY := s.point(i)
+		if intersectsWithRaycast(pX, pY, sX, sY, eX, eY) {
+			inside = !inside
+		}
+		sX, sY = eX, eY
+	}
+
+	return inside
 }
 
 // Using the raycast algorithm, this returns whether or not the passed in point
 // Intersects with the edge drawn by the passed in start and end points.
 // Original implementation: http://rosettacode.org/wiki/Ray-casting_algorithm#Go
-func intersectsWithRaycast(point, start, end []float64) bool {
+func intersectsWithRaycast(pX, pY, sX, sY, eX, eY float64) bool {
 
 	// Always ensure that the first point
 	// has a y coordinate that is less than the second point
-	if start[1] > end[1] {
+	if sY > eY {
 		// Switch the points if otherwise.
-		start, end = end, start
+		sX, sY, eX, eY = eX, eY, sX, sY
 	}
 
 	// Move the point's y coordinate
 	// outside of the bounds of the testing region
 	// so we can start drawing a ray
-	for point[1] == start[1] || point[1] == end[1] {
-		y := math.Nextafter(point[1], math.Inf(1))
-		point = []float64{point[0], y}
+	for pY == sY || pY == eY {
+		pY = math.Nextafter(pY, math.Inf(1))
 	}
 
 	// If we are outside of the polygon, indicate so.
-	if point[1] < start[1] || point[1] > end[1] {
+	if pY < sY || pY > eY {
 		return false
 	}
 
-	if start[0] > end[0] {
-		if point[0] > start[0] {
+	if sX > eX {
+		if pX > sX {
 			return false
 		}
-		if point[0] < end[0] {
+		if pX < eX {
 			return true
 		}
 	} else {
-		if point[0] > end[0] {
+		if pX > eX {
 			return false
 		}
-		if point[0] < start[0] {
+		if pX < sX {
 			return true
 		}
 	}
 
-	raySlope := (point[1] - start[1]) / (point[0] - start[0])
-	diagSlope := (end[1] - start[1]) / (end[0] - start[0])
+	raySlope := (pY - sY) / (pX - sX)
+	diagSlope := (eY - sY) / (eX - sX)
 
 	return raySlope >= diagSlope
 }
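
The refactored contains/intersectsWithRaycast still implement the classic even-odd ray casting test, now over a small segments interface instead of flat coordinate slices. A self-contained sketch of the same idea on a unit square (using a compact crossing formula, not the package's exact degenerate-case handling):

package main

import "fmt"

// evenOdd reports whether (pX, pY) lies inside the closed ring given as
// consecutive vertices (first vertex repeated at the end), using the same
// even-odd ray casting idea as contains/intersectsWithRaycast above.
func evenOdd(ring [][2]float64, pX, pY float64) bool {
	inside := false
	for i := 1; i < len(ring); i++ {
		x1, y1 := ring[i-1][0], ring[i-1][1]
		x2, y2 := ring[i][0], ring[i][1]
		// Toggle when the horizontal ray from the point crosses this edge.
		if (y1 > pY) != (y2 > pY) &&
			pX < (x2-x1)*(pY-y1)/(y2-y1)+x1 {
			inside = !inside
		}
	}
	return inside
}

func main() {
	square := [][2]float64{{0, 0}, {1, 0}, {1, 1}, {0, 1}, {0, 0}}
	fmt.Println(evenOdd(square, 0.5, 0.5)) // true
	fmt.Println(evenOdd(square, 1.5, 0.5)) // false
}
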
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pkg/octree/raster.go	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,404 @@
+// This is Free Software under GNU Affero General Public License v >= 3.0
+// without warranty, see README.md and license for details.
+//
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// License-Filename: LICENSES/AGPL-3.0.txt
+//
+// Copyright (C) 2019 by via donau
+//   – Österreichische Wasserstraßen-Gesellschaft mbH
+// Software engineering by Intevation GmbH
+//
+// Author(s):
+//  * Sascha L. Teichmann <sascha.teichmann@intevation.de>
+
+package octree
+
+import (
+	"log"
+	"math"
+	"runtime"
+	"sync"
+	"time"
+
+	"gemma.intevation.de/gemma/pkg/common"
+	"gemma.intevation.de/gemma/pkg/wkb"
+	"github.com/fogleman/contourmap"
+)
+
+type Raster struct {
+	BBox     Box2D
+	CellSize float64
+	XCells   int
+	YCells   int
+	Cells    []float64
+}
+
+const noData = -math.MaxFloat64
+
+func NewRaster(bbox Box2D, cellSize float64) *Raster {
+
+	width, height := bbox.Size()
+
+	log.Printf("info: raster extent: %.2f / %.2f", width, height)
+
+	xCells := int(math.Ceil(width / cellSize))
+	yCells := int(math.Ceil(height / cellSize))
+
+	log.Printf("info: raster size: %d / %d\n", xCells, yCells)
+
+	size := (xCells + 2) * (yCells + 2)
+	cells := make([]float64, size)
+	for i := range cells {
+		cells[i] = noData
+	}
+	return &Raster{
+		BBox:     bbox,
+		CellSize: cellSize,
+		XCells:   xCells,
+		YCells:   yCells,
+		Cells:    cells,
+	}
+}
+
+func (r *Raster) Rasterize(eval func(float64, float64) (float64, bool)) {
+	var wg sync.WaitGroup
+
+	rows := make(chan int)
+
+	rasterRow := func() {
+		defer wg.Done()
+		quat := 0.25 * r.CellSize
+		for i := range rows {
+			pos := (i+1)*(r.XCells+2) + 1
+			row := r.Cells[pos : pos+r.XCells]
+			py := r.BBox.Y1 + float64(i)*r.CellSize + r.CellSize/2
+			px := r.BBox.X1 + r.CellSize/2
+			y1 := py - quat
+			y2 := py + quat
+			for j := range row {
+				var n int
+				var sum float64
+
+				if v, ok := eval(px-quat, y1); ok {
+					sum = v
+					n = 1
+				}
+				if v, ok := eval(px-quat, y2); ok {
+					sum += v
+					n++
+				}
+				if v, ok := eval(px+quat, y1); ok {
+					sum += v
+					n++
+				}
+				if v, ok := eval(px+quat, y2); ok {
+					sum += v
+					n++
+				}
+
+				if n > 0 {
+					row[j] = sum / float64(n)
+				}
+				px += r.CellSize
+			}
+		}
+	}
+
+	for n := runtime.NumCPU(); n >= 1; n-- {
+		wg.Add(1)
+		go rasterRow()
+	}
+
+	for i := 0; i < r.YCells; i++ {
+		rows <- i
+	}
+	close(rows)
+
+	// Wait for the rasterizing workers before the grid is used.
+	wg.Wait()
+}
+
+func (r *Raster) Diff(eval func(float64, float64) (float64, bool)) {
+	var wg sync.WaitGroup
+
+	rows := make(chan int)
+
+	rasterRow := func() {
+		defer wg.Done()
+		quat := 0.25 * r.CellSize
+		for i := range rows {
+			pos := (i+1)*(r.XCells+2) + 1
+			row := r.Cells[pos : pos+r.XCells]
+			py := r.BBox.Y1 + float64(i)*r.CellSize + r.CellSize/2
+			px := r.BBox.X1 + r.CellSize/2
+			y1 := py - quat
+			y2 := py + quat
+			for j, old := range row {
+				// only diff cells that already have a value
+				if old == noData {
+					px += r.CellSize
+					continue
+				}
+				var n int
+				var sum float64
+
+				if v, ok := eval(px-quat, y1); ok {
+					sum = v
+					n = 1
+				}
+				if v, ok := eval(px-quat, y2); ok {
+					sum += v
+					n++
+				}
+				if v, ok := eval(px+quat, y1); ok {
+					sum += v
+					n++
+				}
+				if v, ok := eval(px+quat, y2); ok {
+					sum += v
+					n++
+				}
+
+				if n > 0 {
+					row[j] -= sum / float64(n)
+				} else {
+					row[j] = noData
+				}
+
+				px += r.CellSize
+			}
+		}
+	}
+
+	for n := runtime.NumCPU(); n >= 1; n-- {
+		wg.Add(1)
+		go rasterRow()
+	}
+
+	for i := 0; i < r.YCells; i++ {
+		rows <- i
+	}
+	close(rows)
+
+	// Wait for the diff workers before the grid is used.
+	wg.Wait()
+}
+
+func (r *Raster) ZExtent() (float64, float64, bool) {
+	min, max := math.MaxFloat64, -math.MaxFloat64
+	for _, v := range r.Cells {
+		if v == noData {
+			continue
+		}
+		if v < min {
+			min = v
+		}
+		if v > max {
+			max = v
+		}
+	}
+	return min, max, min != math.MaxFloat64
+}
+
+func (r *Raster) Trace(heights []float64) []wkb.MultiPolygonGeom {
+	start := time.Now()
+
+	tracer := contourmap.FromFloat64s(r.XCells+2, r.YCells+2, r.Cells)
+
+	areas := make([]wkb.MultiPolygonGeom, len(heights))
+
+	reprojX := common.Linear(0.5, r.BBox.X1, 1.5, r.BBox.X1+r.CellSize)
+	reprojY := common.Linear(0.5, r.BBox.Y1, 1.5, r.BBox.Y1+r.CellSize)
+
+	var wg sync.WaitGroup
+
+	cnts := make(chan int)
+
+	doContours := func() {
+		defer wg.Done()
+		for hIdx := range cnts {
+			if c := tracer.Contours(heights[hIdx]); len(c) > 0 {
+				areas[hIdx] = buildMultipolygon(c, reprojX, reprojY)
+			}
+		}
+	}
+
+	for n := runtime.NumCPU(); n >= 1; n-- {
+		wg.Add(1)
+		go doContours()
+	}
+
+	for i := range heights {
+		cnts <- i
+	}
+	close(cnts)
+
+	wg.Wait()
+	log.Printf("info: Tracing areas took %v\n", time.Since(start))
+
+	return areas
+}
+
+type contour []contourmap.Point
+
+type bboxNode struct {
+	box      Box2D
+	cnt      contour
+	children []*bboxNode
+}
+
+func (cnt contour) contains(o contour) bool {
+	return contains(cnt, o[0].X, o[0].Y) ||
+		contains(cnt, o[len(o)/2].X, o[len(o)/2].Y)
+}
+
+func (cnt contour) length() int {
+	return len(cnt)
+}
+
+func (cnt contour) point(i int) (float64, float64) {
+	return cnt[i].X, cnt[i].Y
+}
+
+func (cnt contour) bbox() Box2D {
+	minX, minY := math.MaxFloat64, math.MaxFloat64
+	maxX, maxY := -math.MaxFloat64, -math.MaxFloat64
+
+	for _, p := range cnt {
+		if p.X < minX {
+			minX = p.X
+		}
+		if p.X > maxX {
+			maxX = p.X
+		}
+		if p.Y < minY {
+			minY = p.Y
+		}
+		if p.Y > maxY {
+			maxY = p.Y
+		}
+	}
+	return Box2D{X1: minX, X2: maxX, Y1: minY, Y2: maxY}
+}
+
+func (bn *bboxNode) insert(cnt contour, box Box2D) {
+	// check if children are inside new
+	var nr *bboxNode
+
+	for i, r := range bn.children {
+		if r.box.Inside(box) && cnt.contains(r.cnt) {
+			if nr == nil {
+				nr = &bboxNode{box: box, cnt: cnt}
+			}
+			nr.children = append(nr.children, r)
+			bn.children[i] = nil
+		}
+	}
+
+	// we have a new child
+	if nr != nil {
+		// compact the list
+		for i := len(bn.children) - 1; i >= 0; i-- {
+			if bn.children[i] == nil {
+				if i < len(bn.children)-1 {
+					copy(bn.children[i:], bn.children[i+1:])
+				}
+				bn.children[len(bn.children)-1] = nil
+				bn.children = bn.children[:len(bn.children)-1]
+			}
+		}
+		bn.children = append(bn.children, nr)
+		return
+	}
+
+	// check if new is inside an old
+	for _, r := range bn.children {
+		if box.Inside(r.box) && r.cnt.contains(cnt) {
+			r.insert(cnt, box)
+			return
+		}
+	}
+
+	// it's a new child node.
+	nr = &bboxNode{box: box, cnt: cnt}
+	bn.children = append(bn.children, nr)
+}
+
+func (bn *bboxNode) insertRoot(cnt contour) {
+	bn.insert(cnt, cnt.bbox())
+}
+
+type bboxOutFunc func(contour, []contour)
+
+func (bn *bboxNode) generate(out bboxOutFunc) {
+
+	var grands []*bboxNode
+
+	holes := make([]contour, len(bn.children))
+
+	for i, ch := range bn.children {
+		holes[i] = ch.cnt
+		grands = append(grands, ch.children...)
+	}
+	out(bn.cnt, holes)
+
+	// the grandchildren are new polygons.
+	for _, grand := range grands {
+		grand.generate(out)
+	}
+}
+
+func (bn *bboxNode) generateRoot(out bboxOutFunc) {
+	for _, r := range bn.children {
+		r.generate(out)
+	}
+}
+
+func buildMultipolygon(
+	cnts []contourmap.Contour,
+	reprojX, reprojY func(float64) float64,
+) wkb.MultiPolygonGeom {
+
+	var forest bboxNode
+
+	for _, cnt := range cnts {
+		forest.insertRoot(contour(cnt))
+	}
+
+	var mp wkb.MultiPolygonGeom
+
+	out := func(sh contour, hls []contour) {
+
+		polygon := make(wkb.PolygonGeom, 1+len(hls))
+
+		// Handle shell
+		shell := make(wkb.LinearRingGeom, len(sh))
+		for j, pt := range sh {
+			shell[j] = wkb.PointGeom{
+				X: reprojX(pt.X),
+				Y: reprojY(pt.Y),
+			}
+		}
+		if shell.CCW() {
+			shell.Reverse()
+		}
+		polygon[0] = shell
+
+		// handle holes
+		for i, hl := range hls {
+			hole := make(wkb.LinearRingGeom, len(hl))
+			for j, pt := range hl {
+				hole[j] = wkb.PointGeom{
+					X: reprojX(pt.X),
+					Y: reprojY(pt.Y),
+				}
+			}
+			if !hole.CCW() {
+				hole.Reverse()
+			}
+			polygon[1+i] = hole
+		}
+
+		mp = append(mp, polygon)
+	}
+
+	forest.generateRoot(out)
+
+	return mp
+}
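
Both call sites above (isr.go and sr.go) drive the new raster code the same way: bounding box from the mesh extent, rasterize the height model, trace the class breaks. A condensed sketch of that sequence follows; the heightModel interface is only an assumption spelling out what the call sites appear to rely on (Min/Max returning octree.Vertex, Value with the evaluator signature):

package example

import (
	"gemma.intevation.de/gemma/pkg/octree"
	"gemma.intevation.de/gemma/pkg/wkb"
)

// heightModel is an assumed minimal surface of the octree/STR-tree used at
// the call sites above.
type heightModel interface {
	Min() octree.Vertex
	Max() octree.Vertex
	Value(x, y float64) (float64, bool)
}

// traceIsoAreas condenses the NewRaster/Rasterize/Trace sequence.
func traceIsoAreas(
	tree heightModel,
	isoCellSize float64,
	heights []float64,
) []wkb.MultiPolygonGeom {
	// Bounding box of the height model in the projected CRS.
	box := octree.Box2D{
		X1: tree.Min().X, Y1: tree.Min().Y,
		X2: tree.Max().X, Y2: tree.Max().Y,
	}

	raster := octree.NewRaster(box, isoCellSize)
	raster.Rasterize(tree.Value) // average up to four samples per cell
	return raster.Trace(heights) // one multipolygon per class break
}
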
--- a/pkg/octree/simplify.go	Wed Oct 23 12:37:01 2019 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,202 +0,0 @@
-// This is Free Software under GNU Affero General Public License v >= 3.0
-// without warranty, see README.md and license for details.
-//
-// SPDX-License-Identifier: AGPL-3.0-or-later
-// License-Filename: LICENSES/AGPL-3.0.txt
-//
-// Copyright (C) 2019 by via donau
-//   – Österreichische Wasserstraßen-Gesellschaft mbH
-// Software engineering by Intevation GmbH
-//
-// Author(s):
-//  * Sascha L. Teichmann <sascha.teichmann@intevation.de>
-
-package octree
-
-import (
-	"math"
-)
-
-func handleTriangle(
-	t *Triangle,
-	maxDist, tolerance float64,
-	maxIdx int,
-	points MultiPointZ,
-	result *MultiPointZ,
-) bool {
-	if maxDist <= tolerance {
-		return false
-	}
-
-	if len(points) == 1 {
-		*result = append(*result, points[0])
-		return true
-	}
-
-	var (
-		tris     [3]Triangle
-		planes   [3]Plane3D
-		maxDists [3]float64
-		maxIdxs  [3]int
-		parts    [3]MultiPointZ
-	)
-
-	top := points[maxIdx]
-	for i := 0; i < 3; i++ {
-		tris[i] = Triangle{t[i], t[(i+1)%3], top}
-		planes[i] = tris[i].Plane3D()
-	}
-
-nextPoint:
-	for i, v := range points {
-		if i == maxIdx {
-			continue
-		}
-
-		for j := range tris {
-			if tris[j].Contains(v.X, v.Y) {
-				if dist := math.Abs(planes[j].Eval(v)); dist > maxDists[j] {
-					maxDists[j] = dist
-					maxIdxs[j] = len(parts[j])
-				}
-				parts[j] = append(parts[j], v)
-				continue nextPoint
-			}
-		}
-	}
-
-	var found bool
-	for i, part := range parts {
-		if len(part) > 0 && handleTriangle(
-			&tris[i],
-			maxDists[i], tolerance,
-			maxIdxs[i],
-			part,
-			result,
-		) {
-			found = true
-		}
-	}
-
-	if found {
-		*result = append(*result, top)
-	}
-
-	return found
-}
-
-func (points MultiPointZ) Simplify(tolerance float64) MultiPointZ {
-
-	if len(points) < 2 {
-		return points
-	}
-
-	if tolerance < 0 {
-		tolerance = -tolerance
-	}
-
-	min := Vertex{X: math.MaxFloat64, Y: math.MaxFloat64, Z: math.MaxFloat64}
-	max := Vertex{X: -math.MaxFloat64, Y: -math.MaxFloat64, Z: -math.MaxFloat64}
-
-	var maxIdx int
-
-	for i, v := range points {
-		min.Minimize(v)
-
-		if v.X < min.X {
-			min.X = v.X
-		}
-		if v.X > max.X {
-			max.X = v.X
-		}
-		if v.Y < min.Y {
-			min.Y = v.Y
-		}
-		if v.Y > max.Y {
-			max.Y = v.Y
-		}
-		if v.Z < min.Z {
-			min.Z = v.Z
-		}
-		if v.Z > max.Z {
-			max.Z = v.Z
-			maxIdx = i
-		}
-
-		max.Maximize(v)
-	}
-
-	/*
-		log.Printf("(%.5f, %.5f, %.5f) - (%.5f, %.5f, %.5f)\n",
-			min.X, min.Y, min.Z,
-			max.X, max.Y, max.Z)
-	*/
-
-	below := min.Z - 3*tolerance
-	xMin := min.X - tolerance
-	xMax := max.X + tolerance
-	yMin := min.Y - tolerance
-	yMax := max.Y + tolerance
-
-	corners := []Vertex{
-		{xMin, yMin, below},
-		{xMax, yMin, below},
-		{xMax, yMax, below},
-		{xMin, yMax, below},
-	}
-
-	top := points[maxIdx]
-
-	tris := make([]Triangle, len(corners))
-	planes := make([]Plane3D, len(corners))
-
-	for i, v1 := range corners {
-		v2 := corners[(i+1)%len(corners)]
-		tris[i] = Triangle{v1, v2, top}
-		planes[i] = tris[i].Plane3D()
-	}
-
-	parts := make([][]Vertex, len(tris))
-
-	maxDists := make([]float64, len(planes))
-	maxIdxs := make([]int, len(planes))
-
-nextPoint:
-	for i, v := range points {
-		if i == maxIdx {
-			continue
-		}
-
-		for j := range tris {
-			if tris[j].Contains(v.X, v.Y) {
-				if dist := math.Abs(planes[j].Eval(v)); dist > maxDists[j] {
-					maxDists[j] = dist
-					maxIdxs[j] = len(parts[j])
-				}
-				parts[j] = append(parts[j], v)
-				continue nextPoint
-			}
-		}
-	}
-
-	result := make(MultiPointZ, 0, len(points))
-
-	var found bool
-	for i, part := range parts {
-		if len(part) > 0 && handleTriangle(
-			&tris[i],
-			maxDists[i], tolerance,
-			maxIdxs[i],
-			part,
-			&result,
-		) {
-			found = true
-		}
-	}
-
-	if found {
-		result = append(result, top)
-	}
-
-	return result
-}
--- a/pkg/octree/slice.go	Wed Oct 23 12:37:01 2019 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,105 +0,0 @@
-// This is Free Software under GNU Affero General Public License v >= 3.0
-// without warranty, see README.md and license for details.
-//
-// SPDX-License-Identifier: AGPL-3.0-or-later
-// License-Filename: LICENSES/AGPL-3.0.txt
-//
-// Copyright (C) 2018, 2019 by via donau
-//   – Österreichische Wasserstraßen-Gesellschaft mbH
-// Software engineering by Intevation GmbH
-//
-// Author(s):
-//  * Sascha L. Teichmann <sascha.teichmann@intevation.de>
-
-package octree
-
-import (
-	"runtime"
-	"sync"
-)
-
-type PointMap map[Point]float64
-
-func (pm PointMap) Triangulate() (*Triangulation, error) {
-	vertices := make([]Vertex, len(pm))
-	var i int
-	for p, z := range pm {
-		vertices[i] = Vertex{X: p.X, Y: p.Y, Z: z}
-		i++
-	}
-	return Triangulate(vertices)
-}
-
-func sliceWork(
-	vs []Vertex,
-	dst PointMap,
-	fn func([]Vertex, func([]Vertex) []Vertex),
-) {
-	n := runtime.NumCPU()
-
-	wg := new(sync.WaitGroup)
-
-	slices := make(chan []Vertex)
-	out := make(chan []Vertex)
-
-	pool := make(chan []Vertex, n)
-
-	const pageSize = 2048
-
-	turn := func(p []Vertex) []Vertex {
-		if p != nil {
-			out <- p
-		}
-		select {
-		case p = <-pool:
-		default:
-			p = make([]Vertex, 0, pageSize)
-		}
-		return p
-	}
-
-	for i := 0; i < n; i++ {
-		wg.Add(1)
-		go func() {
-			defer wg.Done()
-			for slice := range slices {
-				fn(slice, turn)
-			}
-		}()
-	}
-	done := make(chan struct{})
-	go func() {
-		defer close(done)
-		for s := range out {
-			for i := range s {
-				v := &s[i]
-				key := Point{X: v.X, Y: v.Y}
-				if z, found := dst[key]; found {
-					dst[key] = (z + v.Z) * 0.5
-				} else {
-					dst[key] = v.Z
-				}
-			}
-			select {
-			case pool <- s[:0:pageSize]:
-			default:
-			}
-		}
-	}()
-
-	size := len(vs)/n + 1
-	for len(vs) > 0 {
-		var l int
-		if len(vs) < size {
-			l = len(vs)
-		} else {
-			l = size
-		}
-		slices <- vs[:l]
-		vs = vs[l:]
-	}
-	close(slices)
-	wg.Wait()
-	close(out)
-	<-done
-}
--- a/pkg/octree/strtree.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/octree/strtree.go	Wed Oct 30 11:31:12 2019 +0100
@@ -484,51 +484,3 @@
 	}
 	return int32(-(pos + 1))
 }
-
-func (s *STRTree) Diff(other *STRTree) PointMap {
-
-	firstVs, secondVs := s.tin.Vertices, other.tin.Vertices
-
-	result := make(PointMap, len(firstVs)+len(secondVs))
-
-	sliceWork(
-		firstVs,
-		result,
-		func(slice []Vertex, turn func([]Vertex) []Vertex) {
-			p := turn(nil)
-			for i := range slice {
-				v := &slice[i]
-				if z, found := other.Value(v.X, v.Y); found {
-					p = append(p, Vertex{v.X, v.Y, v.Z - z})
-					if len(p) == cap(p) {
-						p = turn(p)
-					}
-				}
-			}
-			if len(p) > 0 {
-				turn(p)
-			}
-		})
-
-	sliceWork(
-		secondVs,
-		result,
-		func(
-			slice []Vertex, turn func([]Vertex) []Vertex) {
-			p := turn(nil)
-			for i := range slice {
-				v := &slice[i]
-				if z, found := s.Value(v.X, v.Y); found {
-					p = append(p, Vertex{v.X, v.Y, z - v.Z})
-					if len(p) == cap(p) {
-						p = turn(p)
-					}
-				}
-			}
-			if len(p) > 0 {
-				turn(p)
-			}
-		})
-
-	return result
-}
--- a/pkg/octree/vertex.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/octree/vertex.go	Wed Oct 30 11:31:12 2019 +0100
@@ -108,6 +108,21 @@
 	}
 }
 
+func (a Box2D) Inside(b Box2D) bool {
+	return a.X1 >= b.X1 && a.X2 <= b.X2 &&
+		a.Y1 >= b.Y1 && a.Y2 <= b.Y2
+}
+
+func (a Box2D) Size() (float64, float64) {
+	return a.X2 - a.X1, a.Y2 - a.Y1
+}
+
+func (a Box2D) Empty() bool {
+	const eps = 0.0000001
+	return math.Abs(a.X2-a.X1) < eps &&
+		math.Abs(a.Y2-a.Y1) < eps
+}
+
 func (p Plane3D) Z(x, y float64) float64 {
 	// p.A*x + p.B*y + p.C*z + p.D = 0
 	return -(p.A*x + p.B*y + p.D) / p.C
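
The Box2D helpers added here support the contour nesting in the new raster.go (Inside checks whether one bounding box lies within another). A tiny usage sketch with arbitrary coordinates:

package main

import (
	"fmt"

	"gemma.intevation.de/gemma/pkg/octree"
)

func main() {
	// Arbitrary boxes, just to show the new helpers.
	outer := octree.Box2D{X1: 0, Y1: 0, X2: 10, Y2: 10}
	inner := octree.Box2D{X1: 2, Y1: 2, X2: 5, Y2: 5}

	w, h := outer.Size()
	fmt.Println(w, h)                // 10 10
	fmt.Println(inner.Inside(outer)) // true
	fmt.Println(outer.Inside(inner)) // false
	fmt.Println(inner.Empty())       // false
}
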
--- a/pkg/pgxutils/errors.go	Wed Oct 23 12:37:01 2019 +0200
+++ b/pkg/pgxutils/errors.go	Wed Oct 30 11:31:12 2019 +0100
@@ -26,7 +26,9 @@
 	foreignKeyViolation      = "23503"
 	uniqueViolation          = "23505"
 	checkViolation           = "23514"
+	exclusionViolation       = "23P01"
 	violatesRowLevelSecurity = "42501"
+	duplicateObject          = "42710"
 	noDataFound              = "P0002"
 )
 
@@ -59,6 +61,9 @@
 
 	c = http.StatusInternalServerError
 
+	// Most recent line from stacktrace contains the failed statement
+	// (SplitN needs a limit of 2 to actually isolate that line).
+	recent := strings.SplitN(err.Where, "\n", 2)[0]
+
 	switch err.Code {
 	case notNullViolation:
 		switch err.SchemaName {
@@ -94,12 +99,35 @@
 		}
 	case uniqueViolation:
 		switch err.SchemaName {
-		case "internal":
+		case "users":
+			switch err.TableName {
+			case "stretches":
+				switch err.ConstraintName {
+				case "stretches_name_staging_done_key":
+					m = "A stretch with that name already exists"
+					c = http.StatusConflict
+					return
+				}
+			}
+		case "waterway":
 			switch err.TableName {
-			case "user_profiles":
+			case "sections":
 				switch err.ConstraintName {
-				case "user_profiles_pkey":
-					m = "A user with that name already exists"
+				case "sections_name_staging_done_key":
+					m = "A section with that name already exists"
+					c = http.StatusConflict
+					return
+				}
+			}
+		}
+	case exclusionViolation:
+		switch err.SchemaName {
+		case "waterway":
+			switch err.TableName {
+			case "sections":
+				switch err.ConstraintName {
+				case "sections_name_country_excl":
+					m = "A section with that name already exists for another country"
 					c = http.StatusConflict
 					return
 				}
@@ -118,9 +146,14 @@
 				}
 			}
 		}
+	case duplicateObject:
+		switch {
+		case strings.Contains(recent, "CREATE ROLE"):
+			m = "A user with that name already exists"
+			c = http.StatusConflict
+			return
+		}
 	case noDataFound:
-		// Most recent line from stacktrace contains name of failed function
-		recent := strings.SplitN(err.Where, "\n", 1)[0]
 		switch {
 		case strings.Contains(recent, "isrsrange_points"):
 			m = "No distance mark found for at least one given ISRS Location Code"
--- a/schema/auth.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/auth.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -127,31 +127,52 @@
 -- Staging area
 -- TODO: add all relevant tables here
 
+-- In many cases it is more efficient to check "staging_done" first and
+-- thus avoid the more expensive checks for read-only access (which is
+-- allowed to all users once staging is done).
 CREATE POLICY same_country ON waterway.gauge_measurements
     FOR ALL TO waterway_admin
-    USING ((location).country_code
-        = (SELECT country FROM users.list_users WHERE username = current_user)
-    );
+    USING (staging_done
+           OR (location).country_code =
+               (SELECT country FROM users.list_users
+                WHERE username = current_user))
+    WITH CHECK ((location).country_code =
+                 (SELECT country FROM users.list_users
+                  WHERE username = current_user));
 
 CREATE POLICY same_country ON waterway.waterway_profiles
     FOR ALL TO waterway_admin
-    USING ((location).country_code = (SELECT country FROM users.list_users WHERE username = current_user));
+    USING (staging_done
+           OR (location).country_code =
+               (SELECT country FROM users.list_users
+                WHERE username = current_user))
+    WITH CHECK ((location).country_code =
+                (SELECT country FROM users.list_users
+                 WHERE username = current_user));
 
 CREATE POLICY responsibility_area ON waterway.bottlenecks
     FOR ALL TO waterway_admin
-    USING (users.utm_covers(area));
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
 
 CREATE POLICY responsibility_area ON waterway.sounding_results
     FOR ALL TO waterway_admin
-    USING (users.utm_covers(area));
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
 
 CREATE POLICY responsibility_area ON waterway.fairway_dimensions
     FOR ALL TO waterway_admin
-    USING (users.utm_covers(area));
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
 
-CREATE POLICY responsibility_area ON waterway.sections
+-- In the case of sections, differentiating between read and write
+-- access is not necessary: the country-code-based access check is
+-- quite cheap in this case and there are only (relatively) few
+-- sections in the system anyway.
+CREATE POLICY same_country ON waterway.sections
     FOR ALL TO waterway_admin
-    USING (users.utm_covers(area));
+    USING (country = (
+        SELECT country FROM users.list_users WHERE username = current_user));
 
 CREATE POLICY sys_admin ON users.stretches
     FOR ALL TO sys_admin
--- a/schema/auth_tests.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/auth_tests.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -4,7 +4,7 @@
 -- SPDX-License-Identifier: AGPL-3.0-or-later
 -- License-Filename: LICENSES/AGPL-3.0.txt
 
--- Copyright (C) 2018 by via donau
+-- Copyright (C) 2018, 2019 by via donau
 --   – Österreichische Wasserstraßen-Gesellschaft mbH
 -- Software engineering by Intevation GmbH
 
@@ -15,17 +15,6 @@
 -- pgTAP test script for privileges and RLS policies
 --
 
--- Helper function:
-CREATE OR REPLACE FUNCTION users.current_user_country()
-    RETURNS internal.user_profiles.country%TYPE
-    AS $$
-        SELECT country FROM users.list_users
-            WHERE username = current_user
-    $$
-    LANGUAGE SQL
-    STABLE PARALLEL SAFE;
-
-
 CREATE FUNCTION test_privs() RETURNS SETOF TEXT AS
 $$
 DECLARE the_schema CONSTANT varchar = 'waterway';
@@ -75,7 +64,7 @@
     'User should see templates associated to his country');
 
 SELECT ok(
-    users.current_user_country() = ALL(
+    users.user_country() = ALL(
         SELECT country FROM users.templates),
     'User should only see templates associated to his country');
 
@@ -150,6 +139,31 @@
     'Waterway admin cannot delete templates for other country');
 
 -- import management
+SET SESSION AUTHORIZATION test_sys_admin1;
+SELECT lives_ok($$
+    WITH
+    job AS (
+        INSERT INTO import.imports (kind, username, data) VALUES (
+            'test', current_user, 'test') RETURNING id),
+    log AS (
+        INSERT INTO import.import_logs (import_id, msg)
+            SELECT id, 'test' FROM job)
+    INSERT INTO import.track_imports (import_id, relation, key)
+        SELECT id, 'waterway.bottlenecks', 2 FROM job
+    $$,
+    'Sys_admin can add import job and related data');
+
+SELECT lives_ok($$
+    WITH
+    config AS (
+        INSERT INTO import.import_configuration (kind, username) VALUES (
+            'test', current_user) RETURNING id)
+    INSERT INTO import.import_configuration_attributes
+        SELECT id, 'test key', 'test value' FROM config
+    $$,
+    'Sys_admin can add import config and related data');
+
+SET SESSION AUTHORIZATION test_admin_at;
 SELECT lives_ok($$
     WITH
     job AS (
@@ -178,6 +192,20 @@
     $$,
     'Waterway admin can edit import jobs from his country only');
 
+SELECT bag_hasnt($$
+    WITH job AS (
+        UPDATE import.imports SET state = 'accepted'
+            RETURNING id, username),
+    log AS (
+        INSERT INTO import.import_logs (import_id, msg)
+            SELECT id, 'test continued' FROM job)
+    SELECT username FROM job
+    $$,
+    $$
+    SELECT username FROM users.list_users WHERE rolname = 'sys_admin'
+    $$,
+    'Waterway admin cannot edit import jobs of sys_admins');
+
 SELECT lives_ok($$
     WITH
     config AS (
@@ -201,11 +229,29 @@
             SELECT id, 'test continued', 'test value' FROM config),
     attrib_upd AS (
         UPDATE import.import_configuration_attributes SET v = 'test v'
-            WHERE import_configuration_id = (SELECT id FROM config))
+            WHERE import_configuration_id IN (SELECT id FROM config))
     SELECT username FROM config
     $$,
     'Waterway admin can edit import config from his country only');
 
+SELECT bag_hasnt($$
+    WITH config AS (
+        UPDATE import.import_configuration SET kind = 'test'
+            RETURNING id, username),
+    attrib AS (
+        INSERT INTO import.import_configuration_attributes
+            SELECT id, 'test continued 1', 'test value' FROM config),
+    attrib_upd AS (
+        UPDATE import.import_configuration_attributes SET v = 'test v'
+            WHERE import_configuration_id IN (SELECT id FROM config))
+    SELECT username FROM config
+    $$,
+    $$
+    SELECT username FROM users.list_users
+        WHERE rolname = 'sys_admin' AND country = 'RO'
+    $$,
+    'Waterway admin cannot edit import config from sys_admin');
+
 SET SESSION AUTHORIZATION test_admin_ro;
 SELECT throws_ok($$
     INSERT INTO import.import_logs (import_id, msg)
--- a/schema/default_sysconfig.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/default_sysconfig.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -128,6 +128,7 @@
             s.area,
             s.objnam,
             s.nobjnam,
+            s.country,
             s.date_info,
             s.source_organization,
             s.staging_done,
@@ -188,7 +189,7 @@
     $$),
     ('waterway', 'sounding_results_areas_geoserver', 4326, NULL, $$
         SELECT bottleneck_id,
-            date_info,
+            to_char(date_info, 'YYYY-MM-DD') AS date_info,
             height,
             areas,
             surtyp
@@ -286,8 +287,8 @@
 INSERT INTO sys_admin.system_config VALUES ('gm_forecast_offset_72h', 15);
 INSERT INTO sys_admin.system_config VALUES ('gm_forecast_vs_reality_nsc_24h', -12.5);
 INSERT INTO sys_admin.system_config VALUES ('gm_forecast_vs_reality_nsc_72h', -12.5);
-INSERT INTO sys_admin.system_config VALUES ('morphology_classbreaks', '1:#ff00dd,1.5,1.7,1.9,2.1,2.3,2.5:#f25f20,2.7,2.9,3.1:#f7e40e,3.3,3.5,4:#8ad51a,4.5,5,5.5,6,6.5,7:#1414ff');
-INSERT INTO sys_admin.system_config VALUES ('morphology_classbreaks_compare', '-2:#06b100,-1.9,-1.8,-1.7,-1.6,-1.5,-1.4,-1.3,-1.2,-1.1,-1:#1cc68e,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,-0.1,0:#c2c2c2,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1:#fff01a,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2:#f80012');
+INSERT INTO sys_admin.system_config VALUES ('morphology_classbreaks', '-2.5:#5A3300,-2,-1.5,-1,-0.5,0:#D8B050,0.5:#FF0000,1,1.5,2:#FF9999,2.5:#A6B8FA,3,3.5,4,4.5,5,6:#0101FF,7:#88DD55,8,9,10,11,12:#146E33');
+INSERT INTO sys_admin.system_config VALUES ('morphology_classbreaks_compare', '-2:#06b100,-1.8,-1.6,-1.4,-1.2,-1:#1cc68e,-0.8,-0.6,-0.4,-0.2,0:#c2c2c2,0.2,0.4,0.6,0.8,1:#fff01a,1.2,1.4,1.6,1.8,2:#f80012');
 INSERT INTO sys_admin.system_config VALUES ('bottlenecks_stroke','#fa28ff');
 INSERT INTO sys_admin.system_config VALUES ('bottlenecks_fill','#ff25c424');
 INSERT INTO sys_admin.system_config VALUES ('stretches_stroke','#fac800cc');
--- a/schema/gemma.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/gemma.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -350,13 +350,14 @@
 CREATE SCHEMA internal
     -- Profile data are only accessible via the view users.list_users.
     CREATE TABLE user_profiles (
-        username varchar PRIMARY KEY CHECK(octet_length(username) <= 63),
+        username varchar PRIMARY KEY
+            CHECK(octet_length(username) <= 63)
+            CHECK(to_regrole(quote_ident(username)) IS NOT NULL),
         -- keep username length compatible with role identifier
         country char(2) NOT NULL REFERENCES countries,
         map_extent box2d NOT NULL,
         email_address varchar NOT NULL
     )
-    -- Columns referencing user-visible schemas added below.
 ;
 
 
@@ -439,9 +440,44 @@
     )
     CREATE TRIGGER templates_date_info BEFORE UPDATE ON templates
         FOR EACH ROW EXECUTE PROCEDURE update_date_info()
+
+    CREATE VIEW list_users WITH (security_barrier) AS
+        SELECT
+            r.rolname,
+            p.username,
+            CAST('' AS varchar) AS pw,
+            p.country,
+            p.map_extent,
+            p.email_address
+        FROM internal.user_profiles p
+            JOIN pg_roles u ON p.username = u.rolname
+            JOIN pg_auth_members a ON u.oid = a.member
+            JOIN pg_roles r ON a.roleid = r.oid
+        WHERE p.username = current_user
+            OR pg_has_role('waterway_admin', 'MEMBER')
+                AND p.country = (
+                    SELECT country FROM internal.user_profiles
+                        WHERE username = current_user)
+                AND r.rolname <> 'sys_admin'
+            OR pg_has_role('sys_admin', 'MEMBER')
 ;
 
 
+--
+-- Functions to be used in DEFAULT expressions
+--
+
+-- Return current_user's country code
+CREATE FUNCTION users.user_country(user_name name DEFAULT current_user)
+    RETURNS internal.user_profiles.country%TYPE
+    AS $$
+        SELECT country FROM users.list_users
+            WHERE username = user_name
+    $$
+    LANGUAGE SQL
+    STABLE PARALLEL SAFE;
+
+
 -- Namespace for waterway data that can change in a running system
 CREATE SCHEMA waterway
 
@@ -588,9 +624,14 @@
             CHECK(ST_IsValid(CAST(area AS geometry))),
         objnam varchar NOT NULL,
         nobjnam varchar,
+        country char(2) NOT NULL REFERENCES countries
+            DEFAULT users.user_country(),
         date_info timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP,
         source_organization varchar NOT NULL,
         staging_done boolean NOT NULL DEFAULT false,
+        -- Disallow the same name for different countries
+        EXCLUDE USING GiST (name WITH =, country WITH <>),
+        -- Allow the same name at most once inside and once outside the staging area
         UNIQUE(name, staging_done)
     )
     CREATE TRIGGER sections_date_info
@@ -827,6 +868,7 @@
         state      import_state NOT NULL DEFAULT 'queued',
         kind       varchar   NOT NULL,
         enqueued   timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP,
+        changed    timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP,
         due        timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP,
         retry_wait interval
             CHECK(retry_wait IS NULL
--- a/schema/manage_users.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/manage_users.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -38,26 +38,6 @@
     LANGUAGE plpgsql;
 
 
-CREATE OR REPLACE VIEW users.list_users WITH (security_barrier) AS
-    SELECT
-            r.rolname,
-            p.username,
-            CAST('' AS varchar) AS pw,
-            p.country,
-            p.map_extent,
-            p.email_address
-        FROM internal.user_profiles p
-            JOIN pg_roles u ON p.username = u.rolname
-            JOIN pg_auth_members a ON u.oid = a.member
-            JOIN pg_roles r ON a.roleid = r.oid
-        WHERE p.username = current_user
-            OR pg_has_role('waterway_admin', 'MEMBER')
-                AND p.country = (
-                    SELECT country FROM internal.user_profiles
-                        WHERE username = current_user)
-            OR pg_has_role('sys_admin', 'MEMBER');
-
-
 CREATE OR REPLACE FUNCTION users.current_user_area_utm()
     RETURNS geometry
     AS $$
@@ -99,14 +79,20 @@
                 JOIN users.stretch_countries stc ON stc.stretch_id = st.id
             WHERE stc.country = NEW.country;
     END IF;
+
+    IF NEW.username IS NOT NULL
+    -- otherwise let the constraint on user_profiles raise the error
+    THEN
+        EXECUTE format(
+            'CREATE ROLE %I IN ROLE %I LOGIN PASSWORD %L',
+            NEW.username,
+            NEW.rolname,
+            internal.check_password(NEW.pw));
+    END IF;
+
     INSERT INTO internal.user_profiles (
         username, country, map_extent, email_address)
         VALUES (NEW.username, NEW.country, NEW.map_extent, NEW.email_address);
-    EXECUTE format(
-        'CREATE ROLE %I IN ROLE %I LOGIN PASSWORD %L',
-        NEW.username,
-        NEW.rolname,
-        internal.check_password(NEW.pw));
 
     -- Do not leak new password
     NEW.pw = '';
@@ -167,11 +153,6 @@
 BEGIN
     cur_username = OLD.username;
 
-    UPDATE internal.user_profiles p
-        SET (username, country, map_extent, email_address)
-        = (NEW.username, NEW.country, NEW.map_extent, NEW.email_address)
-        WHERE p.username = cur_username;
-
     IF NEW.username <> cur_username
     THEN
         EXECUTE format(
@@ -179,6 +160,11 @@
         cur_username = NEW.username;
     END IF;
 
+    UPDATE internal.user_profiles p
+        SET (username, country, map_extent, email_address)
+        = (NEW.username, NEW.country, NEW.map_extent, NEW.email_address)
+        WHERE p.username = cur_username;
+
     IF NEW.rolname <> OLD.rolname
     THEN
         EXECUTE format(
--- a/schema/manage_users_tests.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/manage_users_tests.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -28,7 +28,7 @@
     SELECT best_utm(ST_Collect(area::geometry))
         FROM users.stretches st
             JOIN users.stretch_countries stc ON stc.stretch_id = st.id
-        WHERE country = users.current_user_country()
+        WHERE country = users.user_country()
     $$,
     'Geometry has SRID corresponding to best_utm()');
 
@@ -103,17 +103,10 @@
 
 SELECT throws_ok($$
     INSERT INTO users.list_users VALUES (
-        'waterway_user', 'waterway_user', 'secret1$', 'AT', NULL, 'test4')
+        'waterway_user', 'test_user_at', 'secret1$', 'AT', NULL, 'test4')
     $$,
     42710, NULL,
-    'Reserved role names cannot be used as username');
-
-SELECT throws_ok($$
-    INSERT INTO users.list_users VALUES (
-        'waterway_user', 'test_user_at', 'secret1$', 'AT', NULL, 'test4')
-    $$,
-    23505, NULL,
-    'No duplicate user name is allowed');
+    'No existing role name is allowed');
 
 SELECT throws_ok($$
     INSERT INTO users.list_users VALUES (
@@ -176,7 +169,7 @@
     UPDATE users.list_users
         SET (pw, map_extent, email_address)
             = ('user_at2!', 'BOX(0 0,1 1)', 'user_at_test')
-        WHERE country = users.current_user_country()
+        WHERE country = users.user_country()
             AND username <> current_user
         RETURNING *
     $$,
@@ -271,8 +264,8 @@
                     WHERE username = 'test_user_at'), 'test4')
         WHERE username = 'test_user_at'
     $$,
-    23505, NULL,
-    'No duplicate user name is allowed');
+    42710, NULL,
+    'No existing role name is allowed');
 
 -- Test password policy (only one rule to ensure it's also used on update)
 SELECT throws_ok($$
--- a/schema/run_tests.sh	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/run_tests.sh	Wed Oct 30 11:31:12 2019 +0100
@@ -80,7 +80,7 @@
     -c 'SET client_min_messages TO WARNING' \
     -c "DROP ROLE IF EXISTS $TEST_ROLES" \
     -f "$BASEDIR"/tap_tests_data.sql \
-    -c "SELECT plan(78 + (
+    -c "SELECT plan(81 + (
             SELECT count(*)::int
                 FROM information_schema.tables
                 WHERE table_schema = 'waterway'))" \
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1307/01.improve_rolename_check.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,78 @@
+ALTER TABLE internal.user_profiles
+    ADD CHECK(to_regrole(quote_ident(username)) IS NOT NULL);
+
+CREATE OR REPLACE FUNCTION internal.create_user() RETURNS trigger
+AS $$
+BEGIN
+    IF NEW.map_extent IS NULL
+    THEN
+        NEW.map_extent = ST_Extent(CAST(area AS geometry))
+            FROM users.stretches st
+                JOIN users.stretch_countries stc ON stc.stretch_id = st.id
+            WHERE stc.country = NEW.country;
+    END IF;
+
+    IF NEW.username IS NOT NULL
+    -- otherwise let the constraint on user_profiles raise the error
+    THEN
+        EXECUTE format(
+            'CREATE ROLE %I IN ROLE %I LOGIN PASSWORD %L',
+            NEW.username,
+            NEW.rolname,
+            internal.check_password(NEW.pw));
+    END IF;
+
+    INSERT INTO internal.user_profiles (
+        username, country, map_extent, email_address)
+        VALUES (NEW.username, NEW.country, NEW.map_extent, NEW.email_address);
+
+    -- Do not leak new password
+    NEW.pw = '';
+    RETURN NEW;
+END;
+$$
+    LANGUAGE plpgsql
+    SECURITY DEFINER;
+
+CREATE OR REPLACE FUNCTION internal.update_user() RETURNS trigger
+AS $$
+DECLARE
+    cur_username varchar;
+BEGIN
+    cur_username = OLD.username;
+
+    IF NEW.username <> cur_username
+    THEN
+        EXECUTE format(
+            'ALTER ROLE %I RENAME TO %I', cur_username, NEW.username);
+        cur_username = NEW.username;
+    END IF;
+
+    UPDATE internal.user_profiles p
+        SET (username, country, map_extent, email_address)
+        = (NEW.username, NEW.country, NEW.map_extent, NEW.email_address)
+        WHERE p.username = cur_username;
+
+    IF NEW.rolname <> OLD.rolname
+    THEN
+        EXECUTE format(
+            'REVOKE %I FROM %I', OLD.rolname, cur_username);
+        EXECUTE format(
+            'GRANT %I TO %I', NEW.rolname, cur_username);
+    END IF;
+
+    IF NEW.pw IS NOT NULL AND NEW.pw <> ''
+    THEN
+        EXECUTE format(
+            'ALTER ROLE %I PASSWORD %L',
+            cur_username,
+            internal.check_password(NEW.pw));
+    END IF;
+
+    -- Do not leak new password
+    NEW.pw = '';
+    RETURN NEW;
+END;
+$$
+    LANGUAGE plpgsql
+    SECURITY DEFINER;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1308/01.add_section_country.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,33 @@
+CREATE FUNCTION users.user_country(user_name name DEFAULT current_user)
+    RETURNS internal.user_profiles.country%TYPE
+    AS $$
+        SELECT country FROM users.list_users
+            WHERE username = user_name
+    $$
+    LANGUAGE SQL
+    STABLE PARALLEL SAFE;
+
+ALTER TABLE waterway.sections ADD country char(2) REFERENCES countries
+    DEFAULT users.user_country();
+
+-- Try to find out the country of existing sections:
+UPDATE waterway.sections s SET country = (
+    SELECT COALESCE(
+        users.user_country((
+            SELECT i.username
+                FROM import.imports i
+                    JOIN import.import_logs ON i.id = import_id
+                WHERE i.kind = 'sec'
+                    AND (state = 'accepted' AND s.staging_done
+                        OR state = 'pending' AND NOT s.staging_done)
+                    AND msg = 'Storing section ''' || s.name || ''''
+                ORDER BY i.enqueued DESC FETCH FIRST ROW ONLY)),
+        '--'));
+
+ALTER TABLE waterway.sections ALTER country SET NOT NULL;
+
+DROP POLICY responsibility_area ON waterway.sections;
+CREATE POLICY same_country ON waterway.sections
+    FOR ALL TO waterway_admin
+    USING (country = (
+        SELECT country FROM users.list_users WHERE username = current_user));
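The helper introduced here is what the reworked tests call in place of users.current_user_country(). A quick sketch with a hypothetical session and user name:

    -- Hypothetical: run as an Austrian waterway_admin.
    SELECT users.user_country();                 -- => 'AT' (defaults to current_user)
    SELECT users.user_country('demo_user_at2');  -- country of another user visible in users.list_users
    -- New rows in waterway.sections pick this up via the column DEFAULT,
    -- so a section created by that admin is tagged 'AT' unless set explicitly.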
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1309/01.expose_section_country.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,88 @@
+CREATE TEMP TABLE base_views (name, def) AS VALUES (
+    'gauges_base_view', $$
+    SELECT
+        g.location,
+        isrs_asText(g.location) AS isrs_code,
+        g.objname,
+        g.geom,
+        g.applicability_from_km,
+        g.applicability_to_km,
+        g.validity,
+        g.zero_point,
+        g.geodref,
+        g.date_info,
+        g.source_organization,
+        g.erased,
+        r.rwls AS reference_water_levels,
+        wl.measure_date AS gm_measuredate,
+        wl.water_level AS gm_waterlevel,
+        wl.n AS gm_n_14d,
+        fca.forecast_accuracy_3d,
+        fca.forecast_accuracy_1d
+    FROM waterway.gauges g
+        LEFT JOIN (SELECT location, validity,
+                    json_strip_nulls(json_object_agg(
+                        coalesce(depth_reference, 'empty'), value)) AS rwls
+                FROM waterway.gauges_reference_water_levels
+                GROUP BY location, validity) AS r
+            USING (location, validity)
+        LEFT JOIN (SELECT DISTINCT ON (location)
+                    location,
+                    date_issue,
+                    measure_date,
+                    water_level,
+                    count(*) OVER (PARTITION BY location) AS n
+                FROM waterway.gauge_measurements
+                -- consider all measurements within 14 days plus a tolerance
+                WHERE measure_date
+                    >= current_timestamp - '14 days 00:15'::interval
+                ORDER BY location, measure_date DESC) AS wl
+            USING (location)
+        LEFT JOIN (SELECT DISTINCT ON (location)
+                    location,
+                    date_issue,
+                    max(acc) FILTER (WHERE measure_date
+                            <= current_timestamp + '1 day'::interval)
+                        OVER loc_date_issue AS forecast_accuracy_1d,
+                    max(acc) OVER loc_date_issue AS forecast_accuracy_3d
+                FROM (SELECT location, date_issue, measure_date,
+                        GREATEST(water_level - lower(conf_interval),
+                            upper(conf_interval) - water_level) AS acc
+                    FROM waterway.gauge_predictions
+                    -- consider predictions made within last 14 days ...
+                    WHERE date_issue
+                        >= current_timestamp - '14 days 00:15'::interval
+                        -- ... for the next three days from now
+                        AND measure_date BETWEEN current_timestamp
+                            AND current_timestamp + '3 days'::interval) AS acc
+                WINDOW loc_date_issue AS (PARTITION BY location, date_issue)
+                ORDER BY location, date_issue DESC) AS fca
+            -- Show only forecasts issued with latest measurements or later
+            ON fca.location = g.location AND fca.date_issue >= wl.date_issue
+    $$);
+
+UPDATE sys_admin.published_services SET view_def = $$
+        SELECT
+            s.id,
+            s.name,
+            (s.section).lower::varchar AS lower,
+            (s.section).upper::varchar AS upper,
+            s.area,
+            s.objnam,
+            s.nobjnam,
+            s.country,
+            s.date_info,
+            s.source_organization,
+            s.staging_done,
+            min(g.gm_measuredate) AS gm_measuredate,
+            min(g.gm_n_14d) AS gm_n_14d,
+            max(g.forecast_accuracy_3d) AS forecast_accuracy_3d,
+            max(g.forecast_accuracy_1d) AS forecast_accuracy_1d
+        FROM waterway.sections s
+            LEFT JOIN (
+    $$ || (SELECT def FROM base_views WHERE name = 'gauges_base_view') || $$
+            WHERE NOT erased) AS g
+                ON g.location <@ s.section
+        GROUP BY s.id
+    $$
+    WHERE schema = 'waterway' AND name = 'sections_geoserver'
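The sections view definition above is assembled by concatenating dollar-quoted fragments around the shared gauges_base_view text, since a $$ literal cannot be nested inside another one. A reduced, self-contained sketch of that pattern (names hypothetical):

    CREATE TEMP TABLE demo_fragments (name text, def text);
    INSERT INTO demo_fragments VALUES ('inner_query', $$ SELECT 42 AS answer $$);

    SELECT $$ SELECT * FROM ( $$
        || (SELECT def FROM demo_fragments WHERE name = 'inner_query')
        || $$ ) AS sub $$ AS assembled_view_def;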
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1310/01.import-changed.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,6 @@
+ALTER TABLE import.imports ADD COLUMN changed timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP;
+
+UPDATE import.imports imp SET
+  changed = (SELECT coalesce(max(time), imp.changed)
+             FROM import.import_logs
+             WHERE import_id = imp.id);
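The backfill sets changed to the newest log entry per import and falls back to the column default where an import has no log lines. An illustrative query over the new column (column names as used elsewhere in this changeset):

    -- Most recently active imports first, independent of enqueue order.
    SELECT id, kind, enqueued, changed
        FROM import.imports
        ORDER BY changed DESC
        FETCH FIRST 10 ROWS ONLY;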
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1311/01.improve_section_uniqueness.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,2 @@
+ALTER TABLE waterway.sections
+    ADD EXCLUDE USING GiST (name WITH =, country WITH <>)
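The exclusion constraint treats two rows as conflicting when their names are equal but their countries differ, i.e. one section name cannot span two countries; duplicates within the same country are not its concern. GiST over plain scalar columns relies on the btree_gist operator classes, which the database must therefore provide. A self-contained sketch on a throwaway table, not the real schema:

    -- CREATE EXTENSION IF NOT EXISTS btree_gist;  -- if not already installed
    CREATE TEMP TABLE demo_sections (
        name    varchar NOT NULL,
        country char(2) NOT NULL,
        EXCLUDE USING GiST (name WITH =, country WITH <>)
    );
    INSERT INTO demo_sections VALUES ('Sulina', 'RO');  -- ok
    INSERT INTO demo_sections VALUES ('Sulina', 'RO');  -- ok: same country, no conflict
    INSERT INTO demo_sections VALUES ('Sulina', 'AT');  -- rejected: same name, other country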
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1312/01.hide_sys_admins.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,19 @@
+CREATE OR REPLACE VIEW users.list_users WITH (security_barrier) AS
+    SELECT
+        r.rolname,
+        p.username,
+        CAST('' AS varchar) AS pw,
+        p.country,
+        p.map_extent,
+        p.email_address
+    FROM internal.user_profiles p
+        JOIN pg_roles u ON p.username = u.rolname
+        JOIN pg_auth_members a ON u.oid = a.member
+        JOIN pg_roles r ON a.roleid = r.oid
+    WHERE p.username = current_user
+        OR pg_has_role('waterway_admin', 'MEMBER')
+            AND p.country = (
+                SELECT country FROM internal.user_profiles
+                    WHERE username = current_user)
+            AND r.rolname <> 'sys_admin'
+        OR pg_has_role('sys_admin', 'MEMBER')
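With the added condition, a waterway_admin listing users still sees the accounts of their own country, but rows whose role is sys_admin are filtered out; sys_admins keep the unrestricted view. A hypothetical check:

    -- Run as a waterway_admin:
    SELECT rolname, username, country FROM users.list_users ORDER BY username;
    -- expected: only same-country accounts, none with rolname = 'sys_admin'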
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1313/01.optimize_area_policies.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,49 @@
+-- This is Free Software under GNU Affero General Public License v >= 3.0
+-- without warranty, see README.md and license for details.
+
+-- SPDX-License-Identifier: AGPL-3.0-or-later
+-- License-Filename: LICENSES/AGPL-3.0.txt
+
+-- Copyright (C) 2019 by via donau
+--   – Österreichische Wasserstraßen-Gesellschaft mbH
+-- Software engineering by Intevation GmbH
+
+-- Author(s):
+--  * Sascha Wilde <sascha.wilde@intevation.de>
+--  * Tom Gottfried <tom@intevation.de>
+
+
+ALTER POLICY same_country ON waterway.gauge_measurements
+    TO waterway_admin
+    USING (staging_done
+           OR (location).country_code =
+               (SELECT country FROM users.list_users
+                WHERE username = current_user))
+    WITH CHECK ((location).country_code =
+                 (SELECT country FROM users.list_users
+                  WHERE username = current_user));
+
+ALTER POLICY same_country ON waterway.waterway_profiles
+    TO waterway_admin
+    USING (staging_done
+           OR (location).country_code =
+               (SELECT country FROM users.list_users
+                WHERE username = current_user))
+    WITH CHECK ((location).country_code =
+                (SELECT country FROM users.list_users
+                 WHERE username = current_user));
+
+ALTER POLICY responsibility_area ON waterway.bottlenecks
+    TO waterway_admin
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
+
+ALTER POLICY responsibility_area ON waterway.sounding_results
+    TO waterway_admin
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
+
+ALTER POLICY responsibility_area ON waterway.fairway_dimensions
+    TO waterway_admin
+    USING (staging_done OR users.utm_covers(area))
+    WITH CHECK (users.utm_covers(area));
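If one wants to double-check the rewritten policies after applying this update, the standard pg_policies catalog view shows the resulting USING and WITH CHECK expressions; the query below is merely a convenience sketch:

    SELECT tablename, policyname, roles, qual, with_check
        FROM pg_policies
        WHERE schemaname = 'waterway'
            AND policyname IN ('same_country', 'responsibility_area')
        ORDER BY tablename, policyname;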
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/schema/updates/1314/01.fix_sr_dateinfo.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -0,0 +1,9 @@
+UPDATE sys_admin.published_services SET view_def = $$
+        SELECT bottleneck_id,
+            to_char(date_info, 'YYYY-MM-DD') AS date_info,
+            height,
+            areas,
+            surtyp
+        FROM waterway.sounding_results_iso_areas ia
+            JOIN waterway.sounding_results sr ON sr.id = ia.sounding_result_id
+    $$ WHERE name = 'sounding_results_areas_geoserver';
\ No newline at end of file
--- a/schema/version.sql	Wed Oct 23 12:37:01 2019 +0200
+++ b/schema/version.sql	Wed Oct 30 11:31:12 2019 +0100
@@ -1,1 +1,1 @@
-INSERT INTO gemma_schema_version(version) VALUES (1306);
+INSERT INTO gemma_schema_version(version) VALUES (1314);
--- a/style-templates/distance_marks_geoserver.sld-template	Wed Oct 23 12:37:01 2019 +0200
+++ b/style-templates/distance_marks_geoserver.sld-template	Wed Oct 30 11:31:12 2019 +0100
@@ -61,7 +61,7 @@
                   <sld:CssParameter name="fill-opacity">{{ .distance_marks_fill_opacity }}</sld:CssParameter>
                 </sld:Fill>
                 <sld:Stroke>
-                  <sld:CssParameter name="stroke">#5555FF</sld:CssParameter>
+                  <sld:CssParameter name="stroke">{{ .distance_marks_stroke }}</sld:CssParameter>
                   <sld:CssParameter name="stroke-width">1</sld:CssParameter>
                 </sld:Stroke>
               </sld:Mark>
--- a/style-templates/sounding_differences.sld-template	Wed Oct 23 12:37:01 2019 +0200
+++ b/style-templates/sounding_differences.sld-template	Wed Oct 30 11:31:12 2019 +0100
@@ -50,7 +50,7 @@
             </ogc:And>
           </ogc:Filter>
         {{- end }}
-        <se:MaxScaleDenominator>34e3</se:MaxScaleDenominator>
+        <se:MaxScaleDenominator>5e3</se:MaxScaleDenominator>
           <se:PolygonSymbolizer>
             <se:Fill>
               <se:SvgParameter name="fill">{{ .Color }}</se:SvgParameter>
@@ -93,7 +93,7 @@
             </ogc:And>
           </ogc:Filter>
         {{- end }}
-        <se:MinScaleDenominator>34e3</se:MinScaleDenominator>
+        <se:MinScaleDenominator>5e3</se:MinScaleDenominator>
           <se:PolygonSymbolizer>
             <se:Fill>
               <se:SvgParameter name="fill">{{ .Color }}</se:SvgParameter>