diff --git a/src/.gitattributes b/.gitattributes
similarity index 100%
rename from src/.gitattributes
rename to .gitattributes
diff --git a/build/cite/wcs10/citewcs-1.0/coverages/img_sample/usa.prj b/build/cite/wcs10/citewcs-1.0/coverages/img_sample/usa.prj
index 9612216f7fa..3d2d4d2b836 100644
--- a/build/cite/wcs10/citewcs-1.0/coverages/img_sample/usa.prj
+++ b/build/cite/wcs10/citewcs-1.0/coverages/img_sample/usa.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/build/cite/wms11/citewms-1.1/mbdemos/lib/util/wz_jsgraphics/wz_jsgraphics.js b/build/cite/wms11/citewms-1.1/mbdemos/lib/util/wz_jsgraphics/wz_jsgraphics.js
index 758c3bc2399..57fb0ff37c8 100644
--- a/build/cite/wms11/citewms-1.1/mbdemos/lib/util/wz_jsgraphics/wz_jsgraphics.js
+++ b/build/cite/wms11/citewms-1.1/mbdemos/lib/util/wz_jsgraphics/wz_jsgraphics.js
@@ -1,923 +1,923 @@
-/* This notice must be untouched at all times.
-
-wz_jsgraphics.js v. 2.3
-The latest version is available at
-http://www.walterzorn.com
-or http://www.devira.com
-or http://www.walterzorn.de
-
-Copyright (c) 2002-2004 Walter Zorn. All rights reserved.
-Created 3. 11. 2002 by Walter Zorn (Web: http://www.walterzorn.com )
-Last modified: 15. 3. 2004
-
-Performance optimizations for Internet Explorer
-by Thomas Frank and John Holdsworth.
-fillPolygon method implemented by Matthieu Haller.
-
-High Performance JavaScript Graphics Library.
-Provides methods
-- to draw lines, rectangles, ellipses, polygons
- with specifiable line thickness,
-- to fill rectangles and ellipses
-- to draw text.
-NOTE: Operations, functions and branching have rather been optimized
-to efficiency and speed than to shortness of source code.
-
-This program is free software;
-you can redistribute it and/or modify it under the terms of the
-GNU General Public License as published by the Free Software Foundation;
-either version 2 of the License, or (at your option) any later version.
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-See the GNU General Public License
-at http://www.gnu.org/copyleft/gpl.html for more details.
-*/
-
-
-var jg_ihtm, jg_ie, jg_fast, jg_dom, jg_moz,
-jg_n4 = (document.layers && typeof document.classes != "undefined");
-
-
-function chkDHTM(x, i)
-{
- x = document.body || null;
- jg_ie = x && typeof x.insertAdjacentHTML != "undefined";
- jg_dom = (x && !jg_ie &&
- typeof x.appendChild != "undefined" &&
- typeof document.createRange != "undefined" &&
- typeof (i = document.createRange()).setStartBefore != "undefined" &&
- typeof i.createContextualFragment != "undefined");
- jg_ihtm = !jg_ie && !jg_dom && x && typeof x.innerHTML != "undefined";
- jg_fast = jg_ie && document.all && !window.opera;
- jg_moz = jg_dom && typeof x.style.MozOpacity != "undefined";
-}
-
-
-function pntDoc()
-{
- this.wnd.document.write(jg_fast? this.htmRpc() : this.htm);
- this.htm = '';
-}
-
-
-function pntCnvDom()
-{
- var x = document.createRange();
- x.setStartBefore(this.cnv);
- x = x.createContextualFragment(jg_fast? this.htmRpc() : this.htm);
- this.cnv.appendChild(x);
- this.htm = '';
-}
-
-
-function pntCnvIe()
-{
- this.cnv.insertAdjacentHTML("beforeEnd", jg_fast? this.htmRpc() : this.htm);
- this.htm = '';
-}
-
-
-function pntCnvIhtm()
-{
- this.cnv.innerHTML += this.htm;
- this.htm = '';
-}
-
-
-function pntCnv()
-{
- this.htm = '';
-}
-
-
-function mkDiv(x, y, w, h)
-{
- this.htm += '
'+
+ ''+
+ '<\/div>';
+ }
+
+
+ this.clear = function()
+ {
+ this.htm = "";
+ if (this.cnv) this.cnv.innerHTML = this.defhtm;
+ };
+
+
+ this.mkOvQds = function(cx, cy, xl, xr, yt, yb, w, h)
+ {
+ this.mkDiv(xr+cx, yt+cy, w, h);
+ this.mkDiv(xr+cx, yb+cy, w, h);
+ this.mkDiv(xl+cx, yb+cy, w, h);
+ this.mkDiv(xl+cx, yt+cy, w, h);
+ };
+
+ this.setStroke(1);
+ this.setFont('verdana,geneva,helvetica,sans-serif', String.fromCharCode(0x31, 0x32, 0x70, 0x78), Font.PLAIN);
+ this.color = '#000000';
+ this.htm = '';
+ this.wnd = wnd || window;
+
+ if (!(jg_ie || jg_dom || jg_ihtm)) chkDHTM();
+ if (typeof id != 'string' || !id) this.paint = pntDoc;
+ else
+ {
+ this.cnv = document.all? (this.wnd.document.all[id] || null)
+ : document.getElementById? (this.wnd.document.getElementById(id) || null)
+ : null;
+ this.defhtm = (this.cnv && this.cnv.innerHTML)? this.cnv.innerHTML : '';
+ this.paint = jg_dom? pntCnvDom : jg_ie? pntCnvIe : jg_ihtm? pntCnvIhtm : pntCnv;
+ }
+
+ this.setPrintable(false);
+}
+
+
+
+function integer_compare(x,y)
+{
+ return (x < y) ? -1 : ((x > y)*1);
+}
+
diff --git a/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/SaveModel.js b/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/SaveModel.js
index 7f960a87c13..4eb9ebbf8cd 100644
--- a/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/SaveModel.js
+++ b/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/SaveModel.js
@@ -1,44 +1,44 @@
-/*
-Author: Mike Adair mike.adairATccrs.nrcan.gc.ca
-License: GPL as per: http://www.gnu.org/copyleft/gpl.html
-
-$Id: SaveModel.js,v 1.8 2004/11/26 15:55:20 madair1 Exp $
-*/
-
-// Ensure this object's dependancies are loaded.
-mapbuilder.loadScript(baseDir+"/widget/WidgetBase.js");
-
-/**
- * Widget which will display some anchor tags for accessing model URLs.
- * TBD: which is the prefered method to use here,
- *
- * @constructor
- * @base WidgetBase
- * @param widgetNode This widget's object node from the configuration document.
- * @param model The model that this widget is a view of.
- */
-
-function SaveModel(widgetNode, model) {
- var base = new WidgetBase(this, widgetNode, model);
-
- /**
- * Initialise params.
- * @param objRef Pointer to this SaveModel object.
- */
- this.init = function(objRef) {
- objRef.stylesheet.setParameter("modelUrl", objRef.model.url);
- }
- this.model.addListener("loadModel", this.init, this);
-
- /**
- * a listenet to set the saved model URL as the href attribute in an anchor link
- * @param objRef Pointer to this SaveModel object.
- */
- this.saveLink = function(objRef, fileUrl) {
- var modelAnchor = document.getElementById(objRef.model.id+"."+objRef.id+".modelUrl");
- modelAnchor.href = fileUrl;
- }
- this.model.addListener("modelSaved", this.saveLink, this);
-
-}
-
+/*
+Author: Mike Adair mike.adairATccrs.nrcan.gc.ca
+License: GPL as per: http://www.gnu.org/copyleft/gpl.html
+
+$Id: SaveModel.js,v 1.8 2004/11/26 15:55:20 madair1 Exp $
+*/
+
+// Ensure this object's dependancies are loaded.
+mapbuilder.loadScript(baseDir+"/widget/WidgetBase.js");
+
+/**
+ * Widget which will display some anchor tags for accessing model URLs.
+ * TBD: which is the prefered method to use here,
+ *
+ * @constructor
+ * @base WidgetBase
+ * @param widgetNode This widget's object node from the configuration document.
+ * @param model The model that this widget is a view of.
+ */
+
+function SaveModel(widgetNode, model) {
+ var base = new WidgetBase(this, widgetNode, model);
+
+ /**
+ * Initialise params.
+ * @param objRef Pointer to this SaveModel object.
+ */
+ this.init = function(objRef) {
+ objRef.stylesheet.setParameter("modelUrl", objRef.model.url);
+ }
+ this.model.addListener("loadModel", this.init, this);
+
+ /**
+ * a listenet to set the saved model URL as the href attribute in an anchor link
+ * @param objRef Pointer to this SaveModel object.
+ */
+ this.saveLink = function(objRef, fileUrl) {
+ var modelAnchor = document.getElementById(objRef.model.id+"."+objRef.id+".modelUrl");
+ modelAnchor.href = fileUrl;
+ }
+ this.model.addListener("modelSaved", this.saveLink, this);
+
+}
+
diff --git a/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/wms/WmsCapabilities.js b/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/wms/WmsCapabilities.js
index 8125bc312b9..36006143746 100644
--- a/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/wms/WmsCapabilities.js
+++ b/build/cite/wms11/citewms-1.1/mbdemos/lib/widget/wms/WmsCapabilities.js
@@ -1,27 +1,27 @@
-/**
- * Build a Web Map Context (WMC) from a Web Map Server getCapabilities response.
- * @constructor
- * @param url TBD Comment me.
- * @param node TBD Comment me.
- */
-function WMS(url, node) {
- this.url = url;
- this.node=node;
- this.wms = Sarissa.getDomDocument();
- this.wms.async = false;
- // the following two lines are needed for IE
- this.wms.setProperty("SelectionNamespaces", "xmlns:xsl='http://www.w3.org/1999/XSL/Transform'");
- this.wms.setProperty("SelectionLanguage", "XPath");
-
- url = proxyScript + "?onlineresource=" + escape(url);
- this.wms.load(url);
- this.wmsCapabilities2Context=new XslProcessor(baseDir + "/widget/wms/WMSCapabilities2Context.xsl");
-
- /**
- * Request a new URL of a WMS document.
- */
- this.paint= function() {
- var s = this.wmsCapabilities2Context.transformNode(this.wms);
- prompt(s);
- }
-}
+/**
+ * Build a Web Map Context (WMC) from a Web Map Server getCapabilities response.
+ * @constructor
+ * @param url TBD Comment me.
+ * @param node TBD Comment me.
+ */
+function WMS(url, node) {
+ this.url = url;
+ this.node=node;
+ this.wms = Sarissa.getDomDocument();
+ this.wms.async = false;
+ // the following two lines are needed for IE
+ this.wms.setProperty("SelectionNamespaces", "xmlns:xsl='http://www.w3.org/1999/XSL/Transform'");
+ this.wms.setProperty("SelectionLanguage", "XPath");
+
+ url = proxyScript + "?onlineresource=" + escape(url);
+ this.wms.load(url);
+ this.wmsCapabilities2Context=new XslProcessor(baseDir + "/widget/wms/WMSCapabilities2Context.xsl");
+
+ /**
+ * Request a new URL of a WMS document.
+ */
+ this.paint= function() {
+ var s = this.wmsCapabilities2Context.transformNode(this.wms);
+ prompt(s);
+ }
+}
diff --git a/build/cite/wms13/citewms-1.3/styles/BasicPolygon.sld b/build/cite/wms13/citewms-1.3/styles/BasicPolygon.sld
index 56471af32b4..08c0cde52b0 100644
--- a/build/cite/wms13/citewms-1.3/styles/BasicPolygon.sld
+++ b/build/cite/wms13/citewms-1.3/styles/BasicPolygon.sld
@@ -1,24 +1,24 @@
-
-
-
- Blue
-
- Blue
- Blue Polygon
- A filled polygon with outline of 2 pixel width
-
- name
-
-
-
- #0000C0
-
-
- 2
-
-
-
-
-
-
+
+
+
+ Blue
+
+ Blue
+ Blue Polygon
+ A filled polygon with outline of 2 pixel width
+
+ name
+
+
+
+ #0000C0
+
+
+ 2
+
+
+
+
+
+
\ No newline at end of file
diff --git a/build/cite/wms13/citewms-1.3/styles/Forests.sld b/build/cite/wms13/citewms-1.3/styles/Forests.sld
index d06531281a0..8812b39613a 100644
--- a/build/cite/wms13/citewms-1.3/styles/Forests.sld
+++ b/build/cite/wms13/citewms-1.3/styles/Forests.sld
@@ -1,67 +1,67 @@
-
-
-
- Default Styler
- PNG Filled Polygon
-
-
- Feature
-
- name
- External graphic is used as a pattern to fill polygon
- PNG Filled Polygon
-
-
-
-
-
- 30
-
-
- 1.0
-
-
- 0.5
-
-
- image/png
-
-
-
-
-
- #808080
-
-
- 1.0
-
-
-
-
- #000000
-
-
- butt
-
-
- miter
-
-
- 1
-
-
- 1
-
-
- 0
-
-
-
-
-
-
-
+
+
+
+ Default Styler
+ PNG Filled Polygon
+
+
+ Feature
+
+ name
+ External graphic is used as a pattern to fill polygon
+ PNG Filled Polygon
+
+
+
+
+
+ 30
+
+
+ 1.0
+
+
+ 0.5
+
+
+ image/png
+
+
+
+
+
+ #808080
+
+
+ 1.0
+
+
+
+
+ #000000
+
+
+ butt
+
+
+ miter
+
+
+ 1
+
+
+ 1
+
+
+ 0
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Autos.xml b/build/cite/wms13/citewms-1.3/www/Autos.xml
index 1f4f0b769bd..68d415b1d2b 100644
--- a/build/cite/wms13/citewms-1.3/www/Autos.xml
+++ b/build/cite/wms13/citewms-1.3/www/Autos.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- point
-
-
-
-
-
-
-
-
-
-
- CITE Autos
-
-
-
-
- 2009-01-07
-
-
- creation
-
-
-
-
-
-
- This is the cite:Autos layer from the test dataset for the TIME dimension tests in the CITE WMS 1.3.0 test suite. It contains points representing automobile locations at different points in time.
-
-
- eng
-
-
- location
-
-
-
-
-
-
- -0.00342
-
-
- 0.0029
-
-
- -0.0022
-
-
- 0.0022
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ point
+
+
+
+
+
+
+
+
+
+
+ CITE Autos
+
+
+
+
+ 2009-01-07
+
+
+ creation
+
+
+
+
+
+
+ This is the cite:Autos layer from the test dataset for the TIME dimension tests in the CITE WMS 1.3.0 test suite. It contains points representing automobile locations at different points in time.
+
+
+ eng
+
+
+ location
+
+
+
+
+
+
+ -0.00342
+
+
+ 0.0029
+
+
+ -0.0022
+
+
+ 0.0022
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/BasicPolygons.xml b/build/cite/wms13/citewms-1.3/www/BasicPolygons.xml
index 105e44f9ea8..b09ed5c851b 100644
--- a/build/cite/wms13/citewms-1.3/www/BasicPolygons.xml
+++ b/build/cite/wms13/citewms-1.3/www/BasicPolygons.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE BasicPolygons
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:BasicPolygons layer from the test dataset for the CITE WMS 1.3.0 test suite. Contains a diamond and two overlapping squares.
-
-
- eng
-
-
- boundaries
-
-
-
-
-
-
- -2.0
-
-
- 2.0
-
-
- -1.0
-
-
- 6.0
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE BasicPolygons
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:BasicPolygons layer from the test dataset for the CITE WMS 1.3.0 test suite. Contains a diamond and two overlapping squares.
+
+
+ eng
+
+
+ boundaries
+
+
+
+
+
+
+ -2.0
+
+
+ 2.0
+
+
+ -1.0
+
+
+ 6.0
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Bridges.xml b/build/cite/wms13/citewms-1.3/www/Bridges.xml
index 9ac36f49bad..6536175d735 100644
--- a/build/cite/wms13/citewms-1.3/www/Bridges.xml
+++ b/build/cite/wms13/citewms-1.3/www/Bridges.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- point
-
-
-
-
-
-
-
-
-
-
- CITE Bridges
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Bridges layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Cam Bridge.
-
-
- eng
-
-
- transportation
-
-
-
-
-
-
- 0.0001
-
-
- 0.0003
-
-
- 0.0006
-
-
- 0.0008
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ point
+
+
+
+
+
+
+
+
+
+
+ CITE Bridges
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Bridges layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Cam Bridge.
+
+
+ eng
+
+
+ transportation
+
+
+
+
+
+
+ 0.0001
+
+
+ 0.0003
+
+
+ 0.0006
+
+
+ 0.0008
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/BuildingCenters.xml b/build/cite/wms13/citewms-1.3/www/BuildingCenters.xml
index 3c7acd9dd9c..e8305a81185 100644
--- a/build/cite/wms13/citewms-1.3/www/BuildingCenters.xml
+++ b/build/cite/wms13/citewms-1.3/www/BuildingCenters.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- point
-
-
-
-
-
-
-
-
-
-
- CITE BuildingCenters
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:BuildingCenters layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the center points for the two buildings along Main Street.
-
-
- eng
-
-
- location
-
-
-
-
-
-
- 0.0008
-
-
- 0.0024
-
-
- 0.0005
-
-
- 0.0010
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ point
+
+
+
+
+
+
+
+
+
+
+ CITE BuildingCenters
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:BuildingCenters layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the center points for the two buildings along Main Street.
+
+
+ eng
+
+
+ location
+
+
+
+
+
+
+ 0.0008
+
+
+ 0.0024
+
+
+ 0.0005
+
+
+ 0.0010
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Buildings.xml b/build/cite/wms13/citewms-1.3/www/Buildings.xml
index 36b4da4336d..2112ca8c5a7 100644
--- a/build/cite/wms13/citewms-1.3/www/Buildings.xml
+++ b/build/cite/wms13/citewms-1.3/www/Buildings.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE Buildings
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Buildings layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the two buildings along Main Street.
-
-
- eng
-
-
- structure
-
-
-
-
-
-
- 0.0008
-
-
- 0.0024
-
-
- 0.0005
-
-
- 0.0010
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE Buildings
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Buildings layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the two buildings along Main Street.
+
+
+ eng
+
+
+ structure
+
+
+
+
+
+
+ 0.0008
+
+
+ 0.0024
+
+
+ 0.0005
+
+
+ 0.0010
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/DividedRoutes.xml b/build/cite/wms13/citewms-1.3/www/DividedRoutes.xml
index 3f46e4e3500..9c12c0f646c 100644
--- a/build/cite/wms13/citewms-1.3/www/DividedRoutes.xml
+++ b/build/cite/wms13/citewms-1.3/www/DividedRoutes.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- curve
-
-
-
-
-
-
-
-
-
-
- CITE DividedRoutes
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:DividedRoutes layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains both lanes of Route 75.
-
-
- eng
-
-
- transportation
-
-
-
-
-
-
- -0.0034
-
-
- -0.0026
-
-
- -0.0024
-
-
- 0.0024
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ curve
+
+
+
+
+
+
+
+
+
+
+ CITE DividedRoutes
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:DividedRoutes layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains both lanes of Route 75.
+
+
+ eng
+
+
+ transportation
+
+
+
+
+
+
+ -0.0034
+
+
+ -0.0026
+
+
+ -0.0024
+
+
+ 0.0024
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Forests.xml b/build/cite/wms13/citewms-1.3/www/Forests.xml
index 36875254385..48c983f1d8a 100644
--- a/build/cite/wms13/citewms-1.3/www/Forests.xml
+++ b/build/cite/wms13/citewms-1.3/www/Forests.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE Forests
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Forests layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the State Forest polygon.
-
-
- eng
-
-
- biota
-
-
-
-
-
-
- -0.0014
-
-
- 0.0042
-
-
- -0.0024
-
-
- 0.0018
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE Forests
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Forests layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains the State Forest polygon.
+
+
+ eng
+
+
+ biota
+
+
+
+
+
+
+ -0.0014
+
+
+ 0.0042
+
+
+ -0.0024
+
+
+ 0.0018
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Lakes.xml b/build/cite/wms13/citewms-1.3/www/Lakes.xml
index f5a7c1fc2a3..49cf9c13ffe 100644
--- a/build/cite/wms13/citewms-1.3/www/Lakes.xml
+++ b/build/cite/wms13/citewms-1.3/www/Lakes.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE Lakes
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Lakes layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Blue Lake.
-
-
- eng
-
-
- inlandWaters
-
-
-
-
-
-
- 0.0006
-
-
- 0.0032
-
-
- -0.0018
-
-
- -0.0002
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE Lakes
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Lakes layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Blue Lake.
+
+
+ eng
+
+
+ inlandWaters
+
+
+
+
+
+
+ 0.0006
+
+
+ 0.0032
+
+
+ -0.0018
+
+
+ -0.0002
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/LakesWithElevation.xml b/build/cite/wms13/citewms-1.3/www/LakesWithElevation.xml
index d69b8ef3f4b..de10fedc043 100644
--- a/build/cite/wms13/citewms-1.3/www/LakesWithElevation.xml
+++ b/build/cite/wms13/citewms-1.3/www/LakesWithElevation.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE Lakes
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Lakes layer from the test dataset for the vector elevation tests in the CITE WMS 1.3.0 test suite. It contains contains polygons representing the edge of Blue Lake at serveral depths.
-
-
- eng
-
-
- inlandWaters
-
-
-
-
-
-
- 0.0006
-
-
- 0.0032
-
-
- -0.0018
-
-
- -0.0002
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE Lakes
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Lakes layer from the test dataset for the vector elevation tests in the CITE WMS 1.3.0 test suite. It contains contains polygons representing the edge of Blue Lake at serveral depths.
+
+
+ eng
+
+
+ inlandWaters
+
+
+
+
+
+
+ 0.0006
+
+
+ 0.0032
+
+
+ -0.0018
+
+
+ -0.0002
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/MapNeatline.xml b/build/cite/wms13/citewms-1.3/www/MapNeatline.xml
index 3a23bf59e00..8a7d56a75ad 100644
--- a/build/cite/wms13/citewms-1.3/www/MapNeatline.xml
+++ b/build/cite/wms13/citewms-1.3/www/MapNeatline.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- curve
-
-
-
-
-
-
-
-
-
-
- CITE MapNeatline
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:MapNeatline layer from the test dataset for the CITE WMS 1.3.0 tests. It contains the border surrounding the Blue Lake vicinity.
-
-
- eng
-
-
- boundaries
-
-
-
-
-
-
- -0.0042
-
-
- 0.0042
-
-
- -0.0024
-
-
- 0.0024
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ curve
+
+
+
+
+
+
+
+
+
+
+ CITE MapNeatline
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:MapNeatline layer from the test dataset for the CITE WMS 1.3.0 tests. It contains the border surrounding the Blue Lake vicinity.
+
+
+ eng
+
+
+ boundaries
+
+
+
+
+
+
+ -0.0042
+
+
+ 0.0042
+
+
+ -0.0024
+
+
+ 0.0024
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/NamedPlaces.xml b/build/cite/wms13/citewms-1.3/www/NamedPlaces.xml
index 9b5599004a7..11fb5e53164 100644
--- a/build/cite/wms13/citewms-1.3/www/NamedPlaces.xml
+++ b/build/cite/wms13/citewms-1.3/www/NamedPlaces.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE NamedPlaces
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:NamedPlaces layer from the test dataset for the CITE WMS 1.3.0 tests. It contains Ashton and Goose Island.
-
-
- eng
-
-
- boundaries
-
-
-
-
-
-
- 0.0014
-
-
- 0.0042
-
-
- -0.0011
-
-
- 0.0024
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE NamedPlaces
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:NamedPlaces layer from the test dataset for the CITE WMS 1.3.0 tests. It contains Ashton and Goose Island.
+
+
+ eng
+
+
+ boundaries
+
+
+
+
+
+
+ 0.0014
+
+
+ 0.0042
+
+
+ -0.0011
+
+
+ 0.0024
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Ponds.xml b/build/cite/wms13/citewms-1.3/www/Ponds.xml
index e1450c2130e..ef5f50ba400 100644
--- a/build/cite/wms13/citewms-1.3/www/Ponds.xml
+++ b/build/cite/wms13/citewms-1.3/www/Ponds.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- surface
-
-
-
-
-
-
-
-
-
-
- CITE Ponds
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Ponds layer from the test dataset for the CITE WMS 1.3.0 tests. It contains both pools of Stock Pond.
-
-
- eng
-
-
- inlandWaters
-
-
-
-
-
-
- -0.0020
-
-
- -0.0014
-
-
- 0.0016
-
-
- 0.0020
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ surface
+
+
+
+
+
+
+
+
+
+
+ CITE Ponds
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Ponds layer from the test dataset for the CITE WMS 1.3.0 tests. It contains both pools of Stock Pond.
+
+
+ eng
+
+
+ inlandWaters
+
+
+
+
+
+
+ -0.0020
+
+
+ -0.0014
+
+
+ 0.0016
+
+
+ 0.0020
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/RoadSegments.xml b/build/cite/wms13/citewms-1.3/www/RoadSegments.xml
index 9f87da6387f..97370fafeb9 100644
--- a/build/cite/wms13/citewms-1.3/www/RoadSegments.xml
+++ b/build/cite/wms13/citewms-1.3/www/RoadSegments.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- curve
-
-
-
-
-
-
-
-
-
-
- CITE RoadSegments
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:RoadSegments layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains all the sections of Route 5, Main Street, and the dirt road.
-
-
- eng
-
-
- transportation
-
-
-
-
-
-
- -0.0042
-
-
- 0.0042
-
-
- -0.0024
-
-
- 0.0024
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ curve
+
+
+
+
+
+
+
+
+
+
+ CITE RoadSegments
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:RoadSegments layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains all the sections of Route 5, Main Street, and the dirt road.
+
+
+ eng
+
+
+ transportation
+
+
+
+
+
+
+ -0.0042
+
+
+ 0.0042
+
+
+ -0.0024
+
+
+ 0.0024
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Streams.xml b/build/cite/wms13/citewms-1.3/www/Streams.xml
index fedeb998d8c..b87de215c23 100644
--- a/build/cite/wms13/citewms-1.3/www/Streams.xml
+++ b/build/cite/wms13/citewms-1.3/www/Streams.xml
@@ -1,87 +1,87 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
-
-
- curve
-
-
-
-
-
-
-
-
-
-
- CITE Streams
-
-
-
-
- 2009-01-07
-
-
- revision
-
-
-
-
-
-
- This is the cite:Streams layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Cam Stream and the unnamed stream south of Blue Lake.
-
-
- eng
-
-
- inlandWaters
-
-
-
-
-
-
- -0.0004
-
-
- 0.0036
-
-
- -0.0024
-
-
- 0.0024
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+
+
+ curve
+
+
+
+
+
+
+
+
+
+
+ CITE Streams
+
+
+
+
+ 2009-01-07
+
+
+ revision
+
+
+
+
+
+
+ This is the cite:Streams layer from the test dataset for the CITE WMS 1.3.0 test suite. It contains Cam Stream and the unnamed stream south of Blue Lake.
+
+
+ eng
+
+
+ inlandWaters
+
+
+
+
+
+
+ -0.0004
+
+
+ 0.0036
+
+
+ -0.0024
+
+
+ 0.0024
+
+
+
+
+
+
+
+
+
diff --git a/build/cite/wms13/citewms-1.3/www/Terrain.xml b/build/cite/wms13/citewms-1.3/www/Terrain.xml
index b513734cc4d..99cb48d640f 100644
--- a/build/cite/wms13/citewms-1.3/www/Terrain.xml
+++ b/build/cite/wms13/citewms-1.3/www/Terrain.xml
@@ -1,118 +1,118 @@
-
-
-
-
-
- Open Geospatial Consortium
-
-
- originator
-
-
-
-
- 2009-03-17
-
-
-
-
- 1
-
-
-
-
- vertical
-
-
- 1
-
-
- 1
-
-
-
-
- area
-
-
- false
-
-
- false
-
-
-
- -0.5 -0.5
-
-
-
-
- 0.49833333333333333333333333333333 0.49833333333333333333333333333333
-
-
-
- lowerLeft
-
-
-
-
-
-
-
-
- CITE Terrain
-
-
-
-
- 2009-01-07
-
-
- creation
-
-
-
-
-
-
- This is the cite:Terrain layer from the test dataset for the raster elevation tests in the CITE WMS 1.3.0 test suite. It has values that range from 0 to 425m that include a "high spot" with values greater than 325m and a "low spot" with values less than 200m. The remainder of the dataset is filled with values between 200m and 325m, including a few values that are exactly 250m.
-
-
- eng
-
-
- elevation
-
-
-
-
-
-
- -0.5
-
-
- 0.5
-
-
- -0.5
-
-
- 0.5
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+ Open Geospatial Consortium
+
+
+ originator
+
+
+
+
+ 2009-03-17
+
+
+
+
+ 1
+
+
+
+
+ vertical
+
+
+ 1
+
+
+ 1
+
+
+
+
+ area
+
+
+ false
+
+
+ false
+
+
+
+ -0.5 -0.5
+
+
+
+
+ 0.49833333333333333333333333333333 0.49833333333333333333333333333333
+
+
+
+ lowerLeft
+
+
+
+
+
+
+
+
+ CITE Terrain
+
+
+
+
+ 2009-01-07
+
+
+ creation
+
+
+
+
+
+
+ This is the cite:Terrain layer from the test dataset for the raster elevation tests in the CITE WMS 1.3.0 test suite. It has values that range from 0 to 425m that include a "high spot" with values greater than 325m and a "low spot" with values less than 200m. The remainder of the dataset is filled with values between 200m and 325m, including a few values that are exactly 250m.
+
+
+ eng
+
+
+ elevation
+
+
+
+
+
+
+ -0.5
+
+
+ 0.5
+
+
+ -0.5
+
+
+ 0.5
+
+
+
+
+
+
+
+
+
diff --git a/data/citensg-1.0/styles/raster_with_scales.sld b/data/citensg-1.0/styles/raster_with_scales.sld
index a61e195e97b..73580e331fa 100644
--- a/data/citensg-1.0/styles/raster_with_scales.sld
+++ b/data/citensg-1.0/styles/raster_with_scales.sld
@@ -1,22 +1,22 @@
-
-
-
- raster_layer
-
- raster
- Opaque Raster
- A sample style for rasters visible between 300000 and 1000000 map scale
-
- Feature
-
- 300000
- 1000000
-
- 1.0
-
-
-
-
-
+
+
+
+ raster_layer
+
+ raster
+ Opaque Raster
+ A sample style for rasters visible between 300000 and 1000000 map scale
+
+ Feature
+
+ 300000
+ 1000000
+
+ 1.0
+
+
+
+
+
\ No newline at end of file
diff --git a/data/citensg-1.0/workspaces/cite_wmts_10/styles/Default_style.sld b/data/citensg-1.0/workspaces/cite_wmts_10/styles/Default_style.sld
index 7c2c230e984..43e64353456 100644
--- a/data/citensg-1.0/workspaces/cite_wmts_10/styles/Default_style.sld
+++ b/data/citensg-1.0/workspaces/cite_wmts_10/styles/Default_style.sld
@@ -1,20 +1,20 @@
-
-
-
- Default_style
-
- Default_style
- A boring default style
- A sample style for rasters
-
- Feature
-
-
- 1.0
-
-
-
-
-
+
+
+
+ Default_style
+
+ Default_style
+ A boring default style
+ A sample style for rasters
+
+ Feature
+
+
+ 1.0
+
+
+
+
+
\ No newline at end of file
diff --git a/data/release/coverages/arc_sample/precip30min.prj b/data/release/coverages/arc_sample/precip30min.prj
index 2051f9e4e1a..41844c0a090 100644
--- a/data/release/coverages/arc_sample/precip30min.prj
+++ b/data/release/coverages/arc_sample/precip30min.prj
@@ -1,9 +1,9 @@
- GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+ GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/img_sample/usa.prj b/data/release/coverages/img_sample/usa.prj
index 9612216f7fa..3d2d4d2b836 100644
--- a/data/release/coverages/img_sample/usa.prj
+++ b/data/release/coverages/img_sample/usa.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_0.pgw b/data/release/coverages/mosaic_sample/global_mosaic_0.pgw
index 9c17b085d94..91299c7c910 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_0.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_0.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-6.375141924963005
-38.491372862272705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+6.375141924963005
+38.491372862272705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_0.prj b/data/release/coverages/mosaic_sample/global_mosaic_0.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_0.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_0.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_1.pgw b/data/release/coverages/mosaic_sample/global_mosaic_1.pgw
index 1b856af4dc5..48c4bb19a00 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_1.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_1.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-9.271843573824427
-38.491372862272705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+9.271843573824427
+38.491372862272705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_1.prj b/data/release/coverages/mosaic_sample/global_mosaic_1.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_1.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_1.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_10.pgw b/data/release/coverages/mosaic_sample/global_mosaic_10.pgw
index 6f402ef4ea3..86d39348241 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_10.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_10.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-6.375141924963005
-42.530970923550704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+6.375141924963005
+42.530970923550704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_10.prj b/data/release/coverages/mosaic_sample/global_mosaic_10.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_10.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_10.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_11.pgw b/data/release/coverages/mosaic_sample/global_mosaic_11.pgw
index 49570a2fa4e..d0cb1c6d0be 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_11.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_11.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-9.271843573824427
-42.530970923550704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+9.271843573824427
+42.530970923550704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_11.prj b/data/release/coverages/mosaic_sample/global_mosaic_11.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_11.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_11.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_12.pgw b/data/release/coverages/mosaic_sample/global_mosaic_12.pgw
index dbaa7ba2ce5..66e4e418036 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_12.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_12.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-12.168545222685848
-42.530970923550704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+12.168545222685848
+42.530970923550704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_12.prj b/data/release/coverages/mosaic_sample/global_mosaic_12.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_12.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_12.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_13.pgw b/data/release/coverages/mosaic_sample/global_mosaic_13.pgw
index 5ae587d3899..a6008d0f10b 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_13.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_13.pgw
@@ -1,6 +1,6 @@
-0.0579340329772284
-0.0
-0.0
--0.04039598061277999
-15.06524687154727
-42.530970923550704
+0.0579340329772284
+0.0
+0.0
+-0.04039598061277999
+15.06524687154727
+42.530970923550704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_13.prj b/data/release/coverages/mosaic_sample/global_mosaic_13.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_13.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_13.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_14.pgw b/data/release/coverages/mosaic_sample/global_mosaic_14.pgw
index a2ead6af5fc..40b265e573e 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_14.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_14.pgw
@@ -1,6 +1,6 @@
-0.05793403297722847
-0.0
-0.0
--0.04039598061277999
-17.96194852040869
-42.530970923550704
+0.05793403297722847
+0.0
+0.0
+-0.04039598061277999
+17.96194852040869
+42.530970923550704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_14.prj b/data/release/coverages/mosaic_sample/global_mosaic_14.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_14.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_14.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_15.pgw b/data/release/coverages/mosaic_sample/global_mosaic_15.pgw
index 19ed00a97b9..451d2776769 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_15.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_15.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-6.375141924963005
-44.550769954189704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+6.375141924963005
+44.550769954189704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_15.prj b/data/release/coverages/mosaic_sample/global_mosaic_15.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_15.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_15.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_16.pgw b/data/release/coverages/mosaic_sample/global_mosaic_16.pgw
index bcf3f5e7f3a..eadd170f15d 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_16.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_16.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-9.271843573824427
-44.550769954189704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+9.271843573824427
+44.550769954189704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_16.prj b/data/release/coverages/mosaic_sample/global_mosaic_16.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_16.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_16.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_17.pgw b/data/release/coverages/mosaic_sample/global_mosaic_17.pgw
index 6e5b27ebbc3..4b6005c27de 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_17.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_17.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-12.168545222685848
-44.550769954189704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+12.168545222685848
+44.550769954189704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_17.prj b/data/release/coverages/mosaic_sample/global_mosaic_17.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_17.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_17.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_18.pgw b/data/release/coverages/mosaic_sample/global_mosaic_18.pgw
index 8bb26ac6a41..ee91f085bdd 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_18.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_18.pgw
@@ -1,6 +1,6 @@
-0.0579340329772284
-0.0
-0.0
--0.04039598061277999
-15.06524687154727
-44.550769954189704
+0.0579340329772284
+0.0
+0.0
+-0.04039598061277999
+15.06524687154727
+44.550769954189704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_18.prj b/data/release/coverages/mosaic_sample/global_mosaic_18.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_18.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_18.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_19.pgw b/data/release/coverages/mosaic_sample/global_mosaic_19.pgw
index 61f8ed61c88..febce4ea548 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_19.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_19.pgw
@@ -1,6 +1,6 @@
-0.05793403297722847
-0.0
-0.0
--0.04039598061277999
-17.96194852040869
-44.550769954189704
+0.05793403297722847
+0.0
+0.0
+-0.04039598061277999
+17.96194852040869
+44.550769954189704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_19.prj b/data/release/coverages/mosaic_sample/global_mosaic_19.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_19.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_19.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_2.pgw b/data/release/coverages/mosaic_sample/global_mosaic_2.pgw
index 34b05a51a52..3da5eae95a1 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_2.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_2.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-12.168545222685848
-38.491372862272705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+12.168545222685848
+38.491372862272705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_2.prj b/data/release/coverages/mosaic_sample/global_mosaic_2.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_2.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_2.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_20.pgw b/data/release/coverages/mosaic_sample/global_mosaic_20.pgw
index 5ab5e404b06..bbf96375cfa 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_20.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_20.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-6.375141924963005
-46.570568984828704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+6.375141924963005
+46.570568984828704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_20.prj b/data/release/coverages/mosaic_sample/global_mosaic_20.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_20.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_20.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_21.pgw b/data/release/coverages/mosaic_sample/global_mosaic_21.pgw
index b98048e2501..377d3a18c33 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_21.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_21.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-9.271843573824427
-46.570568984828704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+9.271843573824427
+46.570568984828704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_21.prj b/data/release/coverages/mosaic_sample/global_mosaic_21.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_21.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_21.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_22.pgw b/data/release/coverages/mosaic_sample/global_mosaic_22.pgw
index a9e22e22a53..5d3847a103b 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_22.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_22.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-12.168545222685848
-46.570568984828704
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+12.168545222685848
+46.570568984828704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_22.prj b/data/release/coverages/mosaic_sample/global_mosaic_22.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_22.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_22.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_23.pgw b/data/release/coverages/mosaic_sample/global_mosaic_23.pgw
index f8050467bdb..de6a430b7a4 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_23.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_23.pgw
@@ -1,6 +1,6 @@
-0.0579340329772284
-0.0
-0.0
--0.04039598061277999
-15.06524687154727
-46.570568984828704
+0.0579340329772284
+0.0
+0.0
+-0.04039598061277999
+15.06524687154727
+46.570568984828704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_23.prj b/data/release/coverages/mosaic_sample/global_mosaic_23.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_23.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_23.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_24.pgw b/data/release/coverages/mosaic_sample/global_mosaic_24.pgw
index 9e94502203b..8a1684e78ee 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_24.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_24.pgw
@@ -1,6 +1,6 @@
-0.05793403297722847
-0.0
-0.0
--0.04039598061277999
-17.96194852040869
-46.570568984828704
+0.05793403297722847
+0.0
+0.0
+-0.04039598061277999
+17.96194852040869
+46.570568984828704
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_24.prj b/data/release/coverages/mosaic_sample/global_mosaic_24.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_24.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_24.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_3.pgw b/data/release/coverages/mosaic_sample/global_mosaic_3.pgw
index 0a56cb45cab..c4e3bafa41b 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_3.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_3.pgw
@@ -1,6 +1,6 @@
-0.0579340329772284
-0.0
-0.0
--0.04039598061277999
-15.06524687154727
-38.491372862272705
+0.0579340329772284
+0.0
+0.0
+-0.04039598061277999
+15.06524687154727
+38.491372862272705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_3.prj b/data/release/coverages/mosaic_sample/global_mosaic_3.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_3.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_3.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_4.pgw b/data/release/coverages/mosaic_sample/global_mosaic_4.pgw
index ec6a6a854fa..47fffc86cec 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_4.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_4.pgw
@@ -1,6 +1,6 @@
-0.05793403297722847
-0.0
-0.0
--0.04039598061277999
-17.96194852040869
-38.491372862272705
+0.05793403297722847
+0.0
+0.0
+-0.04039598061277999
+17.96194852040869
+38.491372862272705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_4.prj b/data/release/coverages/mosaic_sample/global_mosaic_4.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_4.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_4.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_5.pgw b/data/release/coverages/mosaic_sample/global_mosaic_5.pgw
index 16516b5338f..7444b35b873 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_5.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_5.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-6.375141924963005
-40.511171892911705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+6.375141924963005
+40.511171892911705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_5.prj b/data/release/coverages/mosaic_sample/global_mosaic_5.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_5.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_5.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_6.pgw b/data/release/coverages/mosaic_sample/global_mosaic_6.pgw
index 48f082649a8..018545b41fc 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_6.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_6.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-9.271843573824427
-40.511171892911705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+9.271843573824427
+40.511171892911705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_6.prj b/data/release/coverages/mosaic_sample/global_mosaic_6.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_6.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_6.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_7.pgw b/data/release/coverages/mosaic_sample/global_mosaic_7.pgw
index dfcdf8de663..9b70a4fdd16 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_7.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_7.pgw
@@ -1,6 +1,6 @@
-0.057934032977228433
-0.0
-0.0
--0.04039598061277999
-12.168545222685848
-40.511171892911705
+0.057934032977228433
+0.0
+0.0
+-0.04039598061277999
+12.168545222685848
+40.511171892911705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_7.prj b/data/release/coverages/mosaic_sample/global_mosaic_7.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_7.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_7.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_8.pgw b/data/release/coverages/mosaic_sample/global_mosaic_8.pgw
index bbd1b45809e..b50b21c14b3 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_8.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_8.pgw
@@ -1,6 +1,6 @@
-0.0579340329772284
-0.0
-0.0
--0.04039598061277999
-15.06524687154727
-40.511171892911705
+0.0579340329772284
+0.0
+0.0
+-0.04039598061277999
+15.06524687154727
+40.511171892911705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_8.prj b/data/release/coverages/mosaic_sample/global_mosaic_8.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_8.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_8.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_9.pgw b/data/release/coverages/mosaic_sample/global_mosaic_9.pgw
index 606f237eb54..12f9b43f205 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_9.pgw
+++ b/data/release/coverages/mosaic_sample/global_mosaic_9.pgw
@@ -1,6 +1,6 @@
-0.05793403297722847
-0.0
-0.0
--0.04039598061277999
-17.96194852040869
-40.511171892911705
+0.05793403297722847
+0.0
+0.0
+-0.04039598061277999
+17.96194852040869
+40.511171892911705
diff --git a/data/release/coverages/mosaic_sample/global_mosaic_9.prj b/data/release/coverages/mosaic_sample/global_mosaic_9.prj
index 60c0b45b355..7cb3bb426d9 100644
--- a/data/release/coverages/mosaic_sample/global_mosaic_9.prj
+++ b/data/release/coverages/mosaic_sample/global_mosaic_9.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/coverages/mosaic_sample/mosaic.prj b/data/release/coverages/mosaic_sample/mosaic.prj
index 187c589b0db..88167c9581a 100644
--- a/data/release/coverages/mosaic_sample/mosaic.prj
+++ b/data/release/coverages/mosaic_sample/mosaic.prj
@@ -1,9 +1,9 @@
-GEOGCS["WGS 84",
- DATUM["World Geodetic System 1984",
- SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
- AUTHORITY["EPSG","6326"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
+GEOGCS["WGS 84",
+ DATUM["World Geodetic System 1984",
+ SPHEROID["WGS 84", 6378137.0, 298.257223563, AUTHORITY["EPSG","7030"]],
+ AUTHORITY["EPSG","6326"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
AUTHORITY["EPSG","4326"]]
\ No newline at end of file
diff --git a/data/release/styles/burg.sld b/data/release/styles/burg.sld
index 29fc0bf54af..f0329472bbf 100644
--- a/data/release/styles/burg.sld
+++ b/data/release/styles/burg.sld
@@ -1,31 +1,31 @@
-
-
-
- redflag
-
- burg
- A small red flag
- A sample of how to use an SVG based symbolizer
-
-
-
- Red flag
-
-
-
-
- image/svg+xml
-
-
- 20
-
-
-
-
-
-
-
-
-
+
+
+
+ redflag
+
+ burg
+ A small red flag
+ A sample of how to use an SVG based symbolizer
+
+
+
+ Red flag
+
+
+
+
+ image/svg+xml
+
+
+ 20
+
+
+
+
+
+
+
+
+
diff --git a/doc/en/developer/source/programming-guide/app-schema/index.rst b/doc/en/developer/source/programming-guide/app-schema/index.rst
index 670955d4c8e..4a0cce27328 100644
--- a/doc/en/developer/source/programming-guide/app-schema/index.rst
+++ b/doc/en/developer/source/programming-guide/app-schema/index.rst
@@ -1,171 +1,171 @@
-.. _app-schema_online_tests:
-
-App-Schema Online Tests
-=======================
-
-The offline tests in app-schema-test suite use properties files as data source. In reality, properties files are only used as testing means, whereas in production, users would use databases as data source. Users would often encounter problems/bugs that cannot be recreated using properties files, which raises the need to run with a test database. Moreover, Niels' joining support to increase performance can only be tested online, and we need to ensure that it works with the current features/bug fixes covered in the tests.
-
-Prerequisites
--------------
-
-This requires installation of Oracle driver in Maven repository::
-
- mvn install:install-file -Dfile=ojdbc7.jar -DgroupId=com.oracle -DartifactId=ojdbc7 -Dversion=12.1.0.2 -Dpackaging=jar
-
-You would also need to have test databases for both Oracle and Postgis. Then follow these steps:
-
-* Create oracle.properties and postgis.properties in {user directory}/.geoserver directory.
-
-* Populate each properties file with database details, e.g.::
-
- password=onlinetestuser
-
- passwd=onlinetestuser
-
- user=onlinetestuser
-
- port=5432
-
- url=jdbc\:postgresql\://localhost:5432/onlinetest
-
- host=localhost
-
- database=onlinetest
-
- driver=org.postgresql.Driver
-
- dbtype=postgisng
-
-Running tests from Maven
-------------------------
-
-Without specifying any profile, the default Maven configuration for app-schema-test is to run offline tests only.
-
-To run online tests, enable the profile::
-
- -Papp-schema-online-test
-
-This profile enables the data reference set tests and offline tests to run online. Data reference set tests are online tests based on data and use cases from GeoScience Victoria. Each is explicit for a database type (Oracle and Postgis) and has a copy to run with joining enabled.
-
-The offline tests are configured to run online with joining through separate modules for each database: app-schema-oracle-test and app-schema-postgis-test. These modules are placeholders for pom.xml files containing database specific parameters. This makes it easy to identify when a test fails with a particular database when running from Maven/buildbot.
-
-Memory requirements
-```````````````````
-
-The online tests require more memory than usual, so specifying the usual -Dtest.maxHeapSize=256m is not enough. Specify --Dtest.maxHeapSize=1024m instead.
-
-When the build is successful, you would see this in the "Reactor Summary"::
-
- [INFO] Application Schema Integration Online Test with Oracle Database SUCCESS [5:52.980s]
- [INFO] Application Schema Integration Online Test with Postgis Database SUCCESS [1:42.428s]
-
-Running tests from JUnit
-------------------------
-
-There is no need to import the online test modules as they are empty and you cannot run the tests through them in Eclipse.
-
-To run offline tests (in app-schema-test/src/test/java/org/geoserver/test) with a test database,
-enable joining and specify the database. Add these parameters in VM Arguments for postgis::
-
- -Dapp-schema.joining=true -DtestDatabase=postgis -Xmx256m
-
-Similarly, to test with oracle::
-
- -Dapp-schema.joining=true -DtestDatabase=oracle -Xmx256m
-
-Additionally for Oracle, you also need to add ojdbc14.jar in the test Classpath.
-
-.. note:: Please note that you should only run the tests in org.geoserver.test package with the above parameters, since the data reference tests in org.geoserver.test.onlineTest package contain non-joining tests which would fail.
-
-You do not need to specify these VM Arguments for running data reference tests (in app-schema-test/src/test/java/org/geoserver/test/onlineTest). However, you would still need to specify the Oracle JDBC driver in the Classpath for Oracle specific tests. Data reference tests package also requires 768m memory to run from JUnit.
-
-Adding new tests
-----------------
-
-When adding new tests to app-schema-test suite (except for onlineTest package for data reference tests), please note the following:
-
-Test offline only
-`````````````````
-
-If your test is a special case and does not need to be tested online, exclude them in both app-schema-oracle-test and app-schema-postgis-test pom.xml and ignore the points beyond this. Otherwise, read on.
-
-idExpression
-````````````
-
-If your test database does not use primary keys, ensure idExpression is specified for the top level element in your mapping file.
-
-Multi-valued properties ordering
-````````````````````````````````
-
-When testing multi-valued properties, the order of the values could vary depending on the data source type. To be safe, compare your values as a list, instead of evaluating individual xpath node against a single value for such properties. E.g.::
-
- List names = new ArrayList();
- names.add("New Group");
- names.add("-Xy");
- String name = evaluate("//gsml:MappedFeature[@gml:id='" + id
- + "']/gsml:specification/gsml:GeologicUnit/gml:name[1]", doc);
- assertTrue(names.contains(name));
- names.remove(name);
- name = evaluate("//gsml:MappedFeature[@gml:id='" + id
- + "']/gsml:specification/gsml:GeologicUnit/gml:name[2]", doc);
- assertTrue(names.contains(name));
- names.remove(name);
- assertTrue(names.isEmpty());
-
-This is because of the difference in the handling of queries with joining. Joining uses order by when querying tables. When the tests run offline, property data store returns data from properties file unordered.
-
-When joining is enabled:
-
-* If the multi-valued properties are not feature chained, the order is unpredictable.
-
-* If the multi-valued properties are feature chained, they are ordered by the foreign key used in feature chaining.
-
-Column names in upper case
-``````````````````````````
-
-Ensure column names in mapping files are in upper case, even if they are in lower case in the properties file. This is to avoid failures with Oracle database, due to OracleDialect not wrapping names with escape characters. To work around this, the script for online tests creates the columns in upper case, therefore should be referred by with upper case.
-
-Functions in feature chaining
-`````````````````````````````
-
-If using feature chaining, avoid using functions in sourceExpression for linking attributes, i.e. attribute used in both OCQL and linkField. This is because functions used in feature chaining are not supported with joining support.
-
-3D tests
-````````
-There are a number of tests that try out 3D features in App-schema. To run these as online tests against a postgis or oracle database, a number of prerequisites must be met.
-
-For PostGIS:
-
- * You must use postgis 2 to support 3D.
- * In your postgis, if it hasn't been done yet, this command must be executed to support srid 4979 (wgs84 with 3d)::
-
- INSERT into spatial_ref_sys (srid, auth_name, auth_srid, proj4text, srtext) values ( 4979, 'epsg', 4979, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ', 'GEOGCS["WGS 84",DATUM["World Geodetic System 1984",SPHEROID["WGS 84",6378137.0,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0.0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.017453292519943295],AXIS["Geodetic latitude",NORTH],AXIS["Geodetic longitude",EAST],AXIS["Ellipsoidal height",UP],AUTHORITY["EPSG","4979"]]');
-
-
-For Oracle:
-
- * You must use Oracle 11g Release 2, preferably the latest version that can be downloaded for best 3D support
- * Oracle does NOT support WKT parsing of 3d geometries, so some extra DBA work is needed to set this up. Otherwise the online tests, which rely on WKT to enter data in the database, will fail.
-
- You need the following package 'SC4O' (Spatial Companion for Oracle), created Simon Greener: download at http://www.spatialdbadvisor.com/files/SC4O.zip.
- It has an installation script for linux and windows that must be run from the server that runs oracle. The package will provide JTS functionality that can be called from PL/SQL.
-
- If the online test user is different from the user used for the installation of the package, the online test user must be given permission to use the package.
- You must also execute as an admin user the following command (with 'onlinetestuser' being the online test user)::
-
- CALL DBMS_JAVA.GRANT_PERMISSION('onlinetestuser','java.lang.RuntimePermission','getClassLoader','');
-
- Afterwards, you have to specify the user where the SC4O package was installed to the online testing system. You do this by specifying the system property -DSC4OUser. If it is the same as the online test user, you can omit this parameter.
- The online test will use the JTS method for wkt parsing (ST_GeomFromEWKT) rather than the regular oracle method SDO_GEOMETRY.
- For example, I installed the package using the System user. Then I gave onlinetestuser permission to execute it.
- I run the tests with -DSC4OUser=System so it knows to use the System.SC4O.ST_GeomFromEWKT method.
-
-Running MongoDB Online Tests
-----------------------------
-
-MongoDB online tests are activated by the ``app-schema-online-test`` profile and will run if configuration file ``{user directory}/.geoserver/mongodb.properties`` is available. If the configuration file is not available an example file will be created and tests will be skipped. The content of the configuration file should look like this::
-
- mongo.port=27017
- mongo.host=127.0.0.1
-
+.. _app-schema_online_tests:
+
+App-Schema Online Tests
+=======================
+
+The offline tests in the app-schema-test suite use properties files as the data source. Properties files are only a testing convenience; in production, users work against databases, and they often encounter problems or bugs that cannot be reproduced with properties files, which raises the need to run the tests against a test database. Moreover, Niels' joining support for better performance can only be tested online, and we need to ensure that it works with the features and bug fixes covered by the tests.
+
+Prerequisites
+-------------
+
+This requires installing the Oracle JDBC driver into your local Maven repository::
+
+ mvn install:install-file -Dfile=ojdbc7.jar -DgroupId=com.oracle -DartifactId=ojdbc7 -Dversion=12.1.0.2 -Dpackaging=jar
+
+You also need test databases for both Oracle and PostGIS. Then follow these steps:
+
+* Create oracle.properties and postgis.properties in the {user directory}/.geoserver directory.
+
+* Populate each properties file with database details, e.g.::
+
+ password=onlinetestuser
+
+ passwd=onlinetestuser
+
+ user=onlinetestuser
+
+ port=5432
+
+ url=jdbc\:postgresql\://localhost:5432/onlinetest
+
+ host=localhost
+
+ database=onlinetest
+
+ driver=org.postgresql.Driver
+
+ dbtype=postgisng
+
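+For Oracle, the oracle.properties file follows the same pattern; a rough sketch (connection details are placeholders and the exact keys depend on your fixture setup)::
+
+ password=onlinetestuser
+
+ passwd=onlinetestuser
+
+ user=onlinetestuser
+
+ port=1521
+
+ url=jdbc\:oracle\:thin\:@localhost\:1521\:onlinetest
+
+ host=localhost
+
+ database=onlinetest
+
+ driver=oracle.jdbc.OracleDriver
+
+ dbtype=oracle
+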
+Running tests from Maven
+------------------------
+
+Without specifying any profile, the default Maven configuration for app-schema-test is to run offline tests only.
+
+To run online tests, enable the profile::
+
+ -Papp-schema-online-test
+
+This profile enables the data reference set tests and allows the offline tests to run online. Data reference set tests are online tests based on data and use cases from GeoScience Victoria. Each is specific to a database type (Oracle or PostGIS) and has a copy that runs with joining enabled.
+
+The offline tests are configured to run online with joining through separate modules for each database: app-schema-oracle-test and app-schema-postgis-test. These modules are placeholders for pom.xml files containing database-specific parameters. This makes it easy to identify which database a test fails against when running from Maven/buildbot.
+
+Memory requirements
+```````````````````
+
+The online tests require more memory than usual, so the usual -Dtest.maxHeapSize=256m is not enough. Specify -Dtest.maxHeapSize=1024m instead.
+
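+For example, a full online run could then be launched as follows (a sketch; run it from the app-schema module of your checkout)::
+
+   mvn clean install -Papp-schema-online-test -Dtest.maxHeapSize=1024m
+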
+When the build is successful, you would see this in the "Reactor Summary"::
+
+ [INFO] Application Schema Integration Online Test with Oracle Database SUCCESS [5:52.980s]
+ [INFO] Application Schema Integration Online Test with Postgis Database SUCCESS [1:42.428s]
+
+Running tests from JUnit
+------------------------
+
+There is no need to import the online test modules into Eclipse: they are empty, and you cannot run the tests through them.
+
+To run the offline tests (in app-schema-test/src/test/java/org/geoserver/test) with a test database,
+enable joining and specify the database. Add these parameters to the VM arguments for PostGIS::
+
+ -Dapp-schema.joining=true -DtestDatabase=postgis -Xmx256m
+
+Similarly, to test with oracle::
+
+ -Dapp-schema.joining=true -DtestDatabase=oracle -Xmx256m
+
+Additionally, for Oracle you also need to add ojdbc14.jar to the test classpath.
+
+.. note:: Only run the tests in the org.geoserver.test package with the above parameters; the data reference tests in the org.geoserver.test.onlineTest package contain non-joining tests that would fail.
+
+You do not need to specify these VM arguments to run the data reference tests (in app-schema-test/src/test/java/org/geoserver/test/onlineTest). However, you still need the Oracle JDBC driver on the classpath for the Oracle-specific tests. The data reference tests package also requires 768m of memory to run from JUnit.
+
+Adding new tests
+----------------
+
+When adding new tests to the app-schema-test suite (except for the onlineTest package for data reference tests), please note the following:
+
+Test offline only
+`````````````````
+
+If your test is a special case and does not need to be tested online, exclude it in both the app-schema-oracle-test and app-schema-postgis-test pom.xml files and ignore the remaining points. Otherwise, read on.
+
+idExpression
+````````````
+
+If your test database does not use primary keys, ensure idExpression is specified for the top level element in your mapping file.
+
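+For example, a top-level mapping for a hypothetical gsml:MappedFeature type could declare an id expression as follows (element and column names are illustrative only)::
+
+    <AttributeMapping>
+        <targetAttribute>gsml:MappedFeature</targetAttribute>
+        <idExpression>
+            <OCQL>ID</OCQL>
+        </idExpression>
+    </AttributeMapping>
+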
+Multi-valued properties ordering
+````````````````````````````````
+
+When testing multi-valued properties, the order of the values can vary depending on the data source type. To be safe, compare the values as a list instead of evaluating each individual XPath node against a single expected value. E.g.::
+
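+ // expected values, which may come back in any order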
+ List<String> names = new ArrayList<String>();
+ names.add("New Group");
+ names.add("-Xy");
+ String name = evaluate("//gsml:MappedFeature[@gml:id='" + id
+ + "']/gsml:specification/gsml:GeologicUnit/gml:name[1]", doc);
+ assertTrue(names.contains(name));
+ names.remove(name);
+ name = evaluate("//gsml:MappedFeature[@gml:id='" + id
+ + "']/gsml:specification/gsml:GeologicUnit/gml:name[2]", doc);
+ assertTrue(names.contains(name));
+ names.remove(name);
+ assertTrue(names.isEmpty());
+
+This is because queries are handled differently when joining is enabled: joining uses ORDER BY when querying tables, whereas when the tests run offline the property data store returns the data from the properties file unordered.
+
+When joining is enabled:
+
+* If the multi-valued properties are not feature chained, the order is unpredictable.
+
+* If the multi-valued properties are feature chained, they are ordered by the foreign key used in feature chaining.
+
+Column names in upper case
+``````````````````````````
+
+Ensure column names in mapping files are in upper case, even if they are in lower case in the properties file. This avoids failures with Oracle, where OracleDialect does not wrap names with escape characters. To work around this, the script for the online tests creates the columns in upper case, so they must be referred to in upper case.
+
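+For example, if the properties file declares a column as ``name``, a mapping attribute should still reference it in upper case (attribute and column names here are purely illustrative)::
+
+    <AttributeMapping>
+        <targetAttribute>gml:name</targetAttribute>
+        <sourceExpression>
+            <OCQL>NAME</OCQL>
+        </sourceExpression>
+    </AttributeMapping>
+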
+Functions in feature chaining
+`````````````````````````````
+
+If using feature chaining, avoid using functions in the sourceExpression of linking attributes, i.e. attributes used in both OCQL and linkField. Functions on such attributes are not supported when joining is enabled.
+
+3D tests
+````````
+There are a number of tests that exercise 3D features in app-schema. To run these as online tests against a PostGIS or Oracle database, a number of prerequisites must be met.
+
+For PostGIS:
+
+ * You must use PostGIS 2 to get 3D support.
+ * If it has not been done yet, execute this command in your PostGIS database to add support for SRID 4979 (WGS 84 with 3D)::
+
+ INSERT into spatial_ref_sys (srid, auth_name, auth_srid, proj4text, srtext) values ( 4979, 'epsg', 4979, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ', 'GEOGCS["WGS 84",DATUM["World Geodetic System 1984",SPHEROID["WGS 84",6378137.0,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0.0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.017453292519943295],AXIS["Geodetic latitude",NORTH],AXIS["Geodetic longitude",EAST],AXIS["Ellipsoidal height",UP],AUTHORITY["EPSG","4979"]]');
+
+
+For Oracle:
+
+ * You must use Oracle 11g Release 2, preferably the latest downloadable version, for the best 3D support.
+ * Oracle does NOT support WKT parsing of 3D geometries, so some extra DBA work is needed to set this up. Otherwise the online tests, which rely on WKT to enter data into the database, will fail.
+
+ You need the 'SC4O' (Spatial Companion for Oracle) package, created by Simon Greener: download it at http://www.spatialdbadvisor.com/files/SC4O.zip.
+ It has installation scripts for Linux and Windows that must be run from the server that runs Oracle. The package provides JTS functionality that can be called from PL/SQL.
+
+ If the online test user is different from the user used for the installation of the package, the online test user must be given permission to use the package.
+ You must also execute the following command as an admin user (with 'onlinetestuser' being the online test user)::
+
+ CALL DBMS_JAVA.GRANT_PERMISSION('onlinetestuser','java.lang.RuntimePermission','getClassLoader','');
+
+ Afterwards, you have to tell the online testing system which user the SC4O package was installed under. You do this by specifying the system property -DSC4OUser. If it is the same as the online test user, you can omit this parameter.
+ The online tests will use the JTS method for WKT parsing (ST_GeomFromEWKT) rather than the regular Oracle method SDO_GEOMETRY.
+ For example, if the package was installed as the System user and onlinetestuser was given permission to execute it,
+ run the tests with -DSC4OUser=System so that the System.SC4O.ST_GeomFromEWKT method is used.
+
+Running MongoDB Online Tests
+----------------------------
+
+MongoDB online tests are activated by the ``app-schema-online-test`` profile and will run if the configuration file ``{user directory}/.geoserver/mongodb.properties`` is available. If the configuration file is not available, an example file will be created and the tests will be skipped. The content of the configuration file should look like this::
+
+ mongo.port=27017
+ mongo.host=127.0.0.1
+
During the tests a new database will be created in MongoDB and when the tests end that database will be removed.
\ No newline at end of file
diff --git a/doc/en/developer/source/programming-guide/index.rst b/doc/en/developer/source/programming-guide/index.rst
index 52084f19688..fe6c2c8425a 100644
--- a/doc/en/developer/source/programming-guide/index.rst
+++ b/doc/en/developer/source/programming-guide/index.rst
@@ -1,18 +1,18 @@
-.. _programming_guide:
-
-Programming Guide
-=================
-
-.. toctree::
- :maxdepth: 1
-
- ows-services/index.rst
- rest-services/index.rst
- web-ui/index.rst
- wicket-pages/index.rst
- extension-points/index.rst
- wps-services/index.rst
- testing/index.rst
- security/index.rst
- app-schema/index.rst
-
+.. _programming_guide:
+
+Programming Guide
+=================
+
+.. toctree::
+ :maxdepth: 1
+
+ ows-services/index.rst
+ rest-services/index.rst
+ web-ui/index.rst
+ wicket-pages/index.rst
+ extension-points/index.rst
+ wps-services/index.rst
+ testing/index.rst
+ security/index.rst
+ app-schema/index.rst
+
diff --git a/doc/en/user/source/community/csw-iso/index.rst b/doc/en/user/source/community/csw-iso/index.rst
index 890bb68e972..743af30832e 100644
--- a/doc/en/user/source/community/csw-iso/index.rst
+++ b/doc/en/user/source/community/csw-iso/index.rst
@@ -1,13 +1,13 @@
-.. _csw_iso:
-
-Catalog Services for the Web (CSW) - ISO Metadata Profile
-=========================================================
-
-This section discusses the Catalog Services for Web (CSW) ISO Metadata Profile community module. With this community module on top of the general :ref:`csw` extension, GeoServer supports the ISO Metadata Profile as an additional scheme for the CSW service.
-
-.. toctree::
- :maxdepth: 2
-
- installing
- mapping
- tutorial
+.. _csw_iso:
+
+Catalog Services for the Web (CSW) - ISO Metadata Profile
+=========================================================
+
+This section discusses the Catalog Services for Web (CSW) ISO Metadata Profile community module. With this community module on top of the general :ref:`csw` extension, GeoServer supports the ISO Metadata Profile as an additional scheme for the CSW service.
+
+.. toctree::
+ :maxdepth: 2
+
+ installing
+ mapping
+ tutorial
diff --git a/doc/en/user/source/community/csw-iso/tutorial.rst b/doc/en/user/source/community/csw-iso/tutorial.rst
index 8d2f32c510c..15044d06bc2 100644
--- a/doc/en/user/source/community/csw-iso/tutorial.rst
+++ b/doc/en/user/source/community/csw-iso/tutorial.rst
@@ -1,174 +1,174 @@
-.. _csw_iso_tutorial:
-
-Catalog Services for the Web (CSW) ISO Metadata tutorial
-========================================================
-
-This tutorial will show how to use the CSW module with the ISO Metadata Profile scheme. It assumes a fresh installation of GeoServer with the :ref:`CSW ISO Metadata Profile module installed `.
-
-Configuration
--------------
-
-In the :file:`/csw` directory, create a new file named :file:`MD_Metadata.properties` (ISO Metadata Profile mapping file) with the following contents::
-
- @fileIdentifier.CharacterString=prefixedName
- identificationInfo.AbstractMD_Identification.citation.CI_Citation.title.CharacterString=title
- identificationInfo.AbstractMD_Identification.descriptiveKeywords.MD_Keywords.keyword.CharacterString=keywords
- identificationInfo.AbstractMD_Identification.abstract.CharacterString=abstract
- $dateStamp.Date= if_then_else ( isNull("metadata.date") , 'Unknown', "metadata.date")
- hierarchyLevel.MD_ScopeCode.@codeListValue='http://purl.org/dc/dcmitype/Dataset'
- $contact.CI_ResponsibleParty.individualName.CharacterString='John Smith'
-
-Services
---------
-
-With GeoServer running (and responding on ``http://localhost:8080``), test GeoServer CSW in a web browser by querying the CSW capabilities as follows::
-
- http://localhost:8080/geoserver/csw?service=csw&version=2.0.2&request=GetCapabilities
-
-We can request a description of our Metadata record::
-
- http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=DescribeRecord&typeName=gmd:MD_Metadata
-
-This yields the following result::
-
-
-
-
-
-
-
- Geographic MetaData (GMD) extensible markup language is a component of the XML Schema Implementation of Geographic Information Metadata documented in ISO/TS 19139:2007. GMD includes all the definitions of http://www.isotc211.org/2005/gmd namespace. The root document of this namespace is the file gmd.xsd. This identification.xsd schema implements the UML conceptual schema defined in A.2.2 of ISO 19115:2003. It contains the implementation of the following classes: MD_Identification, MD_BrowseGraphic, MD_DataIdentification, MD_ServiceIdentification, MD_RepresentativeFraction, MD_Usage, MD_Keywords, DS_Association, MD_AggregateInformation, MD_CharacterSetCode, MD_SpatialRepresentationTypeCode, MD_TopicCategoryCode, MD_ProgressCode, MD_KeywordTypeCode, DS_AssociationTypeCode, DS_InitiativeTypeCode, MD_ResolutionType.
-
- ...
-
-Query all layers as follows::
-
- http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecords&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd
-
-Request a particular layer by ID...::
-
- http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecordById&elementsetname=summary&id=CoverageInfoImpl--4a9eec43:132d48aac79:-8000&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd
-
-...or use a filter to retrieve it by Title::
-
- http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecords&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd&constraint=Title=%27mosaic%27
-
-Either case should return::
-
-
-
-
-
-
-
- CoverageInfoImpl--4a9eec43:132d48aac79:-8000
-
-
- Unknown
-
-
-
-
-
-
-
- 36.492
- 6.346
- 46.591
- 20.83
-
-
-
-
-
-
-
-
-
- mosaic
-
-
-
-
-
-
- WCS
-
-
- ImageMosaic
-
-
- mosaic
-
-
-
-
-
-
-
-
- John Smith
-
-
-
-
-
-
-
-
-
-
-We can request the domain of a property. For example, all values of "Title"::
-
- http://localhost:8080/geoserver/csw?service=csw&version=2.0.2&request=GetDomain&propertyName=Title
-
-This should yield the following result::
-
-
-
-
- Title
-
- A sample ArcGrid file
- Manhattan (NY) landmarks
- Manhattan (NY) points of interest
- Manhattan (NY) roads
- North America sample imagery
- Pk50095 is a A raster file accompanied by a spatial data file
- Spearfish archeological sites
- Spearfish bug locations
- Spearfish restricted areas
- Spearfish roads
- Spearfish streams
- Tasmania cities
- Tasmania roads
- Tasmania state boundaries
- Tasmania water bodies
- USA Population
- World rectangle
- mosaic
- sfdem is a Tagged Image File Format with Geographic information
-
-
-
-
-To request more than the first 10 records or for more complex queries you can use a HTTP POST request with an XML query as the request body. For example, using the maxRecords option in the following request it is possible to return the first 50 layers with "ImageMosaic" in a keyword::
-
- http://localhost:8080/geoserver/csw
-
-Postbody::
-
-
-
-
- full
-
-
-
- dc:subject
- %ImageMosaic%
-
-
-
-
-
+.. _csw_iso_tutorial:
+
+Catalog Services for the Web (CSW) ISO Metadata tutorial
+========================================================
+
+This tutorial will show how to use the CSW module with the ISO Metadata Profile scheme. It assumes a fresh installation of GeoServer with the :ref:`CSW ISO Metadata Profile module installed `.
+
+Configuration
+-------------
+
+In the :file:`/csw` directory, create a new file named :file:`MD_Metadata.properties` (ISO Metadata Profile mapping file) with the following contents::
+
+ @fileIdentifier.CharacterString=prefixedName
+ identificationInfo.AbstractMD_Identification.citation.CI_Citation.title.CharacterString=title
+ identificationInfo.AbstractMD_Identification.descriptiveKeywords.MD_Keywords.keyword.CharacterString=keywords
+ identificationInfo.AbstractMD_Identification.abstract.CharacterString=abstract
+ $dateStamp.Date= if_then_else ( isNull("metadata.date") , 'Unknown', "metadata.date")
+ hierarchyLevel.MD_ScopeCode.@codeListValue='http://purl.org/dc/dcmitype/Dataset'
+ $contact.CI_ResponsibleParty.individualName.CharacterString='John Smith'
+
+Services
+--------
+
+With GeoServer running (and responding on ``http://localhost:8080``), test GeoServer CSW in a web browser by querying the CSW capabilities as follows::
+
+ http://localhost:8080/geoserver/csw?service=csw&version=2.0.2&request=GetCapabilities
+
+We can request a description of our Metadata record::
+
+ http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=DescribeRecord&typeName=gmd:MD_Metadata
+
+This yields the following result::
+
+
+
+
+
+
+
+ Geographic MetaData (GMD) extensible markup language is a component of the XML Schema Implementation of Geographic Information Metadata documented in ISO/TS 19139:2007. GMD includes all the definitions of http://www.isotc211.org/2005/gmd namespace. The root document of this namespace is the file gmd.xsd. This identification.xsd schema implements the UML conceptual schema defined in A.2.2 of ISO 19115:2003. It contains the implementation of the following classes: MD_Identification, MD_BrowseGraphic, MD_DataIdentification, MD_ServiceIdentification, MD_RepresentativeFraction, MD_Usage, MD_Keywords, DS_Association, MD_AggregateInformation, MD_CharacterSetCode, MD_SpatialRepresentationTypeCode, MD_TopicCategoryCode, MD_ProgressCode, MD_KeywordTypeCode, DS_AssociationTypeCode, DS_InitiativeTypeCode, MD_ResolutionType.
+
+ ...
+
+Query all layers as follows::
+
+ http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecords&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd
+
+Request a particular layer by ID...::
+
+ http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecordById&elementsetname=summary&id=CoverageInfoImpl--4a9eec43:132d48aac79:-8000&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd
+
+...or use a filter to retrieve it by Title::
+
+ http://localhost:8080/geoserver/csw?service=CSW&version=2.0.2&request=GetRecords&typeNames=gmd:MD_Metadata&resultType=results&elementSetName=full&outputSchema=http://www.isotc211.org/2005/gmd&constraint=Title=%27mosaic%27
+
+Either case should return::
+
+
+
+
+
+
+
+ CoverageInfoImpl--4a9eec43:132d48aac79:-8000
+
+
+ Unknown
+
+
+
+
+
+
+
+ 36.492
+ 6.346
+ 46.591
+ 20.83
+
+
+
+
+
+
+
+
+
+ mosaic
+
+
+
+
+
+
+ WCS
+
+
+ ImageMosaic
+
+
+ mosaic
+
+
+
+
+
+
+
+
+ John Smith
+
+
+
+
+
+
+
+
+
+
+We can request the domain of a property. For example, all values of "Title"::
+
+ http://localhost:8080/geoserver/csw?service=csw&version=2.0.2&request=GetDomain&propertyName=Title
+
+This should yield the following result::
+
+
+
+
+ Title
+
+ A sample ArcGrid file
+ Manhattan (NY) landmarks
+ Manhattan (NY) points of interest
+ Manhattan (NY) roads
+ North America sample imagery
+ Pk50095 is a A raster file accompanied by a spatial data file
+ Spearfish archeological sites
+ Spearfish bug locations
+ Spearfish restricted areas
+ Spearfish roads
+ Spearfish streams
+ Tasmania cities
+ Tasmania roads
+ Tasmania state boundaries
+ Tasmania water bodies
+ USA Population
+ World rectangle
+ mosaic
+ sfdem is a Tagged Image File Format with Geographic information
+
+
+
+
+To request more than the first 10 records, or for more complex queries, you can use an HTTP POST request with an XML query as the request body. For example, using the maxRecords option in the following request it is possible to return the first 50 layers whose keywords contain "ImageMosaic"::
+
+ http://localhost:8080/geoserver/csw
+
+POST body (a ``csw:GetRecords`` request with an ``ogc:PropertyIsLike`` filter on ``dc:subject``)::
+
+  <csw:GetRecords xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
+      xmlns:ogc="http://www.opengis.net/ogc"
+      service="CSW" version="2.0.2" resultType="results"
+      startPosition="1" maxRecords="50"
+      outputSchema="http://www.isotc211.org/2005/gmd">
+    <csw:Query typeNames="gmd:MD_Metadata">
+      <csw:ElementSetName>full</csw:ElementSetName>
+      <csw:Constraint version="1.1.0">
+        <ogc:Filter>
+          <ogc:PropertyIsLike wildCard="%" singleChar="_" escapeChar="\">
+            <ogc:PropertyName>dc:subject</ogc:PropertyName>
+            <ogc:Literal>%ImageMosaic%</ogc:Literal>
+          </ogc:PropertyIsLike>
+        </ogc:Filter>
+      </csw:Constraint>
+    </csw:Query>
+  </csw:GetRecords>
+
diff --git a/doc/en/user/source/community/dds/index.rst b/doc/en/user/source/community/dds/index.rst
index 4059bd73696..f76a68e0215 100644
--- a/doc/en/user/source/community/dds/index.rst
+++ b/doc/en/user/source/community/dds/index.rst
@@ -1,70 +1,70 @@
-.. _community_dds:
-
-DDS/BIL(World Wind Data Formats) Extension
-==========================================
-
-This output module allows GeoServer to output imagery and terrain in formats
-understood by `NASA World Wind `_. The
-mime-types supported are:
-
- #. Direct Draw Surface (DDS) - image/dds. This format allows efficient loading of textures to the GPU and takes the task off the WorldWind client CPU in converting downloaded PNG, JPEG or TIFF tiles. The DDS compression is done using `DXT3 `_ with help from the worldwind library on server side.
-
- #. Binary Interleaved by Line(BIL) - image/bil. This is actually a very simple raw binary format produced using the `RAW Image Writer `_. The supplied GridCoverage2D undergoes appropriate subsampling, reprojection and bit-depth conversion. The output can be requested as 16bit Int or 32bit Float.
-
-
-Installing the DDS/BIL extension
------------------------------------
-
- #. Download the DDS/BIL extension from the `nightly GeoServer community module builds `_. A prebuilt version for GeoServer 2.0.x can be found on Jira - :geos:`3586`.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
- #. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Checking if the extension is enabled
-------------------------------------
-
-Once the extension is installed, the provided mime-types should appear in the layer preview dropbox as shown:
-
-.. figure:: images/bil_dds.jpg
- :align: center
-
-The mime-types will also be listed in the ``GetCapabilities`` document::
-
-image/bil
-image/dds
-
-Configuring the BIL format
-------------------------------------
-
-For a client application to use a BIL layer, it must know the data encoding of the BIL file (e.g. 16-bit integer, 32-bit floating point, etc), the byte order of the data, and the value that indicates missing data. BIL files do not contain this metadata, so it may be necessary to configure the server to produce BIL files in the format that a client application expects.
-
-.. figure:: images/bil_config.png
- :align: center
-
-The BIL output format can be configured for each layer in the Publishing tab of the layer configuration. The plugin supports the following options:
-
-.. list-table::
- :widths: 50 50
-
- * - **Option**
- - **Description**
- * - ``Default encoding``
- - The data encoding to use if the request does not specify an encoding. For example, application/bil does not specify the response encoding, while application/bil16 does specify an encoding. Default: use same encoding as layer source files.
- * - ``Byte order``
- - Byte order of the response. Default: network byte order (big endian).
- * - ``No Data value``
- - The value that indicates missing data. If this option is set, missing data values will be recoded to this value. Default: no data translation.
-
-For compatibility with the default behavior of NASA World Wind, use these settings:
-
-* Default encoding: application/bil16
-* Byte order: Little endian
-* No data: -9999
-
-Configuring World Wind to access Imagery/Terrain from GeoServer
----------------------------------------------------------------
-
-Please refer to the `WorldWind Forums `_ for instructions on how to setup World Wind to work with layers
-published via GeoServer. For image layers(DDS) the user need to create a `WMSTiledImageLayer `_ either via XML configuration or programmatically.
-For terrain layers (BIL) the equivalent class is `WMSBasicElevationModel `_.
+.. _community_dds:
+
+DDS/BIL(World Wind Data Formats) Extension
+==========================================
+
+This output module allows GeoServer to output imagery and terrain in formats
+understood by `NASA World Wind `_. The
+mime-types supported are:
+
+ #. Direct Draw Surface (DDS) - image/dds. This format allows efficient loading of textures to the GPU and takes the task of converting downloaded PNG, JPEG or TIFF tiles off the WorldWind client CPU. The DDS compression is done using `DXT3 `_ with help from the WorldWind library on the server side.
+
+ #. Binary Interleaved by Line (BIL) - image/bil. This is a very simple raw binary format produced using the `RAW Image Writer `_. The supplied GridCoverage2D undergoes appropriate subsampling, reprojection and bit-depth conversion. The output can be requested as 16-bit integer or 32-bit floating point.
+
+
+Installing the DDS/BIL extension
+-----------------------------------
+
+ #. Download the DDS/BIL extension from the `nightly GeoServer community module builds `_. A prebuilt version for GeoServer 2.0.x can be found on Jira - :geos:`3586`.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+ #. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Checking if the extension is enabled
+------------------------------------
+
+Once the extension is installed, the provided mime-types should appear in the layer preview format drop-down as shown:
+
+.. figure:: images/bil_dds.jpg
+ :align: center
+
+The mime-types will also be listed in the ``GetCapabilities`` document::
+
+   image/bil
+   image/dds
+
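+To double-check the output itself, you can issue a WMS GetMap request using one of these formats; a sketch (the layer name and bounding box are placeholders, substitute one of your own raster layers)::
+
+   http://localhost:8080/geoserver/wms?service=WMS&version=1.1.1&request=GetMap&layers=myWorkspace:myDem&styles=&bbox=-180,-90,180,90&width=512&height=256&srs=EPSG:4326&format=image/bil
+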
+Configuring the BIL format
+------------------------------------
+
+For a client application to use a BIL layer, it must know the data encoding of the BIL file (e.g. 16-bit integer, 32-bit floating point, etc), the byte order of the data, and the value that indicates missing data. BIL files do not contain this metadata, so it may be necessary to configure the server to produce BIL files in the format that a client application expects.
+
+.. figure:: images/bil_config.png
+ :align: center
+
+The BIL output format can be configured for each layer in the Publishing tab of the layer configuration. The plugin supports the following options:
+
+.. list-table::
+ :widths: 50 50
+
+ * - **Option**
+ - **Description**
+ * - ``Default encoding``
+ - The data encoding to use if the request does not specify an encoding. For example, application/bil does not specify the response encoding, while application/bil16 does specify an encoding. Default: use same encoding as layer source files.
+ * - ``Byte order``
+ - Byte order of the response. Default: network byte order (big endian).
+ * - ``No Data value``
+ - The value that indicates missing data. If this option is set, missing data values will be recoded to this value. Default: no data translation.
+
+For compatibility with the default behavior of NASA World Wind, use these settings:
+
+* Default encoding: application/bil16
+* Byte order: Little endian
+* No data: -9999
+
+Configuring World Wind to access Imagery/Terrain from GeoServer
+---------------------------------------------------------------
+
+Please refer to the `WorldWind Forums `_ for instructions on how to set up World Wind to work with layers
+published via GeoServer. For image layers (DDS) the user needs to create a `WMSTiledImageLayer `_ either via XML configuration or programmatically.
+For terrain layers (BIL) the equivalent class is `WMSBasicElevationModel `_.
diff --git a/doc/en/user/source/community/flatgeobuf/index.rst b/doc/en/user/source/community/flatgeobuf/index.rst
index f246a42b82f..6c5b9634622 100644
--- a/doc/en/user/source/community/flatgeobuf/index.rst
+++ b/doc/en/user/source/community/flatgeobuf/index.rst
@@ -1,11 +1,11 @@
-.. _flatgeobuf:
-
-WFS FlatGeobuf output format
-============================
-
-This section discusses the WFS FlatGeobuf output format.
-
-.. toctree::
- :maxdepth: 2
-
- installing
+.. _flatgeobuf:
+
+WFS FlatGeobuf output format
+============================
+
+This section discusses the WFS FlatGeobuf output format.
+
+.. toctree::
+ :maxdepth: 2
+
+ installing
diff --git a/doc/en/user/source/community/gdal/index.rst b/doc/en/user/source/community/gdal/index.rst
index 09b7ab34bc4..6f0d10778fe 100644
--- a/doc/en/user/source/community/gdal/index.rst
+++ b/doc/en/user/source/community/gdal/index.rst
@@ -1,125 +1,125 @@
-.. _gdal_wcs_output_format:
-
-GDAL based WCS Output Format
-============================
-
-The gdal_translate based output format leverages the availability of the gdal_translate command to allow the generation of more output formats than GeoServer can natively produce.
-The basic idea is to dump to the file system a file that gdal_translate can translate, invoke it, zip and return the output of the translation.
-
-This extension is thus the equivalent of the :ref:`OGR extension ` for raster data.
-
-
-Out of the box behaviour
-------------------------
-
-Out of the box the plugin assumes the following:
-
-* gdal_translate is available in the path
-* the GDAL_DATA variable is pointing to the GDAL data directory (which stores the spatial reference information for GDAL)
-
-In the default configuration the following formats are supported:
-
-* JPEG-2000 part 1 (ISO/IEC 15444-1)
-* Geospatial PDF
-* Arc/Info ASCII Grid
-* ASCII Gridded XYZ
-
-The list might be shorter if gdal_translate has not been built with support for the above formats (for example, the default JPEG-2000 format relies on the `JasPer-based GDAL driver `_).
-
-Once installed in GeoServer, a bunch of new supported formats will be listed in the ``ServiceMetadata`` section of the WCS 2.0 GetCapabilities document, e.g. ``image/jp2`` and ``application/pdf``.
-
-gdal_translate conversion abilities
------------------------------------
-
-The gdal_translate utility is usually able to convert more formats than the default setup of this output format allows for, but the exact list depends on how the utility was built from sources. To get a full list of the formats available by your ogr2ogr build just run::
-
- gdal_translate --long-usage
-
-and you'll get the full set of options usable by the program, along with the supported formats.
-
-.. include:: usage_example.txt
-
-The full list of formats that gdal_translate is able to support is available on the `GDAL site `_. Mind that this output format can handle only outputs that are file based and that do support creation. So, for example, you won't be able to use the PostGIS Raster output (since it's database based) or the Arc/Info Binary Grid (creation not supported).
-
-Customisation
--------------
-
-If gdal_translate is not available in the default path, the GDAL_DATA environment variable is not set, or if the output formats needs tweaking, a ``gdal_translate.xml`` configuration file can be created to customize the output format. The file should be put inside a ``gdal`` folder in the root of the GeoServer data directory.
-
-.. note:: GeoServer will automatically detect any change to the file and reload the configuration, without a need to restart.
-
-
-The default configuration is equivalent to the following xml file:
-
-.. code-block:: xml
-
-
- gdal_translate
-
-
-
-
-
- JPEG2000
- GDAL-JPEG2000
- .jp2
- true
- image/jp2
- binary
-
-
-
-
- PDF
- GDAL-PDF
- .pdf
- true
- application/pdf
-
-
-
-
-
-
- AAIGrid
- GDAL-ArcInfoGrid
- .asc
- false
-
-
- XYZ
- GDAL-XYZ
- .txt
- true
- text/plain
- text
-
-
-
-
-The file showcases all possible usage of the configuration elements:
-
-* ``executable`` can be just gdal_translate if the command is in the path, otherwise it should be the full path to the executable. For example, on a Linux box with a custom build GDAL library might be::
-
- /usr/local/bin/gdal_translate
-
-* ``environment`` contains a list of ``variable`` elements, which can be used to define environment variables that should be set prior to invoking gdal_translate. For example, to setup a GDAL_DATA environment variable pointing to the GDAL data directory, the configuration might be::
-
-
-
-
-
-* ``Format`` defines a single format, which is defined by the following tags:
-
- * ``toolFormat``: the name of the format to be passed to gdal_translate with the -of option (case insensitive).
- * ``geoserverFormat``: is the name of the output format as advertised by GeoServer
- * ``fileExtension``: is the extension of the file generated after the translation, if any (can be omitted)
- * ``option``: can be used to add one or more options to the gdal_translate command line. As you can see by the JPEG2000 example, each item must be contained in its own ``option`` tag. You can get a full list of options by running ``gdal_translate --help`` or by visiting the `GDAL web site `_). Also, consider that each format supports specific creation options, listed in the description page for each format (for example, here is the `JPEG2000 one `_).
- * ``singleFile``: if true the output of the conversion is supposed to be a single file that can be streamed directly back without the need to wrap it into a zip file
- * ``mimeType``: the mime type of the file returned when using ``singleFile``. If not specified ``application/octet-stream`` will be used as a default.
- * ``formatAdapters``: transformations on the coverage that might need to be applied in order to successfully encode the output. The transformations are applied only if their input conditions are met.
-
-The available format adapters are:
-
-* ``GrayAlphaToRGBA``: expands a gray image with alpha channel to RGBA (mandatory for geospatial PDF for example)
-* ``PallettedToRGB``: expands a paletted image RGB(A) (mandatory for geospatial PDF for example)
+.. _gdal_wcs_output_format:
+
+GDAL based WCS Output Format
+============================
+
+The gdal_translate based output format leverages the availability of the gdal_translate command to allow the generation of more output formats than GeoServer can natively produce.
+The basic idea is to dump to the file system a file that gdal_translate can translate, invoke the command, then zip and return the output of the translation.
+
+This extension is thus the equivalent of the :ref:`OGR extension ` for raster data.
+
+
+Out of the box behaviour
+------------------------
+
+Out of the box the plugin assumes the following:
+
+* gdal_translate is available in the path
+* the GDAL_DATA variable is pointing to the GDAL data directory (which stores the spatial reference information for GDAL)
+
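+A quick way to verify both assumptions is to run the command and inspect the environment from the same shell used to start GeoServer (the paths involved are just examples)::
+
+   gdal_translate --version
+   echo $GDAL_DATA
+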
+In the default configuration the following formats are supported:
+
+* JPEG-2000 part 1 (ISO/IEC 15444-1)
+* Geospatial PDF
+* Arc/Info ASCII Grid
+* ASCII Gridded XYZ
+
+The list might be shorter if gdal_translate has not been built with support for the above formats (for example, the default JPEG-2000 format relies on the `JasPer-based GDAL driver `_).
+
+Once installed in GeoServer, a number of new supported formats will be listed in the ``ServiceMetadata`` section of the WCS 2.0 GetCapabilities document, e.g. ``image/jp2`` and ``application/pdf``.
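+
+For example, once the plugin is installed, a WCS 2.0 GetCoverage request can ask for one of the new formats directly; the coverage name below is hypothetical::
+
+   http://localhost:8080/geoserver/wcs?service=WCS&version=2.0.1&request=GetCoverage
+     &coverageId=myws__dem&format=image/jp2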
+
+gdal_translate conversion abilities
+-----------------------------------
+
+The gdal_translate utility is usually able to convert more formats than the default setup of this output format allows for, but the exact list depends on how the utility was built from sources. To get a full list of the formats available in your gdal_translate build, just run::
+
+ gdal_translate --long-usage
+
+and you'll get the full set of options usable by the program, along with the supported formats.
+
+.. include:: usage_example.txt
+
+The full list of formats that gdal_translate is able to support is available on the `GDAL site `_. Mind that this output format can handle only outputs that are file based and that do support creation. So, for example, you won't be able to use the PostGIS Raster output (since it's database based) or the Arc/Info Binary Grid (creation not supported).
+
+Customisation
+-------------
+
+If gdal_translate is not available in the default path, the GDAL_DATA environment variable is not set, or if the output formats need tweaking, a ``gdal_translate.xml`` configuration file can be created to customize the output format. The file should be put inside a ``gdal`` folder in the root of the GeoServer data directory.
+
+.. note:: GeoServer will automatically detect any change to the file and reload the configuration, without a need to restart.
+
+
+The default configuration is equivalent to the following xml file:
+
+.. code-block:: xml
+
+   <ToolConfiguration>
+      <executable>gdal_translate</executable>
+      <environment>
+         <variable name="GDAL_DATA" value="/usr/local/share/gdal" />
+      </environment>
+      <formats>
+         <Format>
+            <toolFormat>JPEG2000</toolFormat>
+            <geoserverFormat>GDAL-JPEG2000</geoserverFormat>
+            <fileExtension>.jp2</fileExtension>
+            <singleFile>true</singleFile>
+            <mimeType>image/jp2</mimeType>
+            <type>binary</type>
+         </Format>
+         <Format>
+            <toolFormat>PDF</toolFormat>
+            <geoserverFormat>GDAL-PDF</geoserverFormat>
+            <fileExtension>.pdf</fileExtension>
+            <singleFile>true</singleFile>
+            <mimeType>application/pdf</mimeType>
+            <formatAdapters>
+               <GrayAlphaToRGBA/>
+               <PallettedToRGB/>
+            </formatAdapters>
+         </Format>
+         <Format>
+            <toolFormat>AAIGrid</toolFormat>
+            <geoserverFormat>GDAL-ArcInfoGrid</geoserverFormat>
+            <fileExtension>.asc</fileExtension>
+            <singleFile>false</singleFile>
+         </Format>
+         <Format>
+            <toolFormat>XYZ</toolFormat>
+            <geoserverFormat>GDAL-XYZ</geoserverFormat>
+            <fileExtension>.txt</fileExtension>
+            <singleFile>true</singleFile>
+            <mimeType>text/plain</mimeType>
+            <type>text</type>
+         </Format>
+      </formats>
+   </ToolConfiguration>
+
+The file showcases all the available configuration elements:
+
+* ``executable`` can be just gdal_translate if the command is in the path; otherwise it should be the full path to the executable. For example, on a Linux box with a custom built GDAL library it might be::
+
+ /usr/local/bin/gdal_translate
+
+* ``environment`` contains a list of ``variable`` elements, which can be used to define environment variables that should be set prior to invoking gdal_translate. For example, to setup a GDAL_DATA environment variable pointing to the GDAL data directory, the configuration might be::
+
+      <environment>
+         <variable name="GDAL_DATA" value="/usr/local/share/gdal" />
+      </environment>
+
+* ``Format`` defines a single output format, described by the following tags:
+
+ * ``toolFormat``: the name of the format to be passed to gdal_translate with the -of option (case insensitive).
+ * ``geoserverFormat``: is the name of the output format as advertised by GeoServer
+ * ``fileExtension``: is the extension of the file generated after the translation, if any (can be omitted)
+ * ``option``: can be used to add one or more options to the gdal_translate command line; as shown in the custom format sketch at the end of this section, each item must be contained in its own ``option`` tag. You can get a full list of options by running ``gdal_translate --help`` or by visiting the `GDAL web site `_. Also, consider that each format supports specific creation options, listed in the description page for each format (for example, here is the `JPEG2000 one `_).
+ * ``singleFile``: if true the output of the conversion is supposed to be a single file that can be streamed directly back without the need to wrap it into a zip file
+ * ``mimeType``: the mime type of the file returned when using ``singleFile``. If not specified ``application/octet-stream`` will be used as a default.
+ * ``formatAdapters``: transformations on the coverage that might need to be applied in order to successfully encode the output. The transformations are applied only if their input conditions are met.
+
+The available format adapters are:
+
+* ``GrayAlphaToRGBA``: expands a gray image with alpha channel to RGBA (mandatory for geospatial PDF for example)
+* ``PallettedToRGB``: expands a paletted image to RGB(A) (mandatory for geospatial PDF, for example)
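+
+Putting the tags above together, a custom entry added to the ``formats`` section of ``gdal_translate.xml`` for a JPEG-compressed GeoTIFF output might look like the following sketch (the ``GDAL-GeoTIFF-JPEG`` name is arbitrary; ``GTiff`` and ``COMPRESS=JPEG`` are standard gdal_translate format and creation option names):
+
+.. code-block:: xml
+
+   <Format>
+      <toolFormat>GTiff</toolFormat>
+      <geoserverFormat>GDAL-GeoTIFF-JPEG</geoserverFormat>
+      <fileExtension>.tif</fileExtension>
+      <option>-co</option>
+      <option>COMPRESS=JPEG</option>
+      <singleFile>true</singleFile>
+      <mimeType>image/tiff</mimeType>
+      <type>binary</type>
+   </Format>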
diff --git a/doc/en/user/source/community/geomesa/index.rst b/doc/en/user/source/community/geomesa/index.rst
index ca9dd0aa97a..2684ce59eae 100644
--- a/doc/en/user/source/community/geomesa/index.rst
+++ b/doc/en/user/source/community/geomesa/index.rst
@@ -1,6 +1,6 @@
-.. _community_geomesa:
-
-GeoMesa data store
-==================
-
-`GeoMesa `_ provides a GeoTools DataStore to access SimpleFeatures stored in Apache Accumulo.
+.. _community_geomesa:
+
+GeoMesa data store
+==================
+
+`GeoMesa `_ provides a GeoTools DataStore to access SimpleFeatures stored in Apache Accumulo.
diff --git a/doc/en/user/source/community/index.rst b/doc/en/user/source/community/index.rst
index 74604ba4b37..c26567bc4be 100644
--- a/doc/en/user/source/community/index.rst
+++ b/doc/en/user/source/community/index.rst
@@ -1,61 +1,61 @@
-.. _community:
-
-Community modules
-=================
-
-This section is devoted to GeoServer community modules. Community modules are considered "pending" in that they are not
-officially part of the GeoServer releases. They are however built along with the
-`nightly builds `_, so you can download and play with them.
-
-.. warning::
-
- Community modules are generally considered experimental in nature and are often under constant development. For that reason documentation in this section should not be considered solid or final and will be subject to change.
-
-
-.. toctree::
- :maxdepth: 1
-
- oauth2/index
- keycloak/index
- keycloak/keycloak_role_service
- dds/index
- colormap/index
- jdbcconfig/index
- mbtiles/index
- geopkg/index
- pgraster/pgraster
- jms-cluster/index
- solr/index
- elasticsearch/index
- geomesa/index
- gwc-distributed/index
- flatgeobuf/index
- gdal/index
- gwc-azure-blob/index
- gwc-sqlite/index
- remote-wps/index
- jdbcstore/index
- ncwms/index
- backuprestore/index
- saml/index
- notification/index
- opensearch-eo/index
- s3-geotiff/index
- netcdf-ghrsst/index
- monitor-hibernate/index
- taskmanager/index
- metadata/index
- ogr-store/index
- geostyler/index
- csw-iso/index
- importer-jdbc/index
- hana/index
- features-templating/index
- ogc-api/index
- gsr/index
- cog/index
- cov-json/index
- smart-data-loader/index
- schemaless-features/index
- web-service-auth/index
- gwc-mbtiles/index
+.. _community:
+
+Community modules
+=================
+
+This section is devoted to GeoServer community modules. Community modules are considered "pending" in that they are not
+officially part of the GeoServer releases. They are however built along with the
+`nightly builds `_, so you can download and play with them.
+
+.. warning::
+
+ Community modules are generally considered experimental in nature and are often under constant development. For that reason documentation in this section should not be considered solid or final and will be subject to change.
+
+
+.. toctree::
+ :maxdepth: 1
+
+ oauth2/index
+ keycloak/index
+ keycloak/keycloak_role_service
+ dds/index
+ colormap/index
+ jdbcconfig/index
+ mbtiles/index
+ geopkg/index
+ pgraster/pgraster
+ jms-cluster/index
+ solr/index
+ elasticsearch/index
+ geomesa/index
+ gwc-distributed/index
+ flatgeobuf/index
+ gdal/index
+ gwc-azure-blob/index
+ gwc-sqlite/index
+ remote-wps/index
+ jdbcstore/index
+ ncwms/index
+ backuprestore/index
+ saml/index
+ notification/index
+ opensearch-eo/index
+ s3-geotiff/index
+ netcdf-ghrsst/index
+ monitor-hibernate/index
+ taskmanager/index
+ metadata/index
+ ogr-store/index
+ geostyler/index
+ csw-iso/index
+ importer-jdbc/index
+ hana/index
+ features-templating/index
+ ogc-api/index
+ gsr/index
+ cog/index
+ cov-json/index
+ smart-data-loader/index
+ schemaless-features/index
+ web-service-auth/index
+ gwc-mbtiles/index
diff --git a/doc/en/user/source/community/jdbcconfig/index.rst b/doc/en/user/source/community/jdbcconfig/index.rst
index 49bcff4fc2b..196bca80ddf 100644
--- a/doc/en/user/source/community/jdbcconfig/index.rst
+++ b/doc/en/user/source/community/jdbcconfig/index.rst
@@ -1,15 +1,15 @@
-.. _community_jdbcconfig:
-
-JDBCConfig
-==========
-
-The ``JDBCConfig module`` enhances the scalibility performance of the GeoServer Catalog.
-It allows externalising the storage of the Catalog configuration objects (such as workspaces, stores, layers) to a Relational Database Management System,
-rather than using xml files in the :ref:`datadir`. This way the Catalog can support access to unlimited numbers of those configuration objects efficiently.
-
-.. toctree::
- :maxdepth: 2
-
- installing
- configuration
-
+.. _community_jdbcconfig:
+
+JDBCConfig
+==========
+
+The ``JDBCConfig module`` enhances the scalability and performance of the GeoServer Catalog.
+It allows externalising the storage of the Catalog configuration objects (such as workspaces, stores, layers) to a Relational Database Management System,
+rather than using xml files in the :ref:`datadir`. This way the Catalog can support access to unlimited numbers of those configuration objects efficiently.
+
+.. toctree::
+ :maxdepth: 2
+
+ installing
+ configuration
+
diff --git a/doc/en/user/source/community/jdbcstore/index.rst b/doc/en/user/source/community/jdbcstore/index.rst
index 4dc6f2c309a..53a58803dc5 100644
--- a/doc/en/user/source/community/jdbcstore/index.rst
+++ b/doc/en/user/source/community/jdbcstore/index.rst
@@ -1,13 +1,13 @@
-.. _community_jdbcstore:
-
-JDBCStore
-==========
-
-The ``JDBCStore module`` allows efficient sharing of configuration data in a clustered deployment of GeoServer. It allows externalising the storage of all configuration resources to a Relational Database Management System, rather than using the default File System based :ref:`datadir`. This way the multiple instances of GeoServer can use the same Database and therefore share in the same configuration.
-
-.. toctree::
- :maxdepth: 2
-
- installing
- configuration
-
+.. _community_jdbcstore:
+
+JDBCStore
+==========
+
+The ``JDBCStore module`` allows efficient sharing of configuration data in a clustered deployment of GeoServer. It allows externalising the storage of all configuration resources to a Relational Database Management System, rather than using the default File System based :ref:`datadir`. This way multiple instances of GeoServer can use the same database and therefore share the same configuration.
+
+.. toctree::
+ :maxdepth: 2
+
+ installing
+ configuration
+
diff --git a/doc/en/user/source/community/monitor-hibernate/index.rst b/doc/en/user/source/community/monitor-hibernate/index.rst
index 5fa0d546fc9..14881575b91 100644
--- a/doc/en/user/source/community/monitor-hibernate/index.rst
+++ b/doc/en/user/source/community/monitor-hibernate/index.rst
@@ -1,18 +1,18 @@
-.. _monitor_hibernate_extension:
-
-Monitoring Hibernate storage
-============================
-
-The monitor hibernate storage allows to track the requests made against a GeoServer instance
-in a relational database, as opposed to keeping the data in memory for a short time, or
-logging it on a audit file.
-
-.. toctree::
- :maxdepth: 2
-
- installation/
- configuration/
- db/
- upgrade/
-
-
+.. _monitor_hibernate_extension:
+
+Monitoring Hibernate storage
+============================
+
+The monitor hibernate storage allows tracking the requests made against a GeoServer instance
+in a relational database, as opposed to keeping the data in memory for a short time, or
+logging it to an audit file.
+
+.. toctree::
+ :maxdepth: 2
+
+ installation/
+ configuration/
+ db/
+ upgrade/
+
+
diff --git a/doc/en/user/source/community/netcdf-ghrsst/index.rst b/doc/en/user/source/community/netcdf-ghrsst/index.rst
index 9935c1dc803..be631297989 100644
--- a/doc/en/user/source/community/netcdf-ghrsst/index.rst
+++ b/doc/en/user/source/community/netcdf-ghrsst/index.rst
@@ -1,80 +1,80 @@
-.. _community_netcdf_ghrsst:
-
-GHRSST NetCDF output
-=====================
-
-`GHRSST `_ is Group for High Resolution Sea Surface Temperature.
-Among its various activities it issued a `specification on how sea surface temperature data should be organized
-in NetCDF files `_.
-
-The NetCDF GHRSST module allows to generate complaint GHRSST files as WCS outputs, given a compliant GHRSST input.
-
-Installation
-------------
-
-As a community module, the package needs to be downloaded from the `nightly builds `_,
-picking the community folder of the corresponding GeoServer series (e.g. if working on the GeoServer main development branch nightly
-builds, pick the zip file form ``main/community-latest``).
-
-To install the module, unpack the zip file contents into GeoServer own ``WEB-INF/lib`` directory and
-restart GeoServer.
-
-For the module to work, the :ref:`netcdf` and :ref:`netcdf-out` extensions must also be installed.
-
-Input preparation
------------------
-
-A GHRSST file contains multiple variables that are related with each other, and should be explored
-toghether in order to better understand the data. Thus, it is assumed that the source GHRSST file is published
-as a single coverage view holding all the variables as bands, retaining their native name (this is important for
-the plugin to work):
-
-.. figure:: images/coverageView.png
- :align: center
-
- *Setting up a coverage view with all variables as bands*
-
-A GHRSST output must also have a time, so the time dimension of this layer should be enabled (the output generation will fail
-with an error otherwise).
-
-At the time of writing a coverage view requires the source bands to be of uniform data type, and the data sources might
-not be. In case setting up the view is not possible with the data available, a NCML file can be used to reprocess
-the source NetCDF into one that has bands with uniform data type. A downloadable example has been provided to facilitate
-setting up the view.
-
-:download:`Download the reference NCML transformation `
-
-
-The GHRSST may also have to be setup in a image mosaic in order to provide a deep temporal layer that users can select
-data from. The image mosaic setup can be complex, so a downloadable example has been provided for it as well (will require
-some changes, at a minimum, fix the paths at the bottom of indexer.xml, and the database connection parameters in the
-two datastore properties files).
-
-:download:`Download the sample mosaic configuration files `
-
-
-Configuring GHRSST output
--------------------------
-
-The normal WCS NetCDF output will pick the first band of a coverage and generate a single variable NetCDF output.
-When the GHRSST plugin is installed, a new UI element will show up that enables GHRSST output:
-
-.. figure:: images/ghrsstConfiguration.png
- :align: center
-
- *Enabling GHRSST output mode*
-
-Notes about the configuration UI:
-
-* Various normal configurations such as variable name, unit of measure, and data packing will be ignored (each
- variable in GHRSST has its own assigned data type and packing, as from specification)
-* For the output to be compliant, enable copy of both global and per variable attributes
-* The RDAC, Processing Level, SST Type and Product String have to be filled in order to generate a valid GHRSST
- file name in output. The user interface provides auto-complete with names picked from the specification, but others
- can be inputed as well.
-
-For the output to generate correctly the coverage band names have to follow exactly the expected specification variable
-names (which comes naturally if the input is valid GHRSST), variable will be re-packed in output according to
-specification, so even if the inputs are all floats, the output will follow the expected data types.
-
-Any extra coverage band not present in the specification will be copied from input to output un-modified.
+.. _community_netcdf_ghrsst:
+
+GHRSST NetCDF output
+=====================
+
+`GHRSST `_ is the Group for High Resolution Sea Surface Temperature.
+Among its various activities it issued a `specification on how sea surface temperature data should be organized
+in NetCDF files `_.
+
+The NetCDF GHRSST module allows generating compliant GHRSST files as WCS outputs, given a compliant GHRSST input.
+
+Installation
+------------
+
+As a community module, the package needs to be downloaded from the `nightly builds `_,
+picking the community folder of the corresponding GeoServer series (e.g. if working on the GeoServer main development branch nightly
+builds, pick the zip file from ``main/community-latest``).
+
+To install the module, unpack the zip file contents into GeoServer's own ``WEB-INF/lib`` directory and
+restart GeoServer.
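+
+A minimal sketch of the unpack step on a Linux install, assuming hypothetical paths and archive name::
+
+   cd /opt/geoserver/webapps/geoserver/WEB-INF/lib
+   unzip ~/Downloads/geoserver-netcdf-ghrsst-plugin.zip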
+
+For the module to work, the :ref:`netcdf` and :ref:`netcdf-out` extensions must also be installed.
+
+Input preparation
+-----------------
+
+A GHRSST file contains multiple variables that are related to each other, and should be explored
+together in order to better understand the data. Thus, it is assumed that the source GHRSST file is published
+as a single coverage view holding all the variables as bands, retaining their native name (this is important for
+the plugin to work):
+
+.. figure:: images/coverageView.png
+ :align: center
+
+ *Setting up a coverage view with all variables as bands*
+
+A GHRSST output must also have a time, so the time dimension of this layer should be enabled (the output generation will fail
+with an error otherwise).
+
+At the time of writing a coverage view requires the source bands to be of uniform data type, and the data sources might
+not be. In case setting up the view is not possible with the data available, an NCML file can be used to reprocess
+the source NetCDF into one that has bands with uniform data type. A downloadable example has been provided to facilitate
+setting up the view.
+
+:download:`Download the reference NCML transformation `
+
+
+The GHRSST data may also have to be set up in an image mosaic in order to provide a deep temporal layer that users can select
+data from. The image mosaic setup can be complex, so a downloadable example has been provided for it as well (it will require
+some changes; at a minimum, fix the paths at the bottom of indexer.xml and the database connection parameters in the
+two datastore properties files).
+
+:download:`Download the sample mosaic configuration files `
+
+
+Configuring GHRSST output
+-------------------------
+
+The normal WCS NetCDF output will pick the first band of a coverage and generate a single variable NetCDF output.
+When the GHRSST plugin is installed, a new UI element will show up that enables GHRSST output:
+
+.. figure:: images/ghrsstConfiguration.png
+ :align: center
+
+ *Enabling GHRSST output mode*
+
+Notes about the configuration UI:
+
+* Various normal configurations such as variable name, unit of measure, and data packing will be ignored (each
+  variable in GHRSST has its own assigned data type and packing, as per the specification)
+* For the output to be compliant, enable copying of both global and per-variable attributes
+* The RDAC, Processing Level, SST Type and Product String have to be filled in to generate a valid GHRSST
+  file name in output. The user interface provides auto-complete with names picked from the specification, but others
+  can be entered as well.
+
+For the output to generate correctly, the coverage band names have to exactly follow the variable names expected by the
+specification (which comes naturally if the input is valid GHRSST); variables will be re-packed in the output according to the
+specification, so even if the inputs are all floats, the output will follow the expected data types.
+
+Any extra coverage band not present in the specification will be copied from input to output unmodified.
diff --git a/doc/en/user/source/community/ogr-store/index.rst b/doc/en/user/source/community/ogr-store/index.rst
index 24c4e4045f3..b0caf9997ba 100644
--- a/doc/en/user/source/community/ogr-store/index.rst
+++ b/doc/en/user/source/community/ogr-store/index.rst
@@ -1,118 +1,118 @@
-.. _ogr_store:
-
-OGR datastore
-=============
-
-The OGR datastore module allows to use the `GDAL/OGR ` native library
-to access a wide variety of vector spatial formats and publish them in GeoServer.
-
-This library is recommended to use when a particular data source does not have a GeoServer pure Java
-datastore fulfilling the same needs, in particular, compared to built in sources, it has the following limitations:
-
-* Generally slower than the existing pure Java counterparts, especially for map rendering (the GeoServer
- stores can help rendering by providing reduced resolution version of the geometries, OGR provides no
- such facility)
-* Less scalable than the pure Java counterparts, as the DataSource objects used to access data are not
- thread safe (see the pooling options below)
-* More risky than the pure java counterparts, a SEGFAULT occurring inside OGR will take down the entire
- GeoServer process (while a pure Java exception is managed and reported, but won't have consequences
- on the server itself)
-
-The OGR store has been tested with GDAL 2.2.x, but might be working with other versions as well.
-In case of malfunctions, you can try to remove the ``gdal-.jar`` file from the GeoServer
-installation package, and replace it with the specific version jar instead, which you should find
-in your GDAL installation.
-
-
-Installing
-----------
-
-This is a community module, which means that it will not be available in the GeoServer official releases and needs to be installed manually.
-
-This module can be installed following these steps:
-
-1. Download this module package from the `nightly builds `_, the module version should match the desired GeoServer version.
-
-2. Extract the contents of the package into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-3. Make sure that the GDAL library as well as the GDAL JNI native library are available in the GeoServer path (see below).
-
-Linux installation details
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-On Linux the native librariers are commonly available via packages such as ``gdal`` and ``gdal-java``,
-which, on installation, make available the required libraries on the file system (the specific name may vary)::
-
- /usr/lib/libgdal.so
- /usr/lib/jni/libgdaljni.so
-
-Normally these directories are already in the ``PATH``, so no further configuration is required.
-
-If using a custom build instead, the ``LD_LIBRARY_PATH`` and ``GDAL_DATA`` directories::
-
- export LD_LIBRARY_PATH /path/to/gdal/libraries
- export GDAL_DATA /path/to/gdal/data
-
-See also the GDAL FAQ `about the GDAL_DATA setup `_.
-
-Windows installation details
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-On windows the files in question might look like::
-
- gdal204.dll
- gdalalljni.dll
-
-Locating a pre-build GDAL that includes Java support might be difficult. One option is to download
-the `gisinternals.com `_ packages, in particular the
-release zip packages including both mapserver and GDAL (these are rather complete and include the necessary libraries,
-whilst the MSI installers are typically missing the Java support).
-
-Once the package is available on disk, one has to set the following environment variables before
-starting GeoServer (the path might change depending on the package that is being downloaded)::
-
- set PATH=%PATH%;C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin;C:\tmp\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal\java
- set GDAL_DRIVER_PATH=C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal\plugins
- set GDAL_DATA=C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal-data
-
-Configuring a store
--------------------
-
-If the library is properly installed you will get the "OGR" data store among the supported stores
-in the "new store" page. In case it's not there, check the logs, they might be reporting that
-the GDAL/OGR native libs are missing, if the error is not there, check that the jars have been
-unpacked in the right position instead.
-
-Creating a new store requires configuration of only the :guilabel:`DatasourceName` field, any other parameter is
-optional:
-
-.. figure:: images/store_config.png
- :align: center
-
- *The OGR datasore configuration page*
-
-The :guilabel:`DatasourceName` can be a reference to a file, a directory, or a set of connection parameters to
-a server. For example, to connect to a PostGIS database the connection parameters could be:
-
- ``PG:user=theUser password=thePassword dbname=theDatabase``
-
-Notice how, unlike documented in the OGR page, single quotes are not needed (and actually harmful) around the
-user/password/dbname section.
-The :guilabel:`Browse` button can be used to quickly peek files or directories from the file system.
-
-The :guilabel:`Driver` parameter is optional, OGR should be able to recognize the appropriate driver automatically,
-but it's useful to force a specific choice when multiple competing drivers are available for the same
-data source (e.g., OpenFileGDB vs FileGDB).
-
-The pooling parameters, similar to those found in a database, merit an explanation.
-OGR exposes access to data throught DataSource objects, which are not thread safe, so only one
-request at a time can use them. At the same time, they can be expensive to create and hold onto
-useful state, like in memory data caches, spatial indexes and the like.
-As such, they have been stored in a pool much like relational database connections.
-
-The :guilabel:`Prime DataSources` option can be enabled to force a full read of the source data
-before the GDAL ``DataSource`` object is used. In some formats this allows the creation of useful
-support data structures, like an in memory spatial index in the ``OpenFileGDB`` format.
-Since the full read can be expensive, care should be taken to configure the pooling options so that
-it gets reused as much as possible (e.g., setting a higher ``min connections``, eventually setting
+.. _ogr_store:
+
+OGR datastore
+=============
+
+The OGR datastore module allows using the `GDAL/OGR ` native library
+to access a wide variety of vector spatial formats and publish them in GeoServer.
+
+This library is recommended when a particular data source does not have a pure Java GeoServer
+datastore fulfilling the same needs; in particular, compared to built-in sources, it has the following limitations:
+
+* Generally slower than the existing pure Java counterparts, especially for map rendering (the GeoServer
+ stores can help rendering by providing reduced resolution versions of the geometries; OGR provides no
+ such facility)
+* Less scalable than the pure Java counterparts, as the DataSource objects used to access data are not
+ thread safe (see the pooling options below)
+* More risky than the pure Java counterparts, as a SEGFAULT occurring inside OGR will take down the entire
+ GeoServer process (while a pure Java exception is managed and reported, but won't have consequences
+ on the server itself)
+
+The OGR store has been tested with GDAL 2.2.x, but might work with other versions as well.
+In case of malfunctions, you can try to remove the ``gdal-.jar`` file from the GeoServer
+installation package and replace it with the jar matching your specific GDAL version, which you should find
+in your GDAL installation.
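+
+A minimal sketch of that swap on a Linux install, with hypothetical paths (the jar shipped with the module keeps the version number elided above)::
+
+   cd /opt/geoserver/webapps/geoserver/WEB-INF/lib
+   rm gdal-*.jar
+   cp /usr/share/java/gdal.jar .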
+
+
+Installing
+----------
+
+This is a community module, which means that it will not be available in the GeoServer official releases and needs to be installed manually.
+
+This module can be installed following these steps:
+
+1. Download this module package from the `nightly builds `_; the module version should match the desired GeoServer version.
+
+2. Extract the contents of the package into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+3. Make sure that the GDAL library as well as the GDAL JNI native library are available in the GeoServer path (see below).
+
+Linux installation details
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+On Linux the native libraries are commonly available via packages such as ``gdal`` and ``gdal-java``,
+which, on installation, make the required libraries available on the file system (the specific names may vary)::
+
+ /usr/lib/libgdal.so
+ /usr/lib/jni/libgdaljni.so
+
+Normally these directories are already in the ``PATH``, so no further configuration is required.
+
+If using a custom build instead, set the ``LD_LIBRARY_PATH`` and ``GDAL_DATA`` environment variables accordingly::
+
+    export LD_LIBRARY_PATH=/path/to/gdal/libraries
+    export GDAL_DATA=/path/to/gdal/data
+
+See also the GDAL FAQ `about the GDAL_DATA setup `_.
+
+Windows installation details
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+On Windows the files in question might look like::
+
+ gdal204.dll
+ gdalalljni.dll
+
+Locating a pre-built GDAL that includes Java support might be difficult. One option is to download
+the `gisinternals.com `_ packages, in particular the
+release zip packages including both mapserver and GDAL (these are rather complete and include the necessary libraries,
+whilst the MSI installers are typically missing the Java support).
+
+Once the package is available on disk, one has to set the following environment variables before
+starting GeoServer (the paths might change depending on the package being downloaded)::
+
+ set PATH=%PATH%;C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin;C:\tmp\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal\java
+ set GDAL_DRIVER_PATH=C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal\plugins
+ set GDAL_DATA=C:\path\to\release-1900-x64-gdal-2-4-0-mapserver-7-2-1\bin\gdal-data
+
+Configuring a store
+-------------------
+
+If the library is properly installed you will get the "OGR" data store among the supported stores
+in the "new store" page. In case it's not there, check the logs: they might be reporting that
+the GDAL/OGR native libraries are missing; if no such error is present, check that the jars have been
+unpacked in the right location instead.
+
+Creating a new store requires configuration of only the :guilabel:`DatasourceName` field; any other parameter is
+optional:
+
+.. figure:: images/store_config.png
+ :align: center
+
+ *The OGR datastore configuration page*
+
+The :guilabel:`DatasourceName` can be a reference to a file, a directory, or a set of connection parameters to
+a server. For example, to connect to a PostGIS database the connection parameters could be:
+
+ ``PG:user=theUser password=thePassword dbname=theDatabase``
+
+Notice how, unlike what is documented in the OGR page, single quotes are not needed (and are actually harmful) around the
+user/password/dbname section.
+The :guilabel:`Browse` button can be used to quickly pick files or directories from the file system.
+
+The :guilabel:`Driver` parameter is optional; OGR should be able to recognize the appropriate driver automatically,
+but it's useful to force a specific choice when multiple competing drivers are available for the same
+data source (e.g., OpenFileGDB vs FileGDB).
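+
+For example, to point the store at a hypothetical File Geodatabase while forcing the ``OpenFileGDB`` driver, the two fields could be filled as follows (the path is made up)::
+
+   DatasourceName: /data/gis/sample.gdb
+   Driver:         OpenFileGDB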
+
+The pooling parameters, similar to those found in database stores, merit an explanation.
+OGR exposes access to data through DataSource objects, which are not thread safe, so only one
+request at a time can use them. At the same time, they can be expensive to create and they hold onto
+useful state, like in-memory data caches, spatial indexes and the like.
+As such, they are stored in a pool much like relational database connections.
+
+The :guilabel:`Prime DataSources` option can be enabled to force a full read of the source data
+before the GDAL ``DataSource`` object is used. In some formats this allows the creation of useful
+support data structures, like an in-memory spatial index in the ``OpenFileGDB`` format.
+Since the full read can be expensive, care should be taken to configure the pooling options so that
+the DataSource gets reused as much as possible (e.g., setting a higher ``min connections``, eventually setting
it to the same value as ``max connections``).
\ No newline at end of file
diff --git a/doc/en/user/source/community/solr/configure.rst b/doc/en/user/source/community/solr/configure.rst
index df25c2f7fc1..bafd961af25 100644
--- a/doc/en/user/source/community/solr/configure.rst
+++ b/doc/en/user/source/community/solr/configure.rst
@@ -1,156 +1,156 @@
-.. _community_solr_configure:
-
-SOLR layer configuration
-========================
-
-Mapping documents to layers
----------------------------
-
-SOLR indexes almost free form documents, the SOLR instance has a collection of fields, and
-each document can contain any field, in any combination.
-On the other side, GeoServer organizes data in fixed structure feature types, and exposes
-data in separate layers. This leaves the question of how documents in the index
-should be organized into layers.
-
-By default the store exposes a single layer, normally named after the SOLR collection the store is connected
-to, by publishing it one can decide which fields to include, and eventually add a filter
-to select which attributes it will contain.
-
-This single layer can be published multiple times, giving each published layer a different name,
-set of selected attributes, and a different filter to select the documents contained in the layer.
-
-Installing the SOLR extension
------------------------------------
-
-#. Download the SOLR extension from the `nightly GeoServer community module builds `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance.
-
-#. If GeoServer is running, stop it.
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-#. Restart GeoServer, the SOLR data store should show up as an option when going through the new store
- creation workflow.
-
-Connecting to a SOLR server
-----------------------------
-
-Once the extension is properly installed ``SOLR`` will show up as an option when creating a new data store.
-
-.. figure:: images/solr_store.png
- :align: center
-
- *SOLR in the list of vector data sources*
-
-.. _community_solr_configure_store:
-
-Configuring a SOLR data store
------------------------------
-
-.. figure:: images/solr_configuration.png
- :align: center
-
- *Configuring a SOLR data store*
-
-.. list-table::
- :widths: 20 80
-
- * - ``solr_url``
- - Provide a link to the SOLR server that provides the documents
-
-Once the parameters are entered and confirmed, GeoServer will contact the SOLR server and
-fetch a list of layer names and fill the layer chooser page accordingly:
-
-.. figure:: images/solr_layerlist.png
- :align: center
-
- *List of layers available in the SOLR server*
-
-Configuring a new SOLR base layer
----------------------------------
-
-Once the layer name is chosen, the usual layer configuration panel will appear, with a pop-up showing
-in a table the fields available:
-
-.. figure:: images/solr_fieldlist.png
- :align: center
-
- *The layer field list configuration*
-
-.. list-table::
- :widths: 20 80
-
- * - ``Is empty``
- - Read only fields, checked if the field has no values in the documents associated to this layer
- * - ``Use``
- - Used to select the fields that will make up this layer features
- * - ``Name``
- - Name of the field
- * - ``Type``
- - Type of the field, as derived from the SOLR schema. For geometry types, you have the option to provide a more specific data type
- * - ``SRID``
- - Native spatial reference ID of the geometries
- * - ``Default geometry``
- - Indicates if the geometry field is the default one. Useful if the documents contain more than one geometry field,
- as SLDs and spatial filters will hit the default geometry field unless otherwise specified
- * - ``Identifier``
- - Check if the field can be used as the feature identifier
-
-
-By default the list will contain only the fields that have at least one non null value in the documents
-associated to the layer, but it is possible to get the full list by un-checking the "Hide field if empty"
-check-box:
-
-.. figure:: images/solr_fieldlist_all.png
- :align: center
-
- *Showing all fields available in SOLR*
-
-Once the table is filled with the all the required parameters, press the "Apply" button to confirm
-and go back to the main layer configuration panel.
-Should the choice of fields be modified, you can click the "Configure SOLR fields" just below the "Feature Type Details" panel.
-
-.. figure:: images/solr_fieldlist_edit.png
- :align: center
-
- *Going back to the field list editor*
-
-The rest of the layer configuration works as normal, once all the fields are provided you'll be able to
-save and use the layer in WMS and WFS.
-
-.. warning:: In order to compute the bounding box GeoServer will have to fetch all the geometries making up the layer out of SOLR,
- this operation might take some time, you're advised to manually entered the native bounding box when configuring a
- layer out of a large document set
-
-Custom ``q`` and ``fq`` parameters
-----------------------------------
-
-The SOLR store will translate most OGC filters, as specified in SLD, CQL Filter or OGC filter,
-down into the SOLR engine for native filtering, using the ``fq`` parameter.
-However, in some occasions you might need to specify manually either ``q`` or ``fq``, to leverage
-some native SOLR filtering ability that cannot be expressed via OGC filters.
-
-This can be done by specifying those as ``viewparams``, pretty much like in parametric sql views
-atop relational databases.
-
-For example, the following URL::
-
- http://localhost:8080/geoserver/nurc/wms?service=WMS&version=1.1.0&request=GetMap
- &layers=nurc:active&styles=geo2&bbox=0.0,0.0,24.0,44.0&width=279&height=512
- &srs=EPSG:4326&format=application/openlayers
- &viewparams=fq:security_ss:WEP
-
-Will send down to SOLR a query looking like::
-
- omitHeader=true&fl=geo,id&q=*:*&rows=2147483647&sort=id asc
- &fq=status_s:active AND geo:"Intersects(POLYGON ((-0.125 -0.5333333333333333, -0.125 44.53333333333333,
- 24.125 44.53333333333333, 24.125 -0.5333333333333333, -0.125 -0.5333333333333333)))"
- &fq=security_ss:WEP&cursorMark=*
-
-You can notice that:
-
-* Only the columns needed for the display (in this case, a single geometry) are retrieved
-* The bbox and layer identification filters are specified in the first ``fq``
-* The custom ``fq`` is passed as a second ``fq`` parameter (SOLR will treat it as being and-ed with
- the previuos one)
+.. _community_solr_configure:
+
+SOLR layer configuration
+========================
+
+Mapping documents to layers
+---------------------------
+
+SOLR indexes almost free-form documents: the SOLR instance has a collection of fields, and
+each document can contain any field, in any combination.
+GeoServer, on the other hand, organizes data in fixed-structure feature types, and exposes
+data in separate layers. This leaves the question of how documents in the index
+should be organized into layers.
+
+By default the store exposes a single layer, normally named after the SOLR collection the store is connected
+to; when publishing it, one can decide which fields to include, and optionally add a filter
+to select which documents it will contain.
+
+This single layer can be published multiple times, giving each published layer a different name,
+set of selected attributes, and a different filter to select the documents contained in the layer.
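+
+For example, if the documents carry a field identifying the layer they belong to (the data loading page in this section adds a ``layer`` field, stored as ``layer_s`` in SOLR, with values such as ``roads``), the filter for a hypothetical ``roads`` layer could be as simple as the CQL expression::
+
+   layer_s = 'roads'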
+
+Installing the SOLR extension
+-----------------------------------
+
+#. Download the SOLR extension from the `nightly GeoServer community module builds `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance.
+
+#. If GeoServer is running, stop it.
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+#. Restart GeoServer, the SOLR data store should show up as an option when going through the new store
+ creation workflow.
+
+Connecting to a SOLR server
+----------------------------
+
+Once the extension is properly installed ``SOLR`` will show up as an option when creating a new data store.
+
+.. figure:: images/solr_store.png
+ :align: center
+
+ *SOLR in the list of vector data sources*
+
+.. _community_solr_configure_store:
+
+Configuring a SOLR data store
+-----------------------------
+
+.. figure:: images/solr_configuration.png
+ :align: center
+
+ *Configuring a SOLR data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - ``solr_url``
+ - Provide a link to the SOLR server that provides the documents
+
+Once the parameters are entered and confirmed, GeoServer will contact the SOLR server and
+fetch a list of layer names and fill the layer chooser page accordingly:
+
+.. figure:: images/solr_layerlist.png
+ :align: center
+
+ *List of layers available in the SOLR server*
+
+Configuring a new SOLR base layer
+---------------------------------
+
+Once the layer name is chosen, the usual layer configuration panel will appear, with a pop-up showing
+the available fields in a table:
+
+.. figure:: images/solr_fieldlist.png
+ :align: center
+
+ *The layer field list configuration*
+
+.. list-table::
+ :widths: 20 80
+
+ * - ``Is empty``
+ - Read-only flag, checked if the field has no values in the documents associated with this layer
+ * - ``Use``
+ - Used to select the fields that will make up this layer's features
+ * - ``Name``
+ - Name of the field
+ * - ``Type``
+ - Type of the field, as derived from the SOLR schema. For geometry types, you have the option to provide a more specific data type
+ * - ``SRID``
+ - Native spatial reference ID of the geometries
+ * - ``Default geometry``
+ - Indicates if the geometry field is the default one. Useful if the documents contain more than one geometry field,
+ as SLDs and spatial filters will hit the default geometry field unless otherwise specified
+ * - ``Identifier``
+ - Check if the field can be used as the feature identifier
+
+
+By default the list will contain only the fields that have at least one non-null value in the documents
+associated with the layer, but it is possible to get the full list by unchecking the "Hide field if empty"
+check-box:
+
+.. figure:: images/solr_fieldlist_all.png
+ :align: center
+
+ *Showing all fields available in SOLR*
+
+Once the table is filled with all the required parameters, press the "Apply" button to confirm
+and go back to the main layer configuration panel.
+Should the choice of fields need to change, you can click "Configure SOLR fields" just below the "Feature Type Details" panel.
+
+.. figure:: images/solr_fieldlist_edit.png
+ :align: center
+
+ *Going back to the field list editor*
+
+The rest of the layer configuration works as normal; once all the fields are provided you'll be able to
+save and use the layer in WMS and WFS.
+
+.. warning:: In order to compute the bounding box GeoServer will have to fetch all the geometries making up the layer out of SOLR;
+ this operation might take some time, so you're advised to manually enter the native bounding box when configuring a
+ layer out of a large document set
+
+Custom ``q`` and ``fq`` parameters
+----------------------------------
+
+The SOLR store will translate most OGC filters, as specified in SLD, CQL filters or OGC filter documents,
+down into the SOLR engine for native filtering, using the ``fq`` parameter.
+However, on some occasions you might need to manually specify either ``q`` or ``fq``, to leverage
+some native SOLR filtering ability that cannot be expressed via OGC filters.
+
+This can be done by specifying those as ``viewparams``, pretty much like in parametric SQL views
+atop relational databases.
+
+For example, the following URL::
+
+ http://localhost:8080/geoserver/nurc/wms?service=WMS&version=1.1.0&request=GetMap
+ &layers=nurc:active&styles=geo2&bbox=0.0,0.0,24.0,44.0&width=279&height=512
+ &srs=EPSG:4326&format=application/openlayers
+ &viewparams=fq:security_ss:WEP
+
+This will send down to SOLR a query looking like::
+
+ omitHeader=true&fl=geo,id&q=*:*&rows=2147483647&sort=id asc
+ &fq=status_s:active AND geo:"Intersects(POLYGON ((-0.125 -0.5333333333333333, -0.125 44.53333333333333,
+ 24.125 44.53333333333333, 24.125 -0.5333333333333333, -0.125 -0.5333333333333333)))"
+ &fq=security_ss:WEP&cursorMark=*
+
+You can notice that:
+
+* Only the columns needed for the display (in this case, a single geometry) are retrieved
+* The bbox and layer identification filters are specified in the first ``fq``
+* The custom ``fq`` is passed as a second ``fq`` parameter (SOLR will treat it as being and-ed with
+ the previous one)
diff --git a/doc/en/user/source/community/solr/index.rst b/doc/en/user/source/community/solr/index.rst
index 05543484994..60f0601b563 100644
--- a/doc/en/user/source/community/solr/index.rst
+++ b/doc/en/user/source/community/solr/index.rst
@@ -1,29 +1,29 @@
-.. _community_solr:
-
-SOLR data store
-===============
-
-`SOLR `_ is a popular search platform based on Apache Lucene project.
-Its major features include powerful full-text search, hit highlighting, faceted search, near real-time indexing,
-dynamic clustering, database integration, rich document (e.g., Word, PDF) handling, and most
-importantly for the GeoServer integration, geospatial search.
-
-The latest versions of SOLR can host most basic types of geometries (points, lines and polygons)
-as WKT and index them with a spatial index.
-
-.. note:: GeoServer does not come built-in with support for SOLR; it must be installed through this community module.
-
-The GeoServer SOLR extension has been tested with SOLR version 4.8, 4.9, and 4.10.
-
-The extension supports all WKT geometry types (all linear types, point, lines and polygons, SQL/MMcurves are not supported),
-plus "bounding box" (available starting SOLR 4.10).
-It does not support the ``solr.LatLonType`` type yet.
-
-The following pages shows how to use the SOLR data store.
-
-.. toctree::
- :maxdepth: 2
-
- configure
- load
- optimize
+.. _community_solr:
+
+SOLR data store
+===============
+
+`SOLR `_ is a popular search platform based on the Apache Lucene project.
+Its major features include powerful full-text search, hit highlighting, faceted search, near real-time indexing,
+dynamic clustering, database integration, rich document (e.g., Word, PDF) handling, and most
+importantly for the GeoServer integration, geospatial search.
+
+The latest versions of SOLR can host most basic types of geometries (points, lines and polygons)
+as WKT and index them with a spatial index.
+
+.. note:: GeoServer does not come built-in with support for SOLR; it must be installed through this community module.
+
+The GeoServer SOLR extension has been tested with SOLR version 4.8, 4.9, and 4.10.
+
+The extension supports all WKT geometry types (all linear types: points, lines and polygons; SQL/MM curves are not supported),
+plus "bounding box" (available starting with SOLR 4.10).
+It does not support the ``solr.LatLonType`` type yet.
+
+The following pages show how to use the SOLR data store.
+
+.. toctree::
+ :maxdepth: 2
+
+ configure
+ load
+ optimize
diff --git a/doc/en/user/source/community/solr/load.rst b/doc/en/user/source/community/solr/load.rst
index ce6f485faa1..251168a20f8 100644
--- a/doc/en/user/source/community/solr/load.rst
+++ b/doc/en/user/source/community/solr/load.rst
@@ -1,80 +1,80 @@
-.. _community_solr_load:
-
-Loading spatial data into SOLR
-------------------------------
-
-This section provides a simple example on how to convert and load a shapefile into a SOLR instance.
-For more advanced needs and details about spatial support in SOLR consult the SOLR documentation,
-making sure to read the one associated to the version at hand (spatial support is still rapidly
-evolving).
-
-The current example has been developed and tested using GDAL 1.11 and SOLR 4.8, different versions
-of the tools and server might require a different syntax for upload.
-
-The SOLR instance is supposed to have the following definitions in its schema:
-
-.. code-block:: xml
-
-
-
-
-
-The above defines "geo" as explicit fields, leaving the other types to dynamic field interpretation.
-
-The SpatialRecursivePrefixTreeFieldType accepts geometries as WKT, so as a preparation for the
-import we are going to turn a shapefile into a CSV file with WKT syntax for the geometry.
-Let's also remember that SOLR needs a unique id field for the records, and that the coordinates
-are supposed to be in WGS84.
-The shapefile in question is instead in UTM, has a linestring geometry, and some fields, cat,id and label.
-
-The following command translates the shapefile in CSV (the command should be typed in a single line,
-it has been split over multiple lines for ease of reading)::
-
- ogr2ogr -f CSV
- -sql 'select FID as id, cat as cat_i, label as label_s,
- "roads" as layer FROM roads'
- -lco geometry=AS_WKT -s_srs "EPSG:26713" -t_srs "EPSG:4326"
- /tmp/roads.csv roads.shp
-
-Some observations:
-
- * The SQL is used mostly to include the special FID field into the results (a unique field is required)
- * The reprojection is performed to ensure the output geometries are in WGS84
- * The ``layer_s`` dynamic field is added to
-
-.. note:
-
- The "roads" syntax might not work correctly starting from GDAL 2.0, where a single quote should be
- used instead. Starting with GDAL 2.1 it will also be possible to add a ``-lco GEOMETRY_NAME=geo``
- to directly set the desired geometry name
-
-This will generate a CSV file looking as follows::
-
- WKT,id,cat_i,label_s,layer
- "LINESTRING (-103.763291353072518 44.375039982911382,-103.763393874038698 44.375282535746727,-103.764152625689903 44.376816068582023,-103.763893508430911 44.377653708326527,-103.76287152579593 44.378473197876396,-103.762075892308829 44.379009292692757,-103.76203441159079 44.379195585236509,-103.762124217456204 44.379295262047272,-103.762168141872152 44.379399997909999,-103.762326134985983 44.379527769244149,-103.763328403265064 44.380245486928708,-103.764011871363465 44.381295133519728,-103.76411460103661 44.381526706124056,-103.764953940327757 44.382396618315049,-103.765097289111338 44.382919576408355,-103.765147974157941 44.383073790503197,-103.76593766187851 44.384162856249255,-103.765899236602976 44.384607239970421,-103.765854384388703 44.384597320206453)",0,5,unimproved road,roads
- "LINESTRING (-103.762930948900078 44.385847721442218,-103.763012156628747 44.386002223293282,-103.763510654805799 44.386297912655408,-103.763869052966967 44.386746022746649,-103.763971116268394 44.387444295314552,-103.764244098825387 44.387545690358827,-103.764264649212294 44.387677659170357,-103.764160551326043 44.387951214930865,-103.764540576800869 44.388042632912118,-103.764851624437995 44.388149874425885,-103.764841258550391 44.388303515682807,-103.76484332449354 44.388616502755184,-103.765188923261391 44.388927221995502,-103.765110961905023 44.389448103450221,-103.765245311197177 44.389619574129583,-103.765545516097987 44.389907903843323,-103.765765403056434 44.390420596862072,-103.766285436779711 44.391655378673697,-103.766354640463163 44.39205684519964,-103.76638734105434 44.392364628456725,-103.766410556756725 44.392776645318136,-103.765934443919321 44.393365174368313,-103.766220869020188 44.393571013181166,-103.766661604125247 44.393684955690581,-103.767294323528063 44.393734806102117,-103.767623238680557 44.394127721518785,-103.769273719703676 44.394900867042516,-103.769609703946827 44.395326786724503,-103.769732072038536 44.395745219647871,-103.769609607364416 44.396194309461826,-103.769310708537489 44.396691166475954,-103.768865902286791 44.397236074649896)",1,5,unimproved road,roads
-
-At this point the CSV can be imported into SOLR using CURL::
-
- curl "http://solr.geo-solutions.it/solr/collection1/update/csv?commit=true&separator=%2C&fieldnames=geo,id,cat_i,label_s,layer_s&header=true"
- -H 'Content-type:text/csv; charset=utf-8' --data-binary @/tmp/roads.csv
-
-Some observations:
-
- * The files gets uploaded as a ``text/csv`` file, older versions might require a ``text/plain`` mime type
- * The ``fieldnames`` overrides the CSV header and allows us to specify the field name as expected by SOLR
-
-At this point it's possible to configure a layer showing only the roads in the GeoServer UI:
-
-.. figure:: images/solr_roads_configure.png
- :align: center
-
- *Setting up the roads layer*
-
-After setting the bounding box and the proper style, the layer preview will show the roads stored
-in SOLR:
-
-.. figure:: images/solr_roads_preview.png
- :align: center
-
+.. _community_solr_load:
+
+Loading spatial data into SOLR
+------------------------------
+
+This section provides a simple example of how to convert and load a shapefile into a SOLR instance.
+For more advanced needs and details about spatial support in SOLR consult the SOLR documentation,
+making sure to read the one associated to the version at hand (spatial support is still rapidly
+evolving).
+
+The current example has been developed and tested using GDAL 1.11 and SOLR 4.8; different versions
+of the tools and server might require a different syntax for the upload.
+
+The SOLR instance is supposed to have the following definitions in its schema:
+
+.. code-block:: xml
+
+   <!-- indicative schema entries; names and attributes may need adjusting to the SOLR version in use -->
+   <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
+              spatialContextFactory="com.spatial4j.core.context.jts.JtsSpatialContextFactory"
+              distErrPct="0.025" maxDistErr="0.000009" units="degrees"/>
+   <field name="geo" type="location_rpt" indexed="true" stored="true"/>
+
+The above defines ``geo`` as an explicit field, leaving the other attributes to dynamic field interpretation.
+
+The SpatialRecursivePrefixTreeFieldType accepts geometries as WKT, so in preparation for the
+import we are going to turn the shapefile into a CSV file with WKT syntax for the geometry.
+Let's also remember that SOLR needs a unique id field for the records, and that the coordinates
+are supposed to be in WGS84.
+The shapefile in question is instead in UTM, has a linestring geometry, and a few attributes: ``cat``, ``id`` and ``label``.
+
+The following command translates the shapefile into CSV (the command should be typed on a single line;
+it has been split over multiple lines for ease of reading)::
+
+ ogr2ogr -f CSV
+ -sql 'select FID as id, cat as cat_i, label as label_s,
+ "roads" as layer FROM roads'
+ -lco geometry=AS_WKT -s_srs "EPSG:26713" -t_srs "EPSG:4326"
+ /tmp/roads.csv roads.shp
+
+Some observations:
+
+ * The SQL is used mostly to include the special FID field into the results (a unique field is required)
+ * The reprojection is performed to ensure the output geometries are in WGS84
+ * The ``layer`` field (uploaded as the ``layer_s`` dynamic field) is added to identify which layer the records belong to, so that multiple layers can share the same SOLR index
+
+.. note::
+
+   The double quotes around "roads" might not work correctly starting from GDAL 2.0, where single
+   quotes should be used instead. Starting with GDAL 2.1 it will also be possible to add a
+   ``-lco GEOMETRY_NAME=geo`` option to directly set the desired geometry name.
+
+This will generate a CSV file looking as follows::
+
+ WKT,id,cat_i,label_s,layer
+ "LINESTRING (-103.763291353072518 44.375039982911382,-103.763393874038698 44.375282535746727,-103.764152625689903 44.376816068582023,-103.763893508430911 44.377653708326527,-103.76287152579593 44.378473197876396,-103.762075892308829 44.379009292692757,-103.76203441159079 44.379195585236509,-103.762124217456204 44.379295262047272,-103.762168141872152 44.379399997909999,-103.762326134985983 44.379527769244149,-103.763328403265064 44.380245486928708,-103.764011871363465 44.381295133519728,-103.76411460103661 44.381526706124056,-103.764953940327757 44.382396618315049,-103.765097289111338 44.382919576408355,-103.765147974157941 44.383073790503197,-103.76593766187851 44.384162856249255,-103.765899236602976 44.384607239970421,-103.765854384388703 44.384597320206453)",0,5,unimproved road,roads
+ "LINESTRING (-103.762930948900078 44.385847721442218,-103.763012156628747 44.386002223293282,-103.763510654805799 44.386297912655408,-103.763869052966967 44.386746022746649,-103.763971116268394 44.387444295314552,-103.764244098825387 44.387545690358827,-103.764264649212294 44.387677659170357,-103.764160551326043 44.387951214930865,-103.764540576800869 44.388042632912118,-103.764851624437995 44.388149874425885,-103.764841258550391 44.388303515682807,-103.76484332449354 44.388616502755184,-103.765188923261391 44.388927221995502,-103.765110961905023 44.389448103450221,-103.765245311197177 44.389619574129583,-103.765545516097987 44.389907903843323,-103.765765403056434 44.390420596862072,-103.766285436779711 44.391655378673697,-103.766354640463163 44.39205684519964,-103.76638734105434 44.392364628456725,-103.766410556756725 44.392776645318136,-103.765934443919321 44.393365174368313,-103.766220869020188 44.393571013181166,-103.766661604125247 44.393684955690581,-103.767294323528063 44.393734806102117,-103.767623238680557 44.394127721518785,-103.769273719703676 44.394900867042516,-103.769609703946827 44.395326786724503,-103.769732072038536 44.395745219647871,-103.769609607364416 44.396194309461826,-103.769310708537489 44.396691166475954,-103.768865902286791 44.397236074649896)",1,5,unimproved road,roads
+
+At this point the CSV can be imported into SOLR using curl (again, the command should be typed on a single line)::
+
+ curl "http://solr.geo-solutions.it/solr/collection1/update/csv?commit=true&separator=%2C&fieldnames=geo,id,cat_i,label_s,layer_s&header=true"
+ -H 'Content-type:text/csv; charset=utf-8' --data-binary @/tmp/roads.csv
+
+Some observations:
+
+ * The file gets uploaded as ``text/csv``; older SOLR versions might require a ``text/plain`` mime type
+ * The ``fieldnames`` parameter overrides the CSV header and allows us to specify the field names as expected by SOLR
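+
+To quickly check that the documents made it into the index, a SOLR query can be issued; the following
+is only a sketch, reusing the collection URL from the upload command and standard SOLR query
+parameters (adjust host and collection to the actual setup)::
+
+   curl "http://solr.geo-solutions.it/solr/collection1/select?q=layer_s:roads&rows=1&wt=json"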
+
+At this point it's possible to configure a layer showing only the roads in the GeoServer UI:
+
+.. figure:: images/solr_roads_configure.png
+ :align: center
+
+ *Setting up the roads layer*
+
+After setting the bounding box and the proper style, the layer preview will show the roads stored
+in SOLR:
+
+.. figure:: images/solr_roads_preview.png
+ :align: center
+
*Preview roads from SOLR layer*
\ No newline at end of file
diff --git a/doc/en/user/source/community/solr/optimize.rst b/doc/en/user/source/community/solr/optimize.rst
index 3321792370e..e1e698d3c5b 100644
--- a/doc/en/user/source/community/solr/optimize.rst
+++ b/doc/en/user/source/community/solr/optimize.rst
@@ -1,50 +1,50 @@
-.. _community_solr_optimize:
-
-Optimize rendering of complex polygons
---------------------------------------
-
-Rendering large maps with complex polygons, to show the overall distribution of the data, can
-take a significant toll, especially if GeoServer cannot connect to the SOLR server via a high
-speed network.
-
-A common approach to handle this issue is to add a second geometry to the SOLR documents,
-representing the centroid of the polygon, and using that one to render the features when
-fairly zoomed out.
-
-Once the SOLR documents have been updated with a centroid column, and it has been populated,
-the column can be added as a secondary geometry. Make sure to keep the polygonal geometry
-as the default one:
-
-.. figure:: images/optimize_ft1.png
- :align: center
-
-... (other fields omitted)
-
-.. figure:: images/optimize_ft2.png
- :align: center
-
-
- *Configuring a layer with multiple geometries*
-
-With this setup the polygonal geometry will still be used for all spatial filters, and for
-rendering, unless the style otherwise specifical demands for the centroid.
-
-Then, a style with scale dependencies can be setup in order to fetch only then centroids
-when fairly zoomed out, like in the following CSS example: ::
-
- [@scale > 50000] {
- geometry: [centroid];
- mark: symbol(square);
- }
- :mark {
- fill: red;
- size: 3;
- }
- [@scale <= 50000] {
- fill: red;
- stroke: black;
- }
-
-Using this style the ``spatial`` field will still be used to resolve the BBOX filter implicit
-in the WMS requests, but only the much smaller ``centroid`` one will be transferred to GeoServer
+.. _community_solr_optimize:
+
+Optimize rendering of complex polygons
+--------------------------------------
+
+Rendering large maps with complex polygons, to show the overall distribution of the data, can
+take a significant toll, especially if GeoServer cannot connect to the SOLR server via a high
+speed network.
+
+A common approach to handle this issue is to add a second geometry to the SOLR documents,
+representing the centroid of the polygon, and using that one to render the features when
+fairly zoomed out.
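+
+As a sketch, the second geometry can be declared in the SOLR schema alongside the main ``spatial``
+field, reusing the same spatial field type (the ``location_rpt`` type name below is only illustrative)::
+
+   <field name="centroid" type="location_rpt" indexed="true" stored="true"/>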
+
+Once the SOLR documents have been updated with a centroid column, and it has been populated,
+the column can be added as a secondary geometry. Make sure to keep the polygonal geometry
+as the default one:
+
+.. figure:: images/optimize_ft1.png
+ :align: center
+
+... (other fields omitted)
+
+.. figure:: images/optimize_ft2.png
+ :align: center
+
+
+ *Configuring a layer with multiple geometries*
+
+With this setup the polygonal geometry will still be used for all spatial filters, and for
+rendering, unless the style specifically requests the centroid instead.
+
+Then, a style with scale dependencies can be set up in order to fetch only the centroids
+when fairly zoomed out, as in the following CSS example::
+
+ [@scale > 50000] {
+ geometry: [centroid];
+ mark: symbol(square);
+ }
+ :mark {
+ fill: red;
+ size: 3;
+ }
+ [@scale <= 50000] {
+ fill: red;
+ stroke: black;
+ }
+
+Using this style the ``spatial`` field will still be used to resolve the BBOX filter implicit
+in the WMS requests, but only the much smaller ``centroid`` one will be transferred to GeoServer
for rendering.
\ No newline at end of file
diff --git a/doc/en/user/source/configuration/crshandling/configurecrs.rst b/doc/en/user/source/configuration/crshandling/configurecrs.rst
index 2b50d96caf3..2b380ef6357 100644
--- a/doc/en/user/source/configuration/crshandling/configurecrs.rst
+++ b/doc/en/user/source/configuration/crshandling/configurecrs.rst
@@ -1,37 +1,37 @@
-.. _crs_configure:
-
-Coordinate Reference System Configuration
-=========================================
-
-When adding data, GeoServer tries to inspect data headers looking for an EPSG code:
-
-* If the data has a CRS with an explicit EPSG code and the full CRS definition behind the code matches the one in GeoServer, the CRS will be already set for the data.
-* If the data has a CRS but no EPSG code, you can use the :guilabel:`Find` option on the :ref:`data_webadmin_layers` page to make GeoServer perform a lookup operation where the data CRS is compared against every other known CRS. If this succeeds, an EPSG code will be selected. The common case for a CRS that has no EPSG code is shapefiles whose .PRJ file contains a valid WKT string without the EPSG identifiers (as these are optional).
-
-If an EPSG code cannot be found, then either the data has no CRS or it is unknown to GeoServer. In this case, there are a few options:
-
-* Force the declared CRS, ignoring the native one. This is the best solution if the native CRS is known to be wrong.
-* Reproject from the native to the declared CRS. This is the best solution if the native CRS is correct, but cannot be matched to an EPSG number. (An alternative is to add a custom EPSG code that matches exactly the native SRS. See the section on :ref:`crs_custom` for more information.)
-
-If your data has no native CRS information, the only option is to specify/force an EPSG code.
-
-Increasing Comparison Tolerance
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Decimal numbers comparisons are made using a comparison tolerance. This means, as an instance, that an ellipsoid's semi_major axis
-equals a candidate EPSG's ellipsoid semi_major axis only if their difference is within that tolerance.
-Default value is 10^-9 although it can be changed by setting a COMPARISON_TOLERANCE Java System property to your container's JVM to specify a different value.
-
-.. warning::
-
- The default value should be changed only if you are aware of use cases which require a wider tolerance.
- Don't change it unless really needed (See the following example).
-
-Example
-.......
-
-* Your sample dataset is known to be a LambertConformalConic projection and the related EPSG code defines latitude_of_origin value = 25.0.
-* The coverageStore plugin is exposing raster projection details through a third party library which provides projection parameter definitions as float numbers.
-* Due to the underlying math computations occurring in that third party library, the exposed projection parameters are subject to some accuracy loss, so that the provided latitude_of_origin is something like 25.0000012 whilst all the other params match the EPSG definition.
-* You notice that the native CRS isn't properly recognized as the expected EPSG due to that small difference in latitude_of_origin
-
-In that case you could consider increasing a bit the tolerance.
+.. _crs_configure:
+
+Coordinate Reference System Configuration
+=========================================
+
+When adding data, GeoServer tries to inspect data headers looking for an EPSG code:
+
+* If the data has a CRS with an explicit EPSG code and the full CRS definition behind the code matches the one in GeoServer, the CRS will be already set for the data.
+* If the data has a CRS but no EPSG code, you can use the :guilabel:`Find` option on the :ref:`data_webadmin_layers` page to make GeoServer perform a lookup operation where the data CRS is compared against every other known CRS. If this succeeds, an EPSG code will be selected. The common case for a CRS that has no EPSG code is shapefiles whose .PRJ file contains a valid WKT string without the EPSG identifiers (as these are optional).
+
+If an EPSG code cannot be found, then either the data has no CRS or it is unknown to GeoServer. In this case, there are a few options:
+
+* Force the declared CRS, ignoring the native one. This is the best solution if the native CRS is known to be wrong.
+* Reproject from the native to the declared CRS. This is the best solution if the native CRS is correct, but cannot be matched to an EPSG number. (An alternative is to add a custom EPSG code that matches exactly the native SRS. See the section on :ref:`crs_custom` for more information.)
+
+If your data has no native CRS information, the only option is to specify/force an EPSG code.
+
+Increasing Comparison Tolerance
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Comparisons between decimal numbers are made using a comparison tolerance. This means, for instance, that an ellipsoid's semi_major axis
+equals a candidate EPSG ellipsoid's semi_major axis only if their difference is within that tolerance.
+The default value is 10^-9, although it can be changed by setting the COMPARISON_TOLERANCE Java System property on your container's JVM.
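+
+For example, on a Tomcat-style setup the property could be passed to the JVM through ``JAVA_OPTS``
+(the value below is only an illustration of a wider tolerance)::
+
+   export JAVA_OPTS="$JAVA_OPTS -DCOMPARISON_TOLERANCE=1e-7"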
+
+.. warning::
+
+ The default value should be changed only if you are aware of use cases which require a wider tolerance.
+ Don't change it unless really needed (See the following example).
+
+Example
+.......
+
+* Your sample dataset is known to be a LambertConformalConic projection and the related EPSG code defines latitude_of_origin value = 25.0.
+* The coverageStore plugin is exposing raster projection details through a third party library which provides projection parameter definitions as float numbers.
+* Due to the underlying math computations occurring in that third party library, the exposed projection parameters are subject to some accuracy loss, so that the provided latitude_of_origin is something like 25.0000012 whilst all the other params match the EPSG definition.
+* You notice that the native CRS isn't properly recognized as the expected EPSG code due to that small difference in latitude_of_origin.
+
+In that case you could consider increasing the tolerance a bit.
diff --git a/doc/en/user/source/configuration/crshandling/customcrs.rst b/doc/en/user/source/configuration/crshandling/customcrs.rst
index e694a370fa8..4d622360a3b 100644
--- a/doc/en/user/source/configuration/crshandling/customcrs.rst
+++ b/doc/en/user/source/configuration/crshandling/customcrs.rst
@@ -1,128 +1,128 @@
-.. _crs_custom:
-
-Custom CRS Definitions
-======================
-
-Add a custom CRS
-----------------
-
-This example shows how to add a custom projection in GeoServer.
-
-#. The projection parameters need to be provided as a WKT (well known text) definition. The code sample below is just an example::
-
- PROJCS["NAD83 / Austin",
- GEOGCS["NAD83",
- DATUM["North_American_Datum_1983",
- SPHEROID["GRS 1980", 6378137.0, 298.257222101],
- TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
- PRIMEM["Greenwich", 0.0],
- UNIT["degree", 0.017453292519943295],
- AXIS["Lon", EAST],
- AXIS["Lat", NORTH]],
- PROJECTION["Lambert_Conformal_Conic_2SP"],
- PARAMETER["central_meridian", -100.333333333333],
- PARAMETER["latitude_of_origin", 29.6666666666667],
- PARAMETER["standard_parallel_1", 31.883333333333297],
- PARAMETER["false_easting", 2296583.333333],
- PARAMETER["false_northing", 9842500.0],
- PARAMETER["standard_parallel_2", 30.1166666666667],
- UNIT["m", 1.0],
- AXIS["x", EAST],
- AXIS["y", NORTH],
- AUTHORITY["EPSG","100002"]]
-
- .. note:: This code sample has been formatted for readability. The information will need to be provided on a single line instead, or with backslash characters at the end of every line (except the last one).
-
-#. Go into the :file:`user_projections` directory inside your data directory, and open the :file:`epsg.properties` file. If this file doesn't exist, you can create it.
-
-#. Insert the code WKT for the projection at the end of the file (on a single line or with backslash characters)::
-
- 100002=PROJCS["NAD83 / Austin", \
- GEOGCS["NAD83", \
- DATUM["North_American_Datum_1983", \
- SPHEROID["GRS 1980", 6378137.0, 298.257222101], \
- TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]], \
- PRIMEM["Greenwich", 0.0], \
- UNIT["degree", 0.017453292519943295], \
- AXIS["Lon", EAST], \
- AXIS["Lat", NORTH]], \
- PROJECTION["Lambert_Conformal_Conic_2SP"], \
- PARAMETER["central_meridian", -100.333333333333], \
- PARAMETER["latitude_of_origin", 29.6666666666667], \
- PARAMETER["standard_parallel_1", 31.883333333333297], \
- PARAMETER["false_easting", 2296583.333333], \
- PARAMETER["false_northing", 9842500.0], \
- PARAMETER["standard_parallel_2", 30.1166666666667], \
- UNIT["m", 1.0], \
- AXIS["x", EAST], \
- AXIS["y", NORTH], \
- AUTHORITY["EPSG","100002"]]
-
-.. note:: Note the number that precedes the WKT. This will determine the EPSG code. So in this example, the EPSG code is 100002.
-
-#. Save the file.
-
-#. Restart GeoServer.
-
-#. Verify that the CRS has been properly parsed by navigating to the :ref:`srs_list` page in the :ref:`web_admin`.
-
-#. If the projection wasn't listed, examine the logs for any errors.
-
-Override an official EPSG code
-------------------------------
-
-In some situations it is necessary to override an official EPSG code with a custom definition. A common case is the need to change the TOWGS84 parameters in order to get better reprojection accuracy in specific areas.
-
-The GeoServer referencing subsystem checks the existence of another property file, :file:`epsg_overrides.properties`, whose format is the same as :file:`epsg.properties`. Any definition contained in :file:`epsg_overrides.properties` will **override** the EPSG code, while definitions stored in :file:`epsg.proeprties` can only **add** to the database.
-
-Special care must be taken when overriding the Datum parameters, in particular the **TOWGS84** parameters. To make sure the override parameters are actually used the code of the Datum must be removed, otherwise the referencing subsystem will keep on reading the official database in search of the best Datum shift method (grid, 7 or 5 parameters transformation, plain affine transform).
-
-For example, if you need to override the official **TOWGS84** parameters of EPSG:23031::
-
- PROJCS["ED50 / UTM zone 31N",
- GEOGCS["ED50",
- DATUM["European Datum 1950",
- SPHEROID["International 1924", 6378388.0, 297.0, AUTHORITY["EPSG","7022"]],
- TOWGS84[-157.89, -17.16, -78.41, 2.118, 2.697, -1.434, -1.1097046576093785],
- AUTHORITY["EPSG","6230"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH],
- AUTHORITY["EPSG","4230"]],
- PROJECTION["Transverse_Mercator"],
- PARAMETER["central_meridian", 3.0],
- PARAMETER["latitude_of_origin", 0.0],
- PARAMETER["scale_factor", 0.9996],
- PARAMETER["false_easting", 500000.0],
- PARAMETER["false_northing", 0.0],
- UNIT["m", 1.0],
- AXIS["Easting", EAST],
- AXIS["Northing", NORTH],
- AUTHORITY["EPSG","23031"]]
-
-You should write the following (in a single line, here it's reported formatted over multiple lines for readability)::
-
- 23031=
- PROJCS["ED50 / UTM zone 31N",
- GEOGCS["ED50",
- DATUM["European Datum 1950",
- SPHEROID["International 1924", 6378388.0, 297.0, AUTHORITY["EPSG","7022"]],
- TOWGS84[-136.65549, -141.4658, -167.29848, 2.093088, 0.001405, 0.107709, 11.54611],
- AUTHORITY["EPSG","6230"]],
- PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
- UNIT["degree", 0.017453292519943295],
- AXIS["Geodetic longitude", EAST],
- AXIS["Geodetic latitude", NORTH]],
- PROJECTION["Transverse_Mercator"],
- PARAMETER["central_meridian", 3.0],
- PARAMETER["latitude_of_origin", 0.0],
- PARAMETER["scale_factor", 0.9996],
- PARAMETER["false_easting", 500000.0],
- PARAMETER["false_northing", 0.0],
- UNIT["m", 1.0],
- AXIS["Easting", EAST],
- AXIS["Northing", NORTH],
- AUTHORITY["EPSG","23031"]]
-
+.. _crs_custom:
+
+Custom CRS Definitions
+======================
+
+Add a custom CRS
+----------------
+
+This example shows how to add a custom projection in GeoServer.
+
+#. The projection parameters need to be provided as a WKT (well known text) definition. The code sample below is just an example::
+
+ PROJCS["NAD83 / Austin",
+ GEOGCS["NAD83",
+ DATUM["North_American_Datum_1983",
+ SPHEROID["GRS 1980", 6378137.0, 298.257222101],
+ TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
+ PRIMEM["Greenwich", 0.0],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Lon", EAST],
+ AXIS["Lat", NORTH]],
+ PROJECTION["Lambert_Conformal_Conic_2SP"],
+ PARAMETER["central_meridian", -100.333333333333],
+ PARAMETER["latitude_of_origin", 29.6666666666667],
+ PARAMETER["standard_parallel_1", 31.883333333333297],
+ PARAMETER["false_easting", 2296583.333333],
+ PARAMETER["false_northing", 9842500.0],
+ PARAMETER["standard_parallel_2", 30.1166666666667],
+ UNIT["m", 1.0],
+ AXIS["x", EAST],
+ AXIS["y", NORTH],
+ AUTHORITY["EPSG","100002"]]
+
+ .. note:: This code sample has been formatted for readability. The information will need to be provided on a single line instead, or with backslash characters at the end of every line (except the last one).
+
+#. Go into the :file:`user_projections` directory inside your data directory, and open the :file:`epsg.properties` file. If this file doesn't exist, you can create it.
+
+#. Insert the WKT for the projection at the end of the file (on a single line or with backslash characters)::
+
+ 100002=PROJCS["NAD83 / Austin", \
+ GEOGCS["NAD83", \
+ DATUM["North_American_Datum_1983", \
+ SPHEROID["GRS 1980", 6378137.0, 298.257222101], \
+ TOWGS84[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]], \
+ PRIMEM["Greenwich", 0.0], \
+ UNIT["degree", 0.017453292519943295], \
+ AXIS["Lon", EAST], \
+ AXIS["Lat", NORTH]], \
+ PROJECTION["Lambert_Conformal_Conic_2SP"], \
+ PARAMETER["central_meridian", -100.333333333333], \
+ PARAMETER["latitude_of_origin", 29.6666666666667], \
+ PARAMETER["standard_parallel_1", 31.883333333333297], \
+ PARAMETER["false_easting", 2296583.333333], \
+ PARAMETER["false_northing", 9842500.0], \
+ PARAMETER["standard_parallel_2", 30.1166666666667], \
+ UNIT["m", 1.0], \
+ AXIS["x", EAST], \
+ AXIS["y", NORTH], \
+ AUTHORITY["EPSG","100002"]]
+
+.. note:: Note the number that precedes the WKT. This will determine the EPSG code. So in this example, the EPSG code is 100002.
+
+#. Save the file.
+
+#. Restart GeoServer.
+
+#. Verify that the CRS has been properly parsed by navigating to the :ref:`srs_list` page in the :ref:`web_admin`.
+
+#. If the projection wasn't listed, examine the logs for any errors.
+
+Override an official EPSG code
+------------------------------
+
+In some situations it is necessary to override an official EPSG code with a custom definition. A common case is the need to change the TOWGS84 parameters in order to get better reprojection accuracy in specific areas.
+
+The GeoServer referencing subsystem checks the existence of another property file, :file:`epsg_overrides.properties`, whose format is the same as :file:`epsg.properties`. Any definition contained in :file:`epsg_overrides.properties` will **override** the EPSG code, while definitions stored in :file:`epsg.properties` can only **add** to the database.
+
+Special care must be taken when overriding the Datum parameters, in particular the **TOWGS84** parameters. To make sure the override parameters are actually used, the code of the Datum must be removed; otherwise the referencing subsystem will keep on reading the official database in search of the best Datum shift method (grid, 7 or 5 parameters transformation, plain affine transform).
+
+For example, if you need to override the official **TOWGS84** parameters of EPSG:23031::
+
+ PROJCS["ED50 / UTM zone 31N",
+ GEOGCS["ED50",
+ DATUM["European Datum 1950",
+ SPHEROID["International 1924", 6378388.0, 297.0, AUTHORITY["EPSG","7022"]],
+ TOWGS84[-157.89, -17.16, -78.41, 2.118, 2.697, -1.434, -1.1097046576093785],
+ AUTHORITY["EPSG","6230"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH],
+ AUTHORITY["EPSG","4230"]],
+ PROJECTION["Transverse_Mercator"],
+ PARAMETER["central_meridian", 3.0],
+ PARAMETER["latitude_of_origin", 0.0],
+ PARAMETER["scale_factor", 0.9996],
+ PARAMETER["false_easting", 500000.0],
+ PARAMETER["false_northing", 0.0],
+ UNIT["m", 1.0],
+ AXIS["Easting", EAST],
+ AXIS["Northing", NORTH],
+ AUTHORITY["EPSG","23031"]]
+
+You should write the following (on a single line; here it is formatted over multiple lines for readability)::
+
+ 23031=
+ PROJCS["ED50 / UTM zone 31N",
+ GEOGCS["ED50",
+ DATUM["European Datum 1950",
+ SPHEROID["International 1924", 6378388.0, 297.0, AUTHORITY["EPSG","7022"]],
+ TOWGS84[-136.65549, -141.4658, -167.29848, 2.093088, 0.001405, 0.107709, 11.54611],
+ AUTHORITY["EPSG","6230"]],
+ PRIMEM["Greenwich", 0.0, AUTHORITY["EPSG","8901"]],
+ UNIT["degree", 0.017453292519943295],
+ AXIS["Geodetic longitude", EAST],
+ AXIS["Geodetic latitude", NORTH]],
+ PROJECTION["Transverse_Mercator"],
+ PARAMETER["central_meridian", 3.0],
+ PARAMETER["latitude_of_origin", 0.0],
+ PARAMETER["scale_factor", 0.9996],
+ PARAMETER["false_easting", 500000.0],
+ PARAMETER["false_northing", 0.0],
+ UNIT["m", 1.0],
+ AXIS["Easting", EAST],
+ AXIS["Northing", NORTH],
+ AUTHORITY["EPSG","23031"]]
+
The definition has been changed in two places: the **TOWGS84** parameters have been modified, and the Datum code, ``AUTHORITY["EPSG","4230"]``, has been removed.
\ No newline at end of file
diff --git a/doc/en/user/source/configuration/crshandling/manualepsg.rst b/doc/en/user/source/configuration/crshandling/manualepsg.rst
index dda32d03089..876f4c4b348 100644
--- a/doc/en/user/source/configuration/crshandling/manualepsg.rst
+++ b/doc/en/user/source/configuration/crshandling/manualepsg.rst
@@ -1,64 +1,64 @@
-.. _crs_manual_epsg:
-
-Manually editing the EPSG database
-==================================
-
-.. warning:: These instructions are very advanced, and are here mainly for the curious who want to know details about the EPSG database subsystem.
-
-To define a custom projection, edit the EPSG.sql file, which is used to create the cached EPSG database.
-
-#. Navigate to the :file:`WEB-INF/lib` directory
-
-#. Uncompress the :file:`gt2-epsg-h.jar` file. On Linux, the command is::
-
- jar xvf gt2-epsg-h.jar
-
-#. Open :file:`org/geotools/referencing/factory/epsg/EPSG.sql` with a text editor. To add a custom projection, these entries are essential:
-
- #. An entry in the EPSG_COORDINATEREFERENCESYSTEM table::
-
- (41111,'WGC 84 / WRF Lambert',1324,'projected',4400,NULL,4326,20000,NULL,NULL,'US Nat. scale mapping.','Entered by Alex Petkov','Missoula Firelab WRF','WRF','2000-10-19','',1,0),
-
- where:
-
- * **1324** is the EPSG_AREA code that describes the area covered by my projection
- * **4400** is the EPSG_COORDINATESYSTEM code for my projection
- * **20000** is the EPSG_COORDOPERATIONPARAMVALUE key for the array that contains my projection parameters
-
- #. An entry in the EPSG_COORDOPERATIONPARAMVALUE table::
-
- (20000,9802,8821,40,'',9102), //latitude of origin
- (20000,9802,8822,-97.0,'',9102), //central meridian
- (20000,9802,8823,33,'',9110), //st parallel 1
- (20000,9802,8824,45,'',9110), //st parallel 2
- (20000,9802,8826,0.0,'',9001), //false easting
- (20000,9802,8827,0.0,'',9001) //false northing
-
- where:
-
- * **9802** is the EPSG_COORDOPERATIONMETHOD key for the Lambert Conic Conformal (2SP) formula
-
- #. An entry in the EPSG_COORDOPERATION table:
-
- (20000,'WRF Lambert','conversion',NULL,NULL,'',NULL,1324,'Used for weather forecasting.',0.0,9802,NULL,NULL,'Used with the WRF-Chem model for weather forecasting','Firelab in Missoula, MT','EPSG','2005-11-23','2005.01',1,0)
-
- where:
-
- * **1324** is the EPSG_AREA code that describes the area covered by my projection
- * **9802** is the EPSG_COORDOPERATIONMETHOD key for the Lambert Conic Conformal (2SP) formula
-
-.. note:: Observe the commas. If you enter a line that is at the end of an INSERT statement, the comma is omitted (make sure the row before that has a comma at the end). Otherwise, add a comma at the end of your entry.
-
-#. After all edits, save the file and exit.
-
-#. Compress the gt2-epsg-h.jar file. On Linux, the command is::
-
- jar -Mcvf gt2-epsg-h.jar META-INF org
-
-#. Remove the cached copy of the EPSG database, so that can be recreated. On Linux, the command is::
-
- rm -rf /tmp/Geotools/Databases/HSQL
-
-#. Restart GeoServer.
-
-The new projection will be successfully parsed. Verify that the CRS has been properly parsed by navigating to the :ref:`srs_list` page in the :ref:`web_admin`.
+.. _crs_manual_epsg:
+
+Manually editing the EPSG database
+==================================
+
+.. warning:: These instructions are very advanced, and are here mainly for the curious who want to know details about the EPSG database subsystem.
+
+To define a custom projection, edit the EPSG.sql file, which is used to create the cached EPSG database.
+
+#. Navigate to the :file:`WEB-INF/lib` directory
+
+#. Uncompress the :file:`gt2-epsg-h.jar` file. On Linux, the command is::
+
+ jar xvf gt2-epsg-h.jar
+
+#. Open :file:`org/geotools/referencing/factory/epsg/EPSG.sql` with a text editor. To add a custom projection, these entries are essential:
+
+ #. An entry in the EPSG_COORDINATEREFERENCESYSTEM table::
+
+ (41111,'WGC 84 / WRF Lambert',1324,'projected',4400,NULL,4326,20000,NULL,NULL,'US Nat. scale mapping.','Entered by Alex Petkov','Missoula Firelab WRF','WRF','2000-10-19','',1,0),
+
+ where:
+
+ * **1324** is the EPSG_AREA code that describes the area covered by my projection
+ * **4400** is the EPSG_COORDINATESYSTEM code for my projection
+ * **20000** is the EPSG_COORDOPERATIONPARAMVALUE key for the array that contains my projection parameters
+
+ #. An entry in the EPSG_COORDOPERATIONPARAMVALUE table::
+
+ (20000,9802,8821,40,'',9102), //latitude of origin
+ (20000,9802,8822,-97.0,'',9102), //central meridian
+ (20000,9802,8823,33,'',9110), //st parallel 1
+ (20000,9802,8824,45,'',9110), //st parallel 2
+ (20000,9802,8826,0.0,'',9001), //false easting
+ (20000,9802,8827,0.0,'',9001) //false northing
+
+ where:
+
+ * **9802** is the EPSG_COORDOPERATIONMETHOD key for the Lambert Conic Conformal (2SP) formula
+
+ #. An entry in the EPSG_COORDOPERATION table::
+
+ (20000,'WRF Lambert','conversion',NULL,NULL,'',NULL,1324,'Used for weather forecasting.',0.0,9802,NULL,NULL,'Used with the WRF-Chem model for weather forecasting','Firelab in Missoula, MT','EPSG','2005-11-23','2005.01',1,0)
+
+ where:
+
+ * **1324** is the EPSG_AREA code that describes the area covered by my projection
+ * **9802** is the EPSG_COORDOPERATIONMETHOD key for the Lambert Conic Conformal (2SP) formula
+
+.. note:: Observe the commas. If you enter a line that is at the end of an INSERT statement, the comma is omitted (make sure the row before that has a comma at the end). Otherwise, add a comma at the end of your entry.
+
+#. After all edits, save the file and exit.
+
+#. Compress the gt2-epsg-h.jar file. On Linux, the command is::
+
+ jar -Mcvf gt2-epsg-h.jar META-INF org
+
+#. Remove the cached copy of the EPSG database, so that it can be recreated. On Linux, the command is::
+
+ rm -rf /tmp/Geotools/Databases/HSQL
+
+#. Restart GeoServer.
+
+The new projection will be successfully parsed. Verify that the CRS has been properly parsed by navigating to the :ref:`srs_list` page in the :ref:`web_admin`.
diff --git a/doc/en/user/source/data/app-schema/joining.rst b/doc/en/user/source/data/app-schema/joining.rst
index e24ac2abd70..4c848820f6b 100644
--- a/doc/en/user/source/data/app-schema/joining.rst
+++ b/doc/en/user/source/data/app-schema/joining.rst
@@ -1,109 +1,109 @@
-.. _app-schema.joining:
-
-Joining Support For Performance
-===============================
-
-App-schema joining is a optional configuration parameter that tells app-schema to use a different implementation for :ref:`app-schema.feature-chaining`,
-which in many cases can improve performance considerably, by reducing the amount of SQL queries sent to the DBMS.
-
-Conditions
-----------
-In order to use App-schema Joining, the following configuration conditions must be met:
-
-* All feature mappings used must be mapped to JDBC datastores.
-
-* All feature mappings that are chained to each other must map to the same physical database.
-
-* In your mappings, there are restrictions on the CQL expressions specified in the of both the referencing field in the parent feature as well as the referenced field in the nested feature (like FEATURE_LINK). Any operators or functions used in this expression must be supported by the filter capibilities, i.e. geotools must be able to translate them directly to SQL code. This can be different for each DBMS, though as a general rule it can assumed that comparison operators, logical operators and arithmetic operators are all supported but functions are not. Using simple field names for feature chaining is guaranteed to always work.
-
-Failing to comply with any of these three restrictions when turning on Joining will result in exceptions thrown at run-time.
-
-When using app-schema with Joining turned on, the following restrictions exist with respect to normal behaviour:
-
-* XPaths specified inside Filters do not support handling referenced features (see :ref:`app-schema.feature-chaining-by-reference`) as if they were actual nested features, i.e. XPaths can only be evaluated when they can be evaluated against the actual XML code produced by WFS according to the XPath standard.
-
-Configuration
--------------
-Joining is turned on by default. It is disabled by adding this simple line to your app-schema.properties file (see :ref:`app-schema.property-interpolation`) ::
-
- app-schema.joining = false
-
-Or, alternatively, by setting the value of the Java System Property "app-schema.joining" to "false", for example ::
-
- java -DGEOSERVER_DATA_DIR=... -Dapp-schema.joining=false Start
-
-Not specifying "app-schema.joining" parameter will enable joining by default.
-
-Database Design Guidelines
---------------------------
-
-* Databases should be optimised for fast on-the-fly joining and ordering.
-
-* Make sure to put indexes on all fields used as identifiers and for feature chaining, unique indexes where possible. Lack of indices may result in data being encoded in the wrong order or corrupted output when feature chaining is involved.
-
-* Map your features preferably to normalised tables.
-
-* It is recommended to apply feature chaining to regular one-to-many relationships, i.e. there should be a unique constraint defined on one of the fields used for the chaining, and if possible a foreign key constraint defined on the other field.
-
-Effects on Performance
-----------------------
-
-Typical curves of response time for configurations with and without joining against the amount of features
-produced will be shaped like this:
-
-.. image:: joining.png
-
-In the default implementation, response time increases rapidly with respect to the amount of produced features. This is because feature chaining
-is implemented by sending multiple SQL requests to the DBMS per feature, so the amount of requests increases with the amount
-of features produced. When Joining is turned on, response time will be almost constant with respect to the number of features. This is because in this implementation a small amount of larger queries is sent to the DBMS, independant of the amount of features produced.
-In summary, difference in performance becomes greater as the amount of features requested gets bigger. General performance of joining will be dependant on database and mapping design (see above) and database size.
-
-Using joining is strongly recommended when a large number of features need to be produced, for example
-when producing maps with WMS (see :ref:`app-schema.wms-support`).
-
-Optimising the performance of the database will maximise the benefit of using joining, including for small queries.
-
-Native Encoding of Filters on Nested Attributes
------------------------------------------------
-
-When App-Schema Joining is active, filters operating on nested attributes (i.e. attributes of features that are joined to the queried type via :ref:`app-schema.feature-chaining`) are translated to SQL and executed directly in the database backend, rather than being evaluated in memory after all features have been loaded (which was standard behavior in earlier versions of GeoServer). Native encoding can yield significant performance improvements, especially when the total number of features in the database is high (several thousands or more), but only a few of them would satisfy the filter.
-
-There are, however, a few limitations in the current implementation:
-
-1. Joining support must not have been explicitly disabled and all its pre-conditions must be met (see above)
-2. Only binary comparison operators (e.g. ``PropertyIsEqualTo``, ``PropertyIsGreaterThan``, etc...), ``PropertyIsLike`` and ``PropertyIsNull`` filters are translated to SQL
-3. Filters involving conditional polymorphic mappings are evaluated in memory
-4. Filters comparing two or more different nested attributes are evaluated in memory
-5. Filters matching multiple nested attribute mappings are evaluated in memory
-
-Much like joining support, native encoding of nested filters is turned on by default, and it is disabled by adding to your app-schema.properties file the line ::
-
- app-schema.encodeNestedFilters = false
-
-Or, alternatively, by setting the value of the Java System Property "app-schema.encodeNestedFilters" to "false", for example ::
-
- java -DGEOSERVER_DATA_DIR=... -Dapp-schema.encodeNestedFilters=false Start
-
-UNION performance improvement for OR conditions
------------------------------------------------
-
-OR conditions are difficult to optimize for postgresql and are usually slow. App-Schema improves OR condition performance using UNION clauses instead OR for nested filter subqueries.
-
-With UNION improvement enabled main OR binary operator on nested filter subquery will rebuild normal OR query like::
-
- SELECT id, name FROM table WHERE name = "A" OR name = "B"
-
-to::
-
- SELECT id, name FROM table WHERE name = "A" UNION SELECT id, name FROM table WHERE name = "B"
-
-UNION improvement is enabled by default, and it is disabled by adding to your app-schema.properties file the line ::
-
- app-schema.orUnionReplace = false
-
-Or, alternatively, by setting the value of the Java System Property "app-schema.orUnionReplace" to "false", for example ::
-
- java -DGEOSERVER_DATA_DIR=... -Dapp-schema.orUnionReplace=false Start
-
-.. note::
+.. _app-schema.joining:
+
+Joining Support For Performance
+===============================
+
+App-schema joining is an optional configuration parameter that tells app-schema to use a different implementation for :ref:`app-schema.feature-chaining`,
+which in many cases can improve performance considerably by reducing the number of SQL queries sent to the DBMS.
+
+Conditions
+----------
+In order to use App-schema Joining, the following configuration conditions must be met:
+
+* All feature mappings used must be mapped to JDBC datastores.
+
+* All feature mappings that are chained to each other must map to the same physical database.
+
+* In your mappings, there are restrictions on the CQL expressions specified in the ``<OCQL>`` element of both the referencing field in the parent feature and the referenced field in the nested feature (like FEATURE_LINK). Any operators or functions used in these expressions must be supported by the filter capabilities, i.e. GeoTools must be able to translate them directly to SQL code. This can be different for each DBMS, though as a general rule it can be assumed that comparison operators, logical operators and arithmetic operators are all supported but functions are not. Using simple field names for feature chaining is guaranteed to always work.
+
+Failing to comply with any of these three restrictions when turning on Joining will result in exceptions thrown at run-time.
+
+When using app-schema with Joining turned on, the following restrictions exist with respect to normal behaviour:
+
+* XPaths specified inside Filters do not support handling referenced features (see :ref:`app-schema.feature-chaining-by-reference`) as if they were actual nested features, i.e. XPaths can only be evaluated when they can be evaluated against the actual XML code produced by WFS according to the XPath standard.
+
+Configuration
+-------------
+Joining is turned on by default. It is disabled by adding this simple line to your app-schema.properties file (see :ref:`app-schema.property-interpolation`) ::
+
+ app-schema.joining = false
+
+Or, alternatively, by setting the value of the Java System Property "app-schema.joining" to "false", for example ::
+
+ java -DGEOSERVER_DATA_DIR=... -Dapp-schema.joining=false Start
+
+Not specifying "app-schema.joining" parameter will enable joining by default.
+
+Database Design Guidelines
+--------------------------
+
+* Databases should be optimised for fast on-the-fly joining and ordering.
+
+* Make sure to put indexes on all fields used as identifiers and for feature chaining, unique indexes where possible. Lack of indices may result in data being encoded in the wrong order or corrupted output when feature chaining is involved.
+
+* Map your features preferably to normalised tables.
+
+* It is recommended to apply feature chaining to regular one-to-many relationships, i.e. there should be a unique constraint defined on one of the fields used for the chaining, and if possible a foreign key constraint defined on the other field (see the SQL sketch below).
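+
+A minimal SQL sketch of these guidelines, assuming a hypothetical parent table ``mappedfeature``
+chained to a child table ``observation`` through an ``mf_id`` field::
+
+   -- unique constraint (backed by an index) on the parent side of the chaining relationship
+   ALTER TABLE mappedfeature ADD CONSTRAINT mappedfeature_id_uq UNIQUE (id);
+   -- index and foreign key on the child side
+   CREATE INDEX observation_mf_id_idx ON observation (mf_id);
+   ALTER TABLE observation
+      ADD CONSTRAINT observation_mf_id_fk FOREIGN KEY (mf_id) REFERENCES mappedfeature (id);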
+
+Effects on Performance
+----------------------
+
+Typical curves of response time for configurations with and without joining against the number of features
+produced will be shaped like this:
+
+.. image:: joining.png
+
+In the default implementation, response time increases rapidly with the number of features produced. This is because feature chaining
+is implemented by sending multiple SQL requests to the DBMS per feature, so the number of requests increases with the number
+of features produced. When Joining is turned on, response time will be almost constant with respect to the number of features. This is because in this implementation a small number of larger queries is sent to the DBMS, independent of the number of features produced.
+In summary, the difference in performance becomes greater as the number of features requested gets bigger. The general performance of joining will depend on database and mapping design (see above) and database size.
+
+Using joining is strongly recommended when a large number of features need to be produced, for example
+when producing maps with WMS (see :ref:`app-schema.wms-support`).
+
+Optimising the performance of the database will maximise the benefit of using joining, including for small queries.
+
+Native Encoding of Filters on Nested Attributes
+-----------------------------------------------
+
+When App-Schema Joining is active, filters operating on nested attributes (i.e. attributes of features that are joined to the queried type via :ref:`app-schema.feature-chaining`) are translated to SQL and executed directly in the database backend, rather than being evaluated in memory after all features have been loaded (which was standard behavior in earlier versions of GeoServer). Native encoding can yield significant performance improvements, especially when the total number of features in the database is high (several thousands or more), but only a few of them would satisfy the filter.
+
+There are, however, a few limitations in the current implementation:
+
+1. Joining support must not have been explicitly disabled and all its pre-conditions must be met (see above)
+2. Only binary comparison operators (e.g. ``PropertyIsEqualTo``, ``PropertyIsGreaterThan``, etc...), ``PropertyIsLike`` and ``PropertyIsNull`` filters are translated to SQL
+3. Filters involving conditional polymorphic mappings are evaluated in memory
+4. Filters comparing two or more different nested attributes are evaluated in memory
+5. Filters matching multiple nested attribute mappings are evaluated in memory
+
+Much like joining support, native encoding of nested filters is turned on by default, and it is disabled by adding to your app-schema.properties file the line ::
+
+ app-schema.encodeNestedFilters = false
+
+Or, alternatively, by setting the value of the Java System Property "app-schema.encodeNestedFilters" to "false", for example ::
+
+ java -DGEOSERVER_DATA_DIR=... -Dapp-schema.encodeNestedFilters=false Start
+
+UNION performance improvement for OR conditions
+-----------------------------------------------
+
+OR conditions are difficult for PostgreSQL to optimize and are usually slow. App-Schema improves OR condition performance by using UNION clauses instead of OR for nested filter subqueries.
+
+With the UNION improvement enabled, an OR binary operator on a nested filter subquery will rewrite a normal OR query like::
+
+ SELECT id, name FROM table WHERE name = 'A' OR name = 'B'
+
+to::
+
+ SELECT id, name FROM table WHERE name = 'A' UNION SELECT id, name FROM table WHERE name = 'B'
+
+UNION improvement is enabled by default, and it is disabled by adding to your app-schema.properties file the line ::
+
+ app-schema.orUnionReplace = false
+
+Or, alternatively, by setting the value of the Java System Property "app-schema.orUnionReplace" to "false", for example ::
+
+ java -DGEOSERVER_DATA_DIR=... -Dapp-schema.orUnionReplace=false Start
+
+.. note::
This optimization will only be applied when a PostgreSQL database is being used.
\ No newline at end of file
diff --git a/doc/en/user/source/data/app-schema/polymorphism.rst b/doc/en/user/source/data/app-schema/polymorphism.rst
index c4b3cfdc85d..54e1dbc4340 100644
--- a/doc/en/user/source/data/app-schema/polymorphism.rst
+++ b/doc/en/user/source/data/app-schema/polymorphism.rst
@@ -1,317 +1,317 @@
-.. _app-schema.polymorphism:
-
-Polymorphism
-============
-
-Polymorphism in this context refers to the ability of an attribute to have different forms.
-Depending on the source value, it could be encoded with a specific structure, type, as an xlink:href reference, or not encoded at all.
-To achieve this, we reuse feature chaining syntax and allow OCQL functions in the linkElement tag.
-Read more about :ref:`app-schema.feature-chaining`, if you're not familiar with the syntax.
-
-
-Data-type polymorphism
-----------------------
-You can use normal feature chaining to get an attribute to be encoded as a certain type.
-For example::
-
-
- ex:someAttribute
-
- VALUE_ID
- NumericType
- FEATURE_LINK
-
-
-
- ex:someAttribute
-
- VALUE_ID
- gsml:CGI_TermValue
- FEATURE_LINK
-
-
-
-Note: NumericType here is a mappingName, whereas gsml:CGI_TermValue is a targetElement.
-
-In the above example, ex:someAttribute would be encoded with the configuration in NumericType if the foreign key matches the linkField.
-Both instances would be encoded if the foreign key matches the candidate keys in both linked configurations.
-Therefore this would only work for 0 to many relationships.
-
-Functions can be used for single attribute instances. See `useful functions`_ for a list of commonly used functions. Specify the function in the linkElement, and it would map it to the first matching FeatureTypeMapping.
-For example::
-
-
- ex:someAttribute
-
- VALUE_ID
-
- Recode(CLASS_TEXT, 'numeric', 'NumericType', 'literal', 'gsml:CGI_TermValue')
-
- FEATURE_LINK
-
- true
-
-
-The above example means, if the CLASS_TEXT value is 'numeric', it would link to 'NumericType' FeatureTypeMapping, with VALUE_ID as foreign key to the linked type.
-It would require all the potential matching types to have a common attribute that is specified in linkField. In this example, the linkField is FEATURE_LINK, which is a fake attribute used only for feature chaining.
-You can omit the linkField and OCQL if the FeatureTypeMapping being linked to has the same sourceType with the container type.
-This would save us from unnecessary extra queries, which would affect performance.
-For example:
-
-FeatureTypeMapping of the container type::
-
-
- PropertyFiles
- PolymorphicFeature
-
-FeatureTypeMapping of NumericType points to the same table::
-
-
- NumericType
- PropertyFiles
- PolymorphicFeature
-
-FeatureTypeMapping of gsml:CGI_TermValue also points to the same table::
-
-
- PropertyFiles
- PolymorphicFeature
- gsml:CGI_TermValue
-
-In this case, we can omit linkField in the polymorphic attribute mapping::
-
-
- ex:someAttribute
-
-
- Recode(CLASS_TEXT, 'numeric', 'NumericType', 'literal', 'gsml:CGI_TermValue')
-
-
- true
-
-
-
-Referential polymorphism
-------------------------
-This is when an attribute is set to be encoded as an xlink:href reference on the top level.
-When the scenario only has reference cases in it, setting a function in Client Property will do the job. E.g.::
-
-
- ex:someAttribute
-
- xlink:href
- if_then_else(isNull(NUMERIC_VALUE), 'urn:ogc:def:nil:OGC:1.0:missing', strConcat('#', NUMERIC_VALUE))
-
-
-
-The above example means, if NUMERIC_VALUE is null, the attribute should be encoded as::
-
-
-
-Otherwise, it would be encoded as::
-
-
- where NUMERIC_VALUE = '123'
-
-However, this is not possible when we have cases where a fully structured attribute is also a possibility.
-The `toxlinkhref`_ function can be used for this scenario. E.g.::
-
-
- ex:someAttribute
-
-
- if_then_else(isNull(NUMERIC_VALUE), toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing'),
- if_then_else(lessEqualThan(NUMERIC_VALUE, 1000), 'numeric_value', toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing')))
-
-
-
-
-The above example means, if NUMERIC_VALUE is null, the output would be encoded as::
-
-
-
-Otherwise, if NUMERIC_VALUE is less or equal than 1000, it would be encoded with attributes from FeatureTypeMapping with 'numeric_value' mappingName.
-If NUMERIC_VALUE is greater than 1000, it would be encoded as the first scenario.
-
-
-Useful functions
-----------------
-if_then_else function
-`````````````````````
-
-**Syntax**::
-
- if_then_else(BOOLEAN_EXPRESSION, value, default value)
-
-* **BOOLEAN_EXPRESSION**: could be a Boolean column value, or a Boolean function
-* **value**: the value to map to, if BOOLEAN_EXPRESSION is true
-* **default value**: the value to map to, if BOOLEAN_EXPRESSION is false
-
-Recode function
-```````````````
-
-**Syntax**::
-
- Recode(EXPRESSION, key1, value1, key2, value2,...)
-
-* **EXPRESSION**: column name to get values from, or another function
-* **key-n**:
- * key expression to map to value-n
- * if the evaluated value of EXPRESSION doesn't match any key, nothing would be encoded for the attribute.
-* **value-n**: value expression which translates to a mappingName or targetElement
-
-lessEqualThan
-`````````````
-Returns true if ATTRIBUTE_EXPRESSION evaluates to less or equal than LIMIT_EXPRESSION.
-
-**Syntax**::
-
- lessEqualThan(ATTRIBUTE_EXPRESSION, LIMIT_EXPRESSION)
-
-* **ATTRIBUTE_EXPRESSION**: expression of the attribute being evaluated.
-* **LIMIT_EXPRESSION**: expression of the numeric value to be compared against.
-
-lessThan
-````````
-Returns true if ATTRIBUTE_EXPRESSION evaluates to less than LIMIT_EXPRESSION.
-
-**Syntax**::
-
- lessThan(ATTRIBUTE_EXPRESSION, LIMIT_EXPRESSION)
-
-* **ATTRIBUTE_EXPRESSION**: expression of the attribute being evaluated.
-* **LIMIT_EXPRESSION**: expression of the numeric value to be compared against.
-
-equalTo
-```````
-Compares two expressions and returns true if they're equal.
-
-**Syntax**::
-
- equalTo(LHS_EXPRESSION, RHS_EXPRESSION)
-
-isNull
-``````
-Returns a Boolean that is true if the expression evaluates to null.
-
-**Syntax**::
-
- isNull(EXPRESSION)
-
-* **EXPRESSION**: expression to be evaluated.
-
-toXlinkHref
-```````````
-Special function written for referential polymorphism and feature chaining, not to be used outside of linkElement.
-It infers that the attribute should be encoded as xlink:href.
-
-**Syntax**::
-
- toXlinkHref(XLINK_HREF_EXPRESSION)
-
-* **XLINK_HREF_EXPRESSION**:
- * could be a function or a literal
- * has to be wrapped in single quotes if it's a literal
-
-.. note::
- * To get toXlinkHref function working, you need to declare xlink URI in the namespaces.
-
-Other functions
-```````````````
-Please refer to :ref:`filter_function_reference`.
-
-Combinations
-````````````
-You can combine functions, but it might affect performance.
-E.g.::
-
- if_then_else(isNull(NUMERIC_VALUE), toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing'),
- if_then_else(lessEqualThan(NUMERIC_VALUE, 1000), 'numeric_value', toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing')))
-
-
-.. note::
- * When specifying a mappingName or targetElement as a value in functions, make sure they're enclosed in single quotes.
- * Some functions have no null checking, and will fail when they encounter null.
- * The workaround for this is to wrap the expression with isNull() function if null is known to exist in the data set.
-
-
-Null or missing value
----------------------
-To skip the attribute for a specific case, you can use Expression.NIL as a value in if_then_else or not include the key in `Recode function`_ .
-E.g.::
-
- if_then_else(isNull(VALUE), Expression.NIL, 'gsml:CGI_TermValue')
- means the attribute would not be encoded if VALUE is null.
-
- Recode(VALUE, 'term_value', 'gsml:CGI_TermValue')
- means the attribute would not be encoded if VALUE is anything but 'term_value'.
-
-To encode an attribute as xlink:href that represents missing value on the top level, see `Referential Polymorphism`_.
-
-
-Any type
---------
-Having xs:anyType as the attribute type itself infers that it is polymorphic, since they can be encoded as any type.
-
-If the type is pre-determined and would always be the same, we might need to specify :ref:`app-schema.mapping-file.targetAttributeNode`.
-E.g.::
-
-
- om:result
- gml:MeasureType
-
- TOPAGE
-
-
- xsi:type
- 'gml:MeasureType'
-
-
- uom
- 'http://www.opengis.net/def/uom/UCUM/0/Ma'
-
-
-
-If the casting type is complex, this is not a requirement as app-schema is able to automatically determine the type from the XPath in targetAttribute.
-E.g., in this example ``om:result`` is automatically specialised as a MappedFeatureType::
-
-
- om:result/gsml:MappedFeature/gml:name
-
- NAME
-
-
-
-Alternatively, we can use feature chaining. For the same example above, the mapping would be::
-
-
- om:result
-
- LEX_D
- gsml:MappedFeature
- gml:name
-
-
-
-If the type is conditional, the mapping style for such attributes is the same as any other polymorphic attributes. E.g.::
-
-
- om:result
-
-
- Recode(NAME, Expression.Nil, toXlinkHref('urn:ogc:def:nil:OGC::missing'),'numeric',
- toXlinkHref(strConcat('urn:numeric-value::', NUMERIC_VALUE)), 'literal', 'TermValue2')
-
-
-
-
-
-Filters
--------
-Filters should work as usual, as long as the users know what they want to filter.
-For example, when an attribute could be encoded as gsml:CGI_TermValue or gsml:CGI_NumericValue, users can run filters with property names of:
-
- * ex:someAttribute/gsml:CGI_TermValue/gsml:value to return matching attributes that are encoded as gsml:CGI_TermValue and satisfy the filter.
- * likewise, ex:someAttribute/gsml:CGI_NumericValue/gsml:principalValue should return matching gsml:CGI_NumericValue attributes.
-
-Another limitation is filtering attributes of an xlink:href attribute pointing to an instance outside of the document.
+.. _app-schema.polymorphism:
+
+Polymorphism
+============
+
+Polymorphism in this context refers to the ability of an attribute to have different forms.
+Depending on the source value, it could be encoded with a specific structure, type, as an xlink:href reference, or not encoded at all.
+To achieve this, we reuse feature chaining syntax and allow OCQL functions in the linkElement tag.
+Read more about :ref:`app-schema.feature-chaining`, if you're not familiar with the syntax.
+
+
+Data-type polymorphism
+----------------------
+You can use normal feature chaining to get an attribute to be encoded as a certain type.
+For example::
+
+   <AttributeMapping>
+      <targetAttribute>ex:someAttribute</targetAttribute>
+      <sourceExpression>
+         <OCQL>VALUE_ID</OCQL>
+         <linkElement>NumericType</linkElement>
+         <linkField>FEATURE_LINK</linkField>
+      </sourceExpression>
+   </AttributeMapping>
+
+   <AttributeMapping>
+      <targetAttribute>ex:someAttribute</targetAttribute>
+      <sourceExpression>
+         <OCQL>VALUE_ID</OCQL>
+         <linkElement>gsml:CGI_TermValue</linkElement>
+         <linkField>FEATURE_LINK</linkField>
+      </sourceExpression>
+   </AttributeMapping>
+
+Note: NumericType here is a mappingName, whereas gsml:CGI_TermValue is a targetElement.
+
+In the above example, ex:someAttribute would be encoded with the configuration in NumericType if the foreign key matches the linkField.
+Both instances would be encoded if the foreign key matches the candidate keys in both linked configurations.
+Therefore this would only work for 0 to many relationships.
+
+Functions can be used for single attribute instances. See `useful functions`_ for a list of commonly used functions. Specify the function in the linkElement, and the attribute will be mapped to the first matching FeatureTypeMapping.
+For example::
+
+
+  <AttributeMapping>
+    <targetAttribute>ex:someAttribute</targetAttribute>
+    <sourceExpression>
+      <OCQL>VALUE_ID</OCQL>
+      <linkElement>
+        Recode(CLASS_TEXT, 'numeric', 'NumericType', 'literal', 'gsml:CGI_TermValue')
+      </linkElement>
+      <linkField>FEATURE_LINK</linkField>
+    </sourceExpression>
+    <isMultiple>true</isMultiple>
+  </AttributeMapping>
+
+The above example means that if the CLASS_TEXT value is 'numeric', the attribute links to the 'NumericType' FeatureTypeMapping, with VALUE_ID as the foreign key to the linked type.
+All the potential matching types are required to have a common attribute, specified in linkField. In this example, the linkField is FEATURE_LINK, which is a fake attribute used only for feature chaining.
+You can omit the linkField and OCQL if the FeatureTypeMapping being linked to has the same sourceType as the container type.
+This saves unnecessary extra queries, which would otherwise affect performance.
+For example:
+
+FeatureTypeMapping of the container type::
+
+
+  <FeatureTypeMapping>
+    <sourceDataStore>PropertyFiles</sourceDataStore>
+    <sourceType>PolymorphicFeature</sourceType>
+
+FeatureTypeMapping of NumericType points to the same table::
+
+
+  <FeatureTypeMapping>
+    <mappingName>NumericType</mappingName>
+    <sourceDataStore>PropertyFiles</sourceDataStore>
+    <sourceType>PolymorphicFeature</sourceType>
+
+FeatureTypeMapping of gsml:CGI_TermValue also points to the same table::
+
+
+  <FeatureTypeMapping>
+    <sourceDataStore>PropertyFiles</sourceDataStore>
+    <sourceType>PolymorphicFeature</sourceType>
+    <targetElement>gsml:CGI_TermValue</targetElement>
+
+In this case, we can omit linkField in the polymorphic attribute mapping::
+
+
+  <AttributeMapping>
+    <targetAttribute>ex:someAttribute</targetAttribute>
+    <sourceExpression>
+      <linkElement>
+        Recode(CLASS_TEXT, 'numeric', 'NumericType', 'literal', 'gsml:CGI_TermValue')
+      </linkElement>
+    </sourceExpression>
+    <isMultiple>true</isMultiple>
+  </AttributeMapping>
+
+
+
+Referential polymorphism
+------------------------
+This is when an attribute is set to be encoded as an xlink:href reference at the top level.
+When only reference cases are possible, setting a function in a ClientProperty will do the job. E.g.::
+
+
+  <AttributeMapping>
+    <targetAttribute>ex:someAttribute</targetAttribute>
+    <ClientProperty>
+      <name>xlink:href</name>
+      <value>if_then_else(isNull(NUMERIC_VALUE), 'urn:ogc:def:nil:OGC:1.0:missing', strConcat('#', NUMERIC_VALUE))</value>
+    </ClientProperty>
+  </AttributeMapping>
+
+
+
+The above example means, if NUMERIC_VALUE is null, the attribute should be encoded as::
+
+  <ex:someAttribute xlink:href="urn:ogc:def:nil:OGC:1.0:missing"/>
+
+Otherwise, it would be encoded as::
+
+  <ex:someAttribute xlink:href="#123"/>
+
+where NUMERIC_VALUE = '123'.
+
+However, this is not possible when a fully structured attribute is also a possibility.
+The `toxlinkhref`_ function can be used for this scenario. E.g.::
+
+
+  <AttributeMapping>
+    <targetAttribute>ex:someAttribute</targetAttribute>
+    <sourceExpression>
+      <linkElement>
+        if_then_else(isNull(NUMERIC_VALUE), toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing'),
+        if_then_else(lessEqualThan(NUMERIC_VALUE, 1000), 'numeric_value', toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing')))
+      </linkElement>
+    </sourceExpression>
+  </AttributeMapping>
+
+
+
+
+The above example means, if NUMERIC_VALUE is null, the output would be encoded as::
+
+  <ex:someAttribute xlink:href="urn:ogc:def:nil:OGC:1.0:missing"/>
+
+Otherwise, if NUMERIC_VALUE is less than or equal to 1000, it would be encoded with attributes from the FeatureTypeMapping with the 'numeric_value' mappingName.
+If NUMERIC_VALUE is greater than 1000, it would be encoded as in the first scenario.
+
+
+Useful functions
+----------------
+if_then_else function
+`````````````````````
+
+**Syntax**::
+
+ if_then_else(BOOLEAN_EXPRESSION, value, default value)
+
+* **BOOLEAN_EXPRESSION**: could be a Boolean column value, or a Boolean function
+* **value**: the value to map to, if BOOLEAN_EXPRESSION is true
+* **default value**: the value to map to, if BOOLEAN_EXPRESSION is false
+
+Recode function
+```````````````
+
+**Syntax**::
+
+ Recode(EXPRESSION, key1, value1, key2, value2,...)
+
+* **EXPRESSION**: column name to get values from, or another function
+* **key-n**:
+ * key expression to map to value-n
+ * if the evaluated value of EXPRESSION doesn't match any key, nothing would be encoded for the attribute.
+* **value-n**: value expression which translates to a mappingName or targetElement
+
+lessEqualThan
+`````````````
+Returns true if ATTRIBUTE_EXPRESSION evaluates to less than or equal to LIMIT_EXPRESSION.
+
+**Syntax**::
+
+ lessEqualThan(ATTRIBUTE_EXPRESSION, LIMIT_EXPRESSION)
+
+* **ATTRIBUTE_EXPRESSION**: expression of the attribute being evaluated.
+* **LIMIT_EXPRESSION**: expression of the numeric value to be compared against.
+
+lessThan
+````````
+Returns true if ATTRIBUTE_EXPRESSION evaluates to less than LIMIT_EXPRESSION.
+
+**Syntax**::
+
+ lessThan(ATTRIBUTE_EXPRESSION, LIMIT_EXPRESSION)
+
+* **ATTRIBUTE_EXPRESSION**: expression of the attribute being evaluated.
+* **LIMIT_EXPRESSION**: expression of the numeric value to be compared against.
+
+equalTo
+```````
+Compares two expressions and returns true if they're equal.
+
+**Syntax**::
+
+ equalTo(LHS_EXPRESSION, RHS_EXPRESSION)
+
+isNull
+``````
+Returns a Boolean that is true if the expression evaluates to null.
+
+**Syntax**::
+
+ isNull(EXPRESSION)
+
+* **EXPRESSION**: expression to be evaluated.
+
+toXlinkHref
+```````````
+A special function written for referential polymorphism and feature chaining; it is not to be used outside of linkElement.
+It indicates that the attribute should be encoded as an xlink:href.
+
+**Syntax**::
+
+ toXlinkHref(XLINK_HREF_EXPRESSION)
+
+* **XLINK_HREF_EXPRESSION**:
+ * could be a function or a literal
+ * has to be wrapped in single quotes if it's a literal
+
+.. note::
+ * To get the toXlinkHref function working, you need to declare the xlink URI in the namespaces.
+
+Other functions
+```````````````
+Please refer to :ref:`filter_function_reference`.
+
+Combinations
+````````````
+You can combine functions, but it might affect performance.
+E.g.::
+
+ if_then_else(isNull(NUMERIC_VALUE), toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing'),
+ if_then_else(lessEqualThan(NUMERIC_VALUE, 1000), 'numeric_value', toXlinkHref('urn:ogc:def:nil:OGC:1.0:missing')))
+
+
+.. note::
+ * When specifying a mappingName or targetElement as a value in functions, make sure they're enclosed in single quotes.
+ * Some functions have no null checking, and will fail when they encounter null.
+ * The workaround for this is to guard the expression with an isNull() check if null values are known to exist in the data set.
+
+
+Null or missing value
+---------------------
+To skip the attribute for a specific case, you can use Expression.NIL as a value in if_then_else, or not include the key in the `Recode function`_.
+E.g.::
+
+ if_then_else(isNull(VALUE), Expression.NIL, 'gsml:CGI_TermValue')
+ means the attribute would not be encoded if VALUE is null.
+
+ Recode(VALUE, 'term_value', 'gsml:CGI_TermValue')
+ means the attribute would not be encoded if VALUE is anything but 'term_value'.
+
+To encode an attribute as xlink:href that represents missing value on the top level, see `Referential Polymorphism`_.
+
+
+Any type
+--------
+Having xs:anyType as the attribute type itself implies that it is polymorphic, since it can be encoded as any type.
+
+If the type is pre-determined and would always be the same, we might need to specify :ref:`app-schema.mapping-file.targetAttributeNode`.
+E.g.::
+
+
+  <AttributeMapping>
+    <targetAttribute>om:result</targetAttribute>
+    <targetAttributeNode>gml:MeasureType</targetAttributeNode>
+    <sourceExpression>
+      <OCQL>TOPAGE</OCQL>
+    </sourceExpression>
+    <ClientProperty>
+      <name>xsi:type</name>
+      <value>'gml:MeasureType'</value>
+    </ClientProperty>
+    <ClientProperty>
+      <name>uom</name>
+      <value>'http://www.opengis.net/def/uom/UCUM/0/Ma'</value>
+    </ClientProperty>
+  </AttributeMapping>
+
+If the casting type is complex, this is not a requirement as app-schema is able to automatically determine the type from the XPath in targetAttribute.
+E.g., in this example ``om:result`` is automatically specialised as a MappedFeatureType::
+
+
+ om:result/gsml:MappedFeature/gml:name
+
+ NAME
+
+
+
+Alternatively, we can use feature chaining. For the same example above, the mapping would be::
+
+
+ om:result
+
+ LEX_D
+ gsml:MappedFeature
+ gml:name
+
+
+
+If the type is conditional, the mapping style for such attributes is the same as any other polymorphic attributes. E.g.::
+
+
+  <AttributeMapping>
+    <targetAttribute>om:result</targetAttribute>
+    <sourceExpression>
+      <linkElement>
+        Recode(NAME, Expression.Nil, toXlinkHref('urn:ogc:def:nil:OGC::missing'), 'numeric',
+        toXlinkHref(strConcat('urn:numeric-value::', NUMERIC_VALUE)), 'literal', 'TermValue2')
+      </linkElement>
+    </sourceExpression>
+  </AttributeMapping>
+
+
+
+
+
+Filters
+-------
+Filters should work as usual, as long as the users know what they want to filter.
+For example, when an attribute could be encoded as gsml:CGI_TermValue or gsml:CGI_NumericValue, users can run filters with property names of:
+
+ * ex:someAttribute/gsml:CGI_TermValue/gsml:value to return matching attributes that are encoded as gsml:CGI_TermValue and satisfy the filter.
+ * likewise, ex:someAttribute/gsml:CGI_NumericValue/gsml:principalValue should return matching gsml:CGI_NumericValue attributes.
+
+One limitation is filtering nested attributes of an xlink:href attribute that points to an instance outside of the document.
diff --git a/doc/en/user/source/data/cascaded/wfs.rst b/doc/en/user/source/data/cascaded/wfs.rst
index f9122f4e1a0..07f5eff471a 100644
--- a/doc/en/user/source/data/cascaded/wfs.rst
+++ b/doc/en/user/source/data/cascaded/wfs.rst
@@ -1,93 +1,93 @@
-.. _data_external_wfs:
-
-External Web Feature Server
-===========================
-
-GeoServer has the ability to load data from a remote Web Feature Server (WFS). This is useful if the remote WFS lacks certain functionality that GeoServer contains. For example, if the remote WFS is not also a Web Map Server (WMS), data from the WFS can be cascaded through GeoServer to utilize GeoServer's WMS. If the remote WFS has a WMS but that WMS cannot output KML, data can be cascaded through GeoServer's WMS to output KML.
-
-Adding an external WFS
-----------------------
-
-To connect to an external WFS, it is necessary to load it as a new datastore. To start, navigate to :menuselection:`Stores --> Add a new store --> Web Feature Server`.
-
-.. figure:: images/externalwfs.png
- :align: center
-
- *Adding an external WFS as a store*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - :guilabel:`Workspace`
- - Name of the workspace to contain the store. This will also be the prefix of all of the layer names created from the store.
- * - :guilabel:`Data Source Name`
- - Name of the store as known to GeoServer.
- * - :guilabel:`Description`
- - Description of the store.
- * - :guilabel:`Enabled`
- - Enables the store. If disabled, no data from the external WFS will be served.
- * - :guilabel:`GET_CAPABILITIES_URL`
- - URL to access the capabilities document of the remote WFS.
- * - :guilabel:`PROTOCOL`
- - When checked, connects with POST, otherwise uses GET.
- * - :guilabel:`USERNAME`
- - The user name to connect to the external WFS.
- * - :guilabel:`PASSWORD`
- - The password associated with the above user name.
- * - :guilabel:`ENCODING`
- - The character encoding of the XML requests sent to the server. Defaults to ``UTF-8``.
- * - :guilabel:`TIMEOUT`
- - Time (in milliseconds) before timing out. Default is ``3000``.
- * - :guilabel:`BUFFER_SIZE`
- - Specifies a buffer size (in number of features). Default is ``10`` features.
- * - :guilabel:`TRY_GZIP`
- - Specifies that the server should transfer data using compressed HTTP if supported by the server.
- * - :guilabel:`LENIENT`
- - When checked, will try to render features that don't match the appropriate schema. Errors will be logged.
- * - :guilabel:`MAXFEATURES`
- - Maximum amount of features to retrieve for each featuretype. Default is no limit.
- * - :guilabel:`AXIS_ORDER`
- - Axis order used in result coordinates (It applies only to WFS 1.x.0 servers). Default is Compliant.
- * - :guilabel:`AXIS_ORDER_FILTER`
- - Axis order used in filter (It applies only to WFS 1.x.0 servers). Default is Compliant.
- * - :guilabel:`OUTPUTFORMAT`
- - Output format to request (instead of the default remote service one).
- * - :guilabel:`GML_COMPLIANCE_LEVEL`
- - OCG GML compliance level. i.e. (simple feature) 0, 1 or 2. Default is 0.
- * - :guilabel:`GML_COMPATIBLE_TYPENAMES`
- - Use Gml Compatible TypeNames (replace : by _). Default is no false.
- * - :guilabel:`USE_HTTP_CONNECTION_POOLING`
- - Use connection pooling to connect to the remote WFS service. Also enables digest authentcation.
-
-When finished, click :guilabel:`Save`.
-
-Configuring external WFS layers
--------------------------------
-
-When properly loaded, all layers served by the external WFS will be available to GeoServer. Before they can be served, however, they will need to be individually configured as new layers. See the section on :ref:`data_webadmin_layers` for how to add and edit new layers.
-
-Connecting to an external WFS layer via a proxy server
-------------------------------------------------------
-
-In a corporate environment it may be necessary to connect to an external WFS through a proxy server. To achieve this, various java variables need to be set.
-
-For a Windows install running GeoServer as a service, this is done by modifying the wrapper.conf file. For a default Windows install, modify :file:`C:\\Program Files\\GeoServer x.x.x\\wrapper\\wrapper.conf` similarly to the following.
-
- # Java Additional Parameters
-
- wrapper.java.additional.1=-Djetty.home=.
- wrapper.java.additional.2=-DGEOSERVER_DATA_DIR="%GEOSERVER_DATA_DIR%"
- wrapper.java.additional.3=-Dhttp.proxySet=true
- wrapper.java.additional.4=-Dhttp.proxyHost=maitproxy
- wrapper.java.additional.5=-Dhttp.proxyPort=8080
- wrapper.java.additional.6=-Dhttps.proxyHost=maitproxy
- wrapper.java.additional.7=-Dhttps.proxyPort=8080
- wrapper.java.additional.8=-Dhttp.nonProxyHosts="mait*|dpi*|localhost"
-
-Note that the :command:`http.proxySet=true` parameter is required. Also, the parameter numbers must be consecutive - ie. no gaps.
-
-For a Windows install not running GeoServer as a service, modify :file:`startup.bat` so that the :command:`java` command runs with similar -D parameters.
-
-For a Linux/UNIX install, modify :file:`startup.sh` so that the :command:`java` command runs with similar -D parameters.
+.. _data_external_wfs:
+
+External Web Feature Server
+===========================
+
+GeoServer has the ability to load data from a remote Web Feature Server (WFS). This is useful if the remote WFS lacks certain functionality that GeoServer contains. For example, if the remote WFS is not also a Web Map Server (WMS), data from the WFS can be cascaded through GeoServer to utilize GeoServer's WMS. If the remote WFS has a WMS but that WMS cannot output KML, data can be cascaded through GeoServer's WMS to output KML.
+
+Adding an external WFS
+----------------------
+
+To connect to an external WFS, it is necessary to load it as a new datastore. To start, navigate to :menuselection:`Stores --> Add a new store --> Web Feature Server`.
+
+.. figure:: images/externalwfs.png
+ :align: center
+
+ *Adding an external WFS as a store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - :guilabel:`Workspace`
+ - Name of the workspace to contain the store. This will also be the prefix of all of the layer names created from the store.
+ * - :guilabel:`Data Source Name`
+ - Name of the store as known to GeoServer.
+ * - :guilabel:`Description`
+ - Description of the store.
+ * - :guilabel:`Enabled`
+ - Enables the store. If disabled, no data from the external WFS will be served.
+ * - :guilabel:`GET_CAPABILITIES_URL`
+ - URL to access the capabilities document of the remote WFS.
+ * - :guilabel:`PROTOCOL`
+ - When checked, connects with POST, otherwise uses GET.
+ * - :guilabel:`USERNAME`
+ - The user name to connect to the external WFS.
+ * - :guilabel:`PASSWORD`
+ - The password associated with the above user name.
+ * - :guilabel:`ENCODING`
+ - The character encoding of the XML requests sent to the server. Defaults to ``UTF-8``.
+ * - :guilabel:`TIMEOUT`
+ - Time (in milliseconds) before timing out. Default is ``3000``.
+ * - :guilabel:`BUFFER_SIZE`
+ - Specifies a buffer size (in number of features). Default is ``10`` features.
+ * - :guilabel:`TRY_GZIP`
+ - Specifies that the server should transfer data using compressed HTTP if supported by the server.
+ * - :guilabel:`LENIENT`
+ - When checked, will try to render features that don't match the appropriate schema. Errors will be logged.
+ * - :guilabel:`MAXFEATURES`
+ - Maximum amount of features to retrieve for each featuretype. Default is no limit.
+ * - :guilabel:`AXIS_ORDER`
+ - Axis order used in result coordinates (It applies only to WFS 1.x.0 servers). Default is Compliant.
+ * - :guilabel:`AXIS_ORDER_FILTER`
+ - Axis order used in filter (It applies only to WFS 1.x.0 servers). Default is Compliant.
+ * - :guilabel:`OUTPUTFORMAT`
+ - Output format to request (instead of the default remote service one).
+ * - :guilabel:`GML_COMPLIANCE_LEVEL`
+ - OGC GML compliance level, i.e. (simple feature) 0, 1 or 2. Default is 0.
+ * - :guilabel:`GML_COMPATIBLE_TYPENAMES`
+ - Use GML compatible type names (replace : by _). Default is false.
+ * - :guilabel:`USE_HTTP_CONNECTION_POOLING`
+ - Use connection pooling to connect to the remote WFS service. Also enables digest authentication.
+
+When finished, click :guilabel:`Save`.
+
+Configuring external WFS layers
+-------------------------------
+
+When properly loaded, all layers served by the external WFS will be available to GeoServer. Before they can be served, however, they will need to be individually configured as new layers. See the section on :ref:`data_webadmin_layers` for how to add and edit new layers.
+
+Connecting to an external WFS layer via a proxy server
+------------------------------------------------------
+
+In a corporate environment it may be necessary to connect to an external WFS through a proxy server. To achieve this, various java variables need to be set.
+
+For a Windows install running GeoServer as a service, this is done by modifying the wrapper.conf file. For a default Windows install, modify :file:`C:\\Program Files\\GeoServer x.x.x\\wrapper\\wrapper.conf` similarly to the following.
+
+ # Java Additional Parameters
+
+ wrapper.java.additional.1=-Djetty.home=.
+ wrapper.java.additional.2=-DGEOSERVER_DATA_DIR="%GEOSERVER_DATA_DIR%"
+ wrapper.java.additional.3=-Dhttp.proxySet=true
+ wrapper.java.additional.4=-Dhttp.proxyHost=maitproxy
+ wrapper.java.additional.5=-Dhttp.proxyPort=8080
+ wrapper.java.additional.6=-Dhttps.proxyHost=maitproxy
+ wrapper.java.additional.7=-Dhttps.proxyPort=8080
+ wrapper.java.additional.8=-Dhttp.nonProxyHosts="mait*|dpi*|localhost"
+
+Note that the :command:`http.proxySet=true` parameter is required. Also, the parameter numbers must be consecutive, i.e. no gaps.
+
+For a Windows install not running GeoServer as a service, modify :file:`startup.bat` so that the :command:`java` command runs with similar -D parameters.
+
+For a Linux/UNIX install, modify :file:`startup.sh` so that the :command:`java` command runs with similar -D parameters.
diff --git a/doc/en/user/source/data/database/db2.rst b/doc/en/user/source/data/database/db2.rst
index f7eceffa1ee..c6586e62f46 100644
--- a/doc/en/user/source/data/database/db2.rst
+++ b/doc/en/user/source/data/database/db2.rst
@@ -1,63 +1,63 @@
-.. _data_Db2:
-
-Db2
-===
-
-.. note:: GeoServer does not come built-in with support for Db2; it must be installed through an extension. Proceed to :ref:`Db2_install` for installation details.
-
-The Db2 spatial support implements the OGC specification "Simple Features for SQL using types and functions" and the ISO "SQL/MM Part 3 Spatial" standard. When installing Db2 on Linux, Unix and Windows platforms, the "custom" option must be selected and the server spatial support included.
-
-A free of charge copy of Db2 can be downloaded from https://www.ibm.com/analytics/db2/trials.
-
-
-.. _Db2_install:
-
-Installing the Db2 extension
-----------------------------
-
-.. warning:: Due to licensing requirements, not all files are included with the extension. To install Db2 support, it is necessary to download additional files. **Just installing the Db2 extension will have no effect.**
-
-GeoServer files
-```````````````
-
-#. Download the Db2 extension from the `GeoServer download page
- `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of
- the GeoServer installation.
-
-Required external files
-```````````````````````
-
-The Db2 JDBC driver is not packaged with the GeoServer extension: :file:`db2jcc4.jar`. This file should be available in the :file:`java` subdirectory of your Db2 installation directory. Copy this file to the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-
-After all GeoServer files and external files have been downloaded and copied, restart GeoServer.
-
-Adding a Db2 data store
------------------------
-
-When properly installed, :guilabel:`Db2` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
-
-.. figure:: images/db2create.png
- :align: center
-
- *Db2 in the list of raster data stores*
-
-Configuring a Db2 data store
-----------------------------
-
-.. figure:: images/db2configure.png
- :align: center
-
- *Configuring a Db2 data store*
-
-Configuring a Db2 data store with JNDI
---------------------------------------
-
-Notes on usage
---------------
-
-Db2 schema, table, and column names are all case-sensitive when working with GeoTools/GeoServer. When working with Db2 scripts and the Db2 command window, the default is to treat these names as upper-case unless enclosed in double-quote characters but this is not the case in GeoServer.
+.. _data_Db2:
+
+Db2
+===
+
+.. note:: GeoServer does not come built-in with support for Db2; it must be installed through an extension. Proceed to :ref:`Db2_install` for installation details.
+
+The Db2 spatial support implements the OGC specification "Simple Features for SQL using types and functions" and the ISO "SQL/MM Part 3 Spatial" standard. When installing Db2 on Linux, Unix and Windows platforms, the "custom" option must be selected and the server spatial support included.
+
+A free of charge copy of Db2 can be downloaded from https://www.ibm.com/analytics/db2/trials.
+
+
+.. _Db2_install:
+
+Installing the Db2 extension
+----------------------------
+
+.. warning:: Due to licensing requirements, not all files are included with the extension. To install Db2 support, it is necessary to download additional files. **Just installing the Db2 extension will have no effect.**
+
+GeoServer files
+```````````````
+
+#. Download the Db2 extension from the `GeoServer download page
+ `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of
+ the GeoServer installation.
+
+Required external files
+```````````````````````
+
+The Db2 JDBC driver is not packaged with the GeoServer extension: :file:`db2jcc4.jar`. This file should be available in the :file:`java` subdirectory of your Db2 installation directory. Copy this file to the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+
+After all GeoServer files and external files have been downloaded and copied, restart GeoServer.
+
+Adding a Db2 data store
+-----------------------
+
+When properly installed, :guilabel:`Db2` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
+
+.. figure:: images/db2create.png
+ :align: center
+
+ *Db2 in the list of vector data stores*
+
+Configuring a Db2 data store
+----------------------------
+
+.. figure:: images/db2configure.png
+ :align: center
+
+ *Configuring a Db2 data store*
+
+Configuring a Db2 data store with JNDI
+--------------------------------------
+
+Notes on usage
+--------------
+
+Db2 schema, table, and column names are all case-sensitive when working with GeoTools/GeoServer. When working with Db2 scripts and the Db2 command window, the default is to treat these names as upper-case unless they are enclosed in double-quote characters, but this is not the case in GeoServer.
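+
+As a quick illustration (using a hypothetical ``cities`` table), the Db2 command line processor folds unquoted identifiers to upper case, while GeoServer/GeoTools always use the exact case of the catalog names::
+
+   -- unquoted: Db2 folds the name to upper case, so this resolves to the table CITIES
+   SELECT COUNT(*) FROM cities;
+
+   -- double-quoted: the case is preserved, matching how GeoServer references the table Cities
+   SELECT COUNT(*) FROM "Cities";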
diff --git a/doc/en/user/source/data/database/h2.rst b/doc/en/user/source/data/database/h2.rst
index 1f487d57993..f2a4f5c0f75 100644
--- a/doc/en/user/source/data/database/h2.rst
+++ b/doc/en/user/source/data/database/h2.rst
@@ -1,39 +1,39 @@
-.. _data_h2:
-
-H2
-==
-
-.. note:: GeoServer does not come built-in with support for H2; it must be installed through an extension. Proceed to :ref:`h2_install` for installation details.
-
-.. _h2_install:
-
-Installing the H2 extension
-----------------------------
-
-#. Download the H2 extension from the `GeoServer download page
- `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding an H2 data store
------------------------
-
-Once the extension is properly installed :guilabel:`H2` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
-
-.. figure:: images/h2create.png
- :align: center
-
- *H2 in the list of vector data stores*
-
-Configuring an H2 data store
-----------------------------
-
-.. figure:: images/h2configure.png
- :align: center
-
- *Configuring an H2 data store*
-
-Configuring an H2 data store with JNDI
+.. _data_h2:
+
+H2
+==
+
+.. note:: GeoServer does not come built-in with support for H2; it must be installed through an extension. Proceed to :ref:`h2_install` for installation details.
+
+.. _h2_install:
+
+Installing the H2 extension
+----------------------------
+
+#. Download the H2 extension from the `GeoServer download page
+ `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding an H2 data store
+-----------------------
+
+Once the extension is properly installed :guilabel:`H2` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
+
+.. figure:: images/h2create.png
+ :align: center
+
+ *H2 in the list of vector data stores*
+
+Configuring an H2 data store
+----------------------------
+
+.. figure:: images/h2configure.png
+ :align: center
+
+ *Configuring an H2 data store*
+
+Configuring an H2 data store with JNDI
--------------------------------------
\ No newline at end of file
diff --git a/doc/en/user/source/data/database/mysql.rst b/doc/en/user/source/data/database/mysql.rst
index 92f3f556a96..592ae3fd1c7 100644
--- a/doc/en/user/source/data/database/mysql.rst
+++ b/doc/en/user/source/data/database/mysql.rst
@@ -1,63 +1,63 @@
-.. _data_mysql:
-
-MySQL
-=====
-
-.. note:: GeoServer does not come built-in with support for MySQL; it must be installed through an extension. Proceed to :ref:`mysql_install` for installation details.
-
-.. warning:: Currently the MySQL extension is unmaintained and carries unsupported status. While still usable, do not expect the same reliability as with other extensions.
-
-`MySQL `_ is an open source relational database with some limited spatial functionality.
-
-.. _mysql_install:
-
-Installing the MySQL extension
-------------------------------
-
-#. Download the MySQL extension from the `GeoServer download page `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding a MySQL database
------------------------
-
-Once the extension is properly installed ``MySQL`` will show up as an option when creating a new data store.
-
-.. figure:: images/mysqlcreate.png
- :align: center
-
- *MySQL in the list of data sources*
-
-Configuring a MySQL data store
-------------------------------
-
-.. figure:: images/mysqlconfigure.png
- :align: center
-
- *Configuring a MySQL data store*
-
-.. list-table::
- :widths: 20 80
-
- * - ``host``
- - The mysql server host name or ip address.
- * - ``port``
- - The port on which the mysql server is accepting connections.
- * - ``database``
- - The name of the database to connect to. Can also contain a suffix with a connection URL query, such as mydbname?useSSL=false
- * - ``user``
- - The name of the user to connect to the mysql database as.
- * - ``password``
- - The password to use when connecting to the database. Left blank for no
- password.
- * - ``max connections``
-
- ``min connections``
-
- ``validate connections``
-
- - Connection pool configuration parameters. See the
- :ref:`connection_pooling` section for details.
-
+.. _data_mysql:
+
+MySQL
+=====
+
+.. note:: GeoServer does not come built-in with support for MySQL; it must be installed through an extension. Proceed to :ref:`mysql_install` for installation details.
+
+.. warning:: Currently the MySQL extension is unmaintained and carries unsupported status. While still usable, do not expect the same reliability as with other extensions.
+
+`MySQL `_ is an open source relational database with some limited spatial functionality.
+
+.. _mysql_install:
+
+Installing the MySQL extension
+------------------------------
+
+#. Download the MySQL extension from the `GeoServer download page `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding a MySQL database
+-----------------------
+
+Once the extension is properly installed ``MySQL`` will show up as an option when creating a new data store.
+
+.. figure:: images/mysqlcreate.png
+ :align: center
+
+ *MySQL in the list of data sources*
+
+Configuring a MySQL data store
+------------------------------
+
+.. figure:: images/mysqlconfigure.png
+ :align: center
+
+ *Configuring a MySQL data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - ``host``
+ - The mysql server host name or ip address.
+ * - ``port``
+ - The port on which the mysql server is accepting connections.
+ * - ``database``
+ - The name of the database to connect to. Can also contain a suffix with a connection URL query, such as mydbname?useSSL=false
+ * - ``user``
+ - The name of the user to connect to the mysql database as.
+ * - ``password``
+ - The password to use when connecting to the database. Leave blank for no
+ password.
+ * - ``max connections``
+
+ ``min connections``
+
+ ``validate connections``
+
+ - Connection pool configuration parameters. See the
+ :ref:`connection_pooling` section for details.
+
diff --git a/doc/en/user/source/data/database/oracle.rst b/doc/en/user/source/data/database/oracle.rst
index 2fcbb43f188..b70f205bd3f 100644
--- a/doc/en/user/source/data/database/oracle.rst
+++ b/doc/en/user/source/data/database/oracle.rst
@@ -1,148 +1,148 @@
-.. _data_oracle:
-
-Oracle
-======
-
-.. note:: GeoServer does not come built-in with support for Oracle; it must be installed through an extension. Proceed to :ref:`oracle_install` for installation details.
-
-`Oracle Spatial and Locator `_ are the spatial components of Oracle.
-**Locator** is provided with all Oracle versions, but has limited spatial functions.
-**Spatial** is Oracle's full-featured spatial offering, but requires a specific license to use.
-
-.. _oracle_install:
-
-Installing the Oracle extension
--------------------------------
-
-#. Download the Oracle extension from the `GeoServer download page `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding an Oracle datastore
---------------------------
-
-Once the extension is properly installed :guilabel:`Oracle` appears as an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
-
-.. figure:: images/oraclecreate.png
- :align: center
-
- *Oracle in the list of data sources*
-
-Configuring an Oracle datastore
--------------------------------
-
-.. figure:: images/oracleconfigure.png
- :align: center
-
- *Configuring an Oracle datastore*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - ``host``
- - The Oracle server host name or IP address.
- * - ``port``
- - The port on which the Oracle server is accepting connections (often this is port 1521).
- * - ``database``
- - The name of the database to connect to.
- By default this is interpreted as a SID name. To connect to a Service, prefix the name with a ``/``.
- * - ``schema``
- - The database schema to access tables from. Setting this value greatly increases the speed at which the data store displays its publishable tables and views, so it is advisable to set this.
- * - ``user``
- - The name of the user to use when connecting to the database.
- * - ``password``
- - The password to use when connecting to the database. Leave blank for no password.
- * - ``max connections``
- ``min connections``
- ``fetch size``
- ``Connection timeout``
- ``validate connections``
- - Connection pool configuration parameters. See :ref:`connection_pooling` for details.
- * - ``Loose bbox``
- - Controls how bounding box filters are made against geometries in the database. See the :ref:`oracle_loose_bbox` section below.
- * - ``Metadata bbox``
- - Flag controlling the use of MDSYS.USER_SDO_GEOM_METADATA or MDSYS.ALL_SDO_GEOM_METADATA table for bounding box calculations, this brings a better performance if the views access is fast and the bounds are configured right in the tables default is false
-
-Connecting to an Oracle cluster
--------------------------------
-
-In order to connect to an Oracle RAC one can use an almost full JDBC url as the ``database``, provided it starts with ``(`` it will be used verbatim and options "host" and "port" will be ignored. Here is an example "database" value used to connect to an Oracle RAC::
-
- (DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=host1) (PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=host2) (PORT=1521))(CONNECT_DATA=(SERVICE_NAME=service)))
-
-More information about this syntax can be found in the `Oracle documentation `_.
-
-Connecting to a SID or a Service
-````````````````````````````````
-
-Recent versions of Oracle support connecting to a database via either a SID name or a Service name.
-A SID connection descriptor has the form: ``host:port:database``,
-while a Service connection descriptor has the format ``host:port/database``.
-GeoServer uses the SID form by default. To connect via a Service,
-prefix the ``database`` name configuration entry with a ``/``.
-
-Connecting to database through LDAP
-`````````````````````````````````````
-
-For instance if you want to establish a connection with the jdbc thin driver through LDAP, you can use following connect string for the input field ``database``
-``ldap://[host]:[Port]/[db],cn=OracleContext,dc=[oracle_ldap_context]``.
-
-If you are using referrals, enable it by placing a jndi.properties file in geoserver's CLASSPATH, which is in geoserver/WEB-INF/classes.
-This property file contains:
-
- java.naming.referral=follow
-
-
-.. _oracle_loose_bbox:
-
-Using loose bounding box
-````````````````````````
-
-When the ``Loose bbox`` option is set, only the bounding box of database geometries is used in spatial queries. This results in a significant performance gain. The downside is that some geometries may be reported as intersecting a BBOX when they actually do not.
-
-If the primary use of the database is through the :ref:`WMS` this flag can be set safely, since querying more geometries does not have any visible effect. However, if using the :ref:`WFS` and making use of BBOX filtering capabilities, this flag should not be set.
-
-Using the geometry metadata table
-`````````````````````````````````
-
-The Oracle data store by default looks at the ``MDSYS.USER_SDO*`` and ``MDSYS.ALL_SDO*`` views
-to determine the geometry type and native SRID of each geometry column.
-Those views are automatically populated with information about the geometry columns stored in tables that the current
-user owns (for the ``MDSYS.USER_SDO*`` views) or can otherwise access (for the ``MDSYS.ALL_SDO*`` views).
-
-There are a few issues with this strategy:
-
- * if the connection pool user cannot access the tables (because :ref:`impersonation ` is used)
- the MDSYS views will be empty, making it impossible to determine both the geometry type and the native SRID
- * the geometry type can be specified only while building the spatial indexes, as an index constraint. However
- such information is often not included when creating the indexes
- * the views are populated dynamically based on the current user. If the database has thousands of tables and users
- the views can become very slow
-
-Starting with GeoServer 2.1.4 the administrator can address the above issues by manually creating a geometry metadata table
-describing each geometry column.
-Its presence is indicated via the Oracle datastore connection parameter named *Geometry metadata table*
-(which may be a simple table name or a schema-qualified one).
-The table has the following structure (the table name is flexible, just specify the one chosen in the data store connection parameter)::
-
- CREATE TABLE GEOMETRY_COLUMNS(
- F_TABLE_SCHEMA VARCHAR(30) NOT NULL,
- F_TABLE_NAME VARCHAR(30) NOT NULL,
- F_GEOMETRY_COLUMN VARCHAR(30) NOT NULL,
- COORD_DIMENSION INTEGER,
- SRID INTEGER NOT NULL,
- TYPE VARCHAR(30) NOT NULL,
- UNIQUE(F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN),
- CHECK(TYPE IN ('POINT','LINE', 'POLYGON', 'COLLECTION', 'MULTIPOINT', 'MULTILINE', 'MULTIPOLYGON', 'GEOMETRY') ));
-
-When the table is present the store first searches it for information about each geometry column
-to be classified, and falls back on the MDSYS views only if the table does not contain any information.
-
-Configuring an Oracle database with JNDI
-----------------------------------------
-
-See :ref:`tomcat_jndi` for a guide on setting up an Oracle connection using JNDI.
+.. _data_oracle:
+
+Oracle
+======
+
+.. note:: GeoServer does not come built-in with support for Oracle; it must be installed through an extension. Proceed to :ref:`oracle_install` for installation details.
+
+`Oracle Spatial and Locator `_ are the spatial components of Oracle.
+**Locator** is provided with all Oracle versions, but has limited spatial functions.
+**Spatial** is Oracle's full-featured spatial offering, but requires a specific license to use.
+
+.. _oracle_install:
+
+Installing the Oracle extension
+-------------------------------
+
+#. Download the Oracle extension from the `GeoServer download page `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding an Oracle datastore
+--------------------------
+
+Once the extension is properly installed :guilabel:`Oracle` appears as an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
+
+.. figure:: images/oraclecreate.png
+ :align: center
+
+ *Oracle in the list of data sources*
+
+Configuring an Oracle datastore
+-------------------------------
+
+.. figure:: images/oracleconfigure.png
+ :align: center
+
+ *Configuring an Oracle datastore*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - ``host``
+ - The Oracle server host name or IP address.
+ * - ``port``
+ - The port on which the Oracle server is accepting connections (often this is port 1521).
+ * - ``database``
+ - The name of the database to connect to.
+ By default this is interpreted as a SID name. To connect to a Service, prefix the name with a ``/``.
+ * - ``schema``
+ - The database schema to access tables from. Setting this value greatly increases the speed at which the data store displays its publishable tables and views, so it is advisable to set this.
+ * - ``user``
+ - The name of the user to use when connecting to the database.
+ * - ``password``
+ - The password to use when connecting to the database. Leave blank for no password.
+ * - ``max connections``
+ ``min connections``
+ ``fetch size``
+ ``Connection timeout``
+ ``validate connections``
+ - Connection pool configuration parameters. See :ref:`connection_pooling` for details.
+ * - ``Loose bbox``
+ - Controls how bounding box filters are made against geometries in the database. See the :ref:`oracle_loose_bbox` section below.
+ * - ``Metadata bbox``
+ - Flag controlling the use of the MDSYS.USER_SDO_GEOM_METADATA or MDSYS.ALL_SDO_GEOM_METADATA tables for bounding box calculations. This improves performance if access to the views is fast and the bounds are correctly configured in the tables. Default is false.
+
+Connecting to an Oracle cluster
+-------------------------------
+
+In order to connect to an Oracle RAC one can use an almost full JDBC URL as the ``database`` value: provided it starts with ``(``, it will be used verbatim and the "host" and "port" options will be ignored. Here is an example "database" value used to connect to an Oracle RAC::
+
+ (DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=host1) (PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=host2) (PORT=1521))(CONNECT_DATA=(SERVICE_NAME=service)))
+
+More information about this syntax can be found in the `Oracle documentation `_.
+
+Connecting to a SID or a Service
+````````````````````````````````
+
+Recent versions of Oracle support connecting to a database via either a SID name or a Service name.
+A SID connection descriptor has the form: ``host:port:database``,
+while a Service connection descriptor has the format ``host:port/database``.
+GeoServer uses the SID form by default. To connect via a Service,
+prefix the ``database`` name configuration entry with a ``/``.
+
+Connecting to database through LDAP
+`````````````````````````````````````
+
+For instance, if you want to establish a connection with the JDBC thin driver through LDAP, you can use the following connect string for the input field ``database``:
+``ldap://[host]:[Port]/[db],cn=OracleContext,dc=[oracle_ldap_context]``.
+
+If you are using referrals, enable them by placing a :file:`jndi.properties` file on GeoServer's classpath, which is in geoserver/WEB-INF/classes.
+This property file contains:
+
+ java.naming.referral=follow
+
+
+.. _oracle_loose_bbox:
+
+Using loose bounding box
+````````````````````````
+
+When the ``Loose bbox`` option is set, only the bounding box of database geometries is used in spatial queries. This results in a significant performance gain. The downside is that some geometries may be reported as intersecting a BBOX when they actually do not.
+
+If the primary use of the database is through the :ref:`WMS` this flag can be set safely, since querying more geometries does not have any visible effect. However, if using the :ref:`WFS` and making use of BBOX filtering capabilities, this flag should not be set.
+
+Using the geometry metadata table
+`````````````````````````````````
+
+The Oracle data store by default looks at the ``MDSYS.USER_SDO*`` and ``MDSYS.ALL_SDO*`` views
+to determine the geometry type and native SRID of each geometry column.
+Those views are automatically populated with information about the geometry columns stored in tables that the current
+user owns (for the ``MDSYS.USER_SDO*`` views) or can otherwise access (for the ``MDSYS.ALL_SDO*`` views).
+
+There are a few issues with this strategy:
+
+ * if the connection pool user cannot access the tables (because :ref:`impersonation ` is used)
+ the MDSYS views will be empty, making it impossible to determine both the geometry type and the native SRID
+ * the geometry type can be specified only while building the spatial indexes, as an index constraint. However
+ such information is often not included when creating the indexes
+ * the views are populated dynamically based on the current user. If the database has thousands of tables and users
+ the views can become very slow
+
+Starting with GeoServer 2.1.4 the administrator can address the above issues by manually creating a geometry metadata table
+describing each geometry column.
+Its presence is indicated via the Oracle datastore connection parameter named *Geometry metadata table*
+(which may be a simple table name or a schema-qualified one).
+The table has the following structure (the table name is flexible, just specify the one chosen in the data store connection parameter)::
+
+ CREATE TABLE GEOMETRY_COLUMNS(
+ F_TABLE_SCHEMA VARCHAR(30) NOT NULL,
+ F_TABLE_NAME VARCHAR(30) NOT NULL,
+ F_GEOMETRY_COLUMN VARCHAR(30) NOT NULL,
+ COORD_DIMENSION INTEGER,
+ SRID INTEGER NOT NULL,
+ TYPE VARCHAR(30) NOT NULL,
+ UNIQUE(F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN),
+ CHECK(TYPE IN ('POINT','LINE', 'POLYGON', 'COLLECTION', 'MULTIPOINT', 'MULTILINE', 'MULTIPOLYGON', 'GEOMETRY') ));
+
+When the table is present the store first searches it for information about each geometry column
+to be classified, and falls back on the MDSYS views only if the table does not contain any information.
+
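+For example, a hypothetical ``ROADS`` table owned by ``SCOTT``, with a ``GEOM`` column holding 2D lines in EPSG:4326, could be registered with a row along these lines (adjust schema, table, column, dimension, SRID and type to your data)::
+
+   INSERT INTO GEOMETRY_COLUMNS
+       (F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN, COORD_DIMENSION, SRID, TYPE)
+   VALUES ('SCOTT', 'ROADS', 'GEOM', 2, 4326, 'MULTILINE');
+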
+Configuring an Oracle database with JNDI
+----------------------------------------
+
+See :ref:`tomcat_jndi` for a guide on setting up an Oracle connection using JNDI.
diff --git a/doc/en/user/source/data/database/sqlserver.rst b/doc/en/user/source/data/database/sqlserver.rst
index 61d14f6eb4a..03b810be3b0 100644
--- a/doc/en/user/source/data/database/sqlserver.rst
+++ b/doc/en/user/source/data/database/sqlserver.rst
@@ -1,102 +1,102 @@
-.. _data_sqlserver:
-
-Microsoft SQL Server and SQL Azure
-==================================
-
-.. note:: GeoServer does not come built-in with support for SQL Server; it must be installed through an extension. Proceed to :ref:`sqlserver_install` for installation details.
-
-Microsoft's `SQL Server `_ is a relational database with spatial functionality. SQL Azure is the database option provided in the Azure cloud solution which is in many respects similar to SQL Server.
-
-Supported versions
-------------------
-
-The extension supports SQL Server 2008 - 2019 and SQL Azure.
-
-.. _sqlserver_install:
-
-Installing the SQL Server extension
------------------------------------
-
-GeoServer files
-```````````````
-
-#. Download the SQL Server extension from the `GeoServer download page `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-#. Restart the GeoServer to load the extension.
-
-Adding a SQL Server database
-----------------------------
-
-Once the extension is properly installed ``SQL Server`` will show up as an option when creating a new data store.
-
-.. figure:: images/sqlservercreate.png
- :align: center
-
- *SQL Server in the list of vector data sources*
-
-Configuring a SQL Server data store
------------------------------------
-
-.. figure:: images/sqlserverconfigure.png
- :align: center
-
- *Configuring a SQL Server data store*
-
-.. list-table::
- :widths: 20 80
-
- * - ``host``
- - The sql server instance host name or ip address, only. Note that ``server\instance`` notation is not accepted - specify the port below, instead, if you have a non-default instance.
- * - ``port``
- - The port on which the SQL server instance is accepting connections. See the :ref:`note ` below.
- * - ``database``
- - The name of the database to connect to. Might be left blank if the user connecting to SQL Server has a "default database" set in the user configuration
- * - ``schema``
- - The database schema to access tables from (optional).
- * - ``user``
- - The name of the user to connect to the database as.
- * - ``password``
- - The password to use when connecting to the database. Leave blank for no password.
- * - ``max connections``
-
- ``min connections``
-
- - Connection pool configuration parameters. See the :ref:`connection_pooling` section for details. If you are connecting to SQL Azure make sure to set the ``validate connections`` flag as SQL Azure closes inactive connections after a very short delay.
-
-.. _port_notes:
-
-Determining the port used by the SQL Server instance
-````````````````````````````````````````````````````
-
-You can determine the port in use by connecting to your SQL server instance using some other software, and then using :command:`netstat` to display details on network connections. In the following example on a Windows PC, the port is 2646 ::
-
- C:\>netstat -a | find "sql1"
- TCP DPI908194:1918 maittestsql1.dpi.nsw.gov.au:2646 ESTABLISHED
-
-
-Using the geometry metadata table
-`````````````````````````````````
-
-The SQL server data store can determine the geometry type and native SRID of a particular column only by data inspection,
-by looking at the first row in the table. Of course this is error prone, and works only if there is data in the table.
-The administrator can address the above issue by manually creating a geometry metadata table describing each geometry column.
-Its presence is indicated via the SQL Server datastore connection parameter named *Geometry metadata table*
-(which may be a simple table name or a schema-qualified one).
-The table has the following structure (the table name is flexible, just specify the one chosen in the data store connection parameter)::
-
- CREATE TABLE GEOMETRY_COLUMNS(
- F_TABLE_SCHEMA VARCHAR(30) NOT NULL,
- F_TABLE_NAME VARCHAR(30) NOT NULL,
- F_GEOMETRY_COLUMN VARCHAR(30) NOT NULL,
- COORD_DIMENSION INTEGER,
- SRID INTEGER NOT NULL,
- TYPE VARCHAR(30) NOT NULL,
- UNIQUE(F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN),
- CHECK(TYPE IN ('POINT', 'LINESTRING', 'POLYGON', 'MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION') ));
-
-When the table is present the store first searches it for information about each geometry column
-to be classified, and falls back on data inspection only if the table does not contain any information.
+.. _data_sqlserver:
+
+Microsoft SQL Server and SQL Azure
+==================================
+
+.. note:: GeoServer does not come built-in with support for SQL Server; it must be installed through an extension. Proceed to :ref:`sqlserver_install` for installation details.
+
+Microsoft's `SQL Server `_ is a relational database with spatial functionality. SQL Azure is the database option provided in the Azure cloud solution which is in many respects similar to SQL Server.
+
+Supported versions
+------------------
+
+The extension supports SQL Server 2008 - 2019 and SQL Azure.
+
+.. _sqlserver_install:
+
+Installing the SQL Server extension
+-----------------------------------
+
+GeoServer files
+```````````````
+
+#. Download the SQL Server extension from the `GeoServer download page `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+#. Restart the GeoServer to load the extension.
+
+Adding a SQL Server database
+----------------------------
+
+Once the extension is properly installed ``SQL Server`` will show up as an option when creating a new data store.
+
+.. figure:: images/sqlservercreate.png
+ :align: center
+
+ *SQL Server in the list of vector data sources*
+
+Configuring a SQL Server data store
+-----------------------------------
+
+.. figure:: images/sqlserverconfigure.png
+ :align: center
+
+ *Configuring a SQL Server data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - ``host``
+ - The SQL Server instance host name or IP address only. Note that ``server\instance`` notation is not accepted; specify the port below instead if you have a non-default instance.
+ * - ``port``
+ - The port on which the SQL Server instance is accepting connections. See the :ref:`note <port_notes>` below.
+ * - ``database``
+ - The name of the database to connect to. Might be left blank if the user connecting to SQL Server has a "default database" set in the user configuration
+ * - ``schema``
+ - The database schema to access tables from (optional).
+ * - ``user``
+ - The name of the user to connect to the database as.
+ * - ``password``
+ - The password to use when connecting to the database. Leave blank for no password.
+ * - ``max connections``
+
+ ``min connections``
+
+ - Connection pool configuration parameters. See the :ref:`connection_pooling` section for details. If you are connecting to SQL Azure make sure to set the ``validate connections`` flag as SQL Azure closes inactive connections after a very short delay.
+
+.. _port_notes:
+
+Determining the port used by the SQL Server instance
+````````````````````````````````````````````````````
+
+You can determine the port in use by connecting to your SQL server instance using some other software, and then using :command:`netstat` to display details on network connections. In the following example on a Windows PC, the port is 2646 ::
+
+ C:\>netstat -a | find "sql1"
+ TCP DPI908194:1918 maittestsql1.dpi.nsw.gov.au:2646 ESTABLISHED
+
+
+Using the geometry metadata table
+`````````````````````````````````
+
+The SQL server data store can determine the geometry type and native SRID of a particular column only by data inspection,
+by looking at the first row in the table. Of course this is error prone, and works only if there is data in the table.
+The administrator can address the above issue by manually creating a geometry metadata table describing each geometry column.
+Its presence is indicated via the SQL Server datastore connection parameter named *Geometry metadata table*
+(which may be a simple table name or a schema-qualified one).
+The table has the following structure (the table name is flexible, just specify the one chosen in the data store connection parameter)::
+
+ CREATE TABLE GEOMETRY_COLUMNS(
+ F_TABLE_SCHEMA VARCHAR(30) NOT NULL,
+ F_TABLE_NAME VARCHAR(30) NOT NULL,
+ F_GEOMETRY_COLUMN VARCHAR(30) NOT NULL,
+ COORD_DIMENSION INTEGER,
+ SRID INTEGER NOT NULL,
+ TYPE VARCHAR(30) NOT NULL,
+ UNIQUE(F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN),
+ CHECK(TYPE IN ('POINT', 'LINESTRING', 'POLYGON', 'MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION') ));
+
+When the table is present the store first searches it for information about each geometry column
+to be classified, and falls back on data inspection only if the table does not contain any information.
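+
+For example, a hypothetical ``dbo.Wells`` table with a ``geom`` column holding 2D points in EPSG:2193 could be registered with a row along these lines (adjust schema, table, column, dimension, SRID and type to your data)::
+
+   INSERT INTO GEOMETRY_COLUMNS
+       (F_TABLE_SCHEMA, F_TABLE_NAME, F_GEOMETRY_COLUMN, COORD_DIMENSION, SRID, TYPE)
+   VALUES ('dbo', 'Wells', 'geom', 2, 2193, 'POINT');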
diff --git a/doc/en/user/source/data/database/sqlsession.rst b/doc/en/user/source/data/database/sqlsession.rst
index 41e184bca1e..cd94db0aa77 100644
--- a/doc/en/user/source/data/database/sqlsession.rst
+++ b/doc/en/user/source/data/database/sqlsession.rst
@@ -1,71 +1,71 @@
-.. _data_sqlsession:
-
-Custom SQL session start/stop scripts
-=====================================
-
-Starting with version 2.1.4 GeoServer support custom SQL scripts that can be run every time GeoServer
-grabs a connection from the connection pool, and every time the session is returned to the pool.
-
-These scripts can be parametrized with the expansion of environment variables, which can be in turn
-set into the OGC request parameters with the same mechanism as :ref:`sld_variable_substitution`.
-
-In addition to the parameters provided via the request the ``GSUSER`` variable is guaranteed to
-contain the current GeoServer user, or be null if no authentication is available. This is useful
-if the SQL sessions scripts are used to provide tight control over database access
-
-The SQL script can expand environment variables using the ``${variableName, defaultValue}`` syntax,
-for example the following alters the current database user to be the same as the GeoServer current user,
-or ``geoserver`` in case no user was authenticated
-
- SET SESSION AUTHORIZATION ${GSUSER,geoserver}
-
-Using SQL session scripts to control authorizations at the database level
--------------------------------------------------------------------------
-
-GeoServer connects to a database via a connection pool, using the same rights as the user that
-is specified in the connection pool setup.
-In a setup that provides a variety of services and tables the connection pool user must have
-a rather large set of rights, such as table selection (WMS), table insert/update/delete (WFS-T) and
-even table creation (data upload via RESTConfig, WPS Import process and eventual new processes leveraging
-direct database connections).
-
-What a user can do can be controlled by means of the GeoServer security subsystem, but in high security
-setups this might not be considered enough, and a database level access control be preferred instead.
-In these setups normally the connection pool user has limited access, such as simple read only access,
-while specific users are allowed to perform more operations.
-
-When setting up such a solution remember the following guidelines:
-
-* The connection pool user must be able to access all table metadata regardless of whether it is able
- to actually perform a select on the tables (dictionary tables/describe functionality must be always accessible)
-* The connection pool must see each and every column of tables and views, in other words, the
- structure of the tables must not change as the current user changes
-* the database users and the GeoServer user must be kept in synch with some external tools, GeoServer
- provides no out of the box facilities
-* during the GeoServer startup the code will access the database to perform some sanity checks,
- in that moment there is no user authenticated in GeoServer so the code will run under whatever
- user was specified as the "default value" for the ``GSUSER`` variable.
-* The user that administers GeoServer (normally ``admin``, but it can be renamed, and other users
- given the administration roles too) must also be a database user, all administrative access on the
- GeoServer GUI will have that specific user controlling the session
-
-Typical use cases:
-
-* Give insert/update/delete rights only to users that must use WFS-T
-* Only allow the administrator to create new tables
-* Limit what rows of a table a user can see by using dynamic SQL views taking into account the
- current user to decide what rows to return
-
-To make a point in case, if we want the PostgreSQL session to run with the current GeoServer user
-credentials the following scripts will be used:
-
-.. figure:: images/postgresqlSession.png
- :align: center
-
- *Setting up session authorization for PostgreSQL*
-
-The first command makes the database session use either the current GeoServer user, or the ``geoserver``
-user if no authentication was available (anonymous user, or startup situation).
-The second command resets the session to the rights of the connection pool user.
-
-
+.. _data_sqlsession:
+
+Custom SQL session start/stop scripts
+=====================================
+
+Starting with version 2.1.4, GeoServer supports custom SQL scripts that can be run every time GeoServer
+grabs a connection from the connection pool, and every time the session is returned to the pool.
+
+These scripts can be parameterized through the expansion of environment variables, which can in turn be
+set via the OGC request parameters using the same mechanism as :ref:`sld_variable_substitution`.
+
+In addition to the parameters provided via the request, the ``GSUSER`` variable is guaranteed to
+contain the current GeoServer user, or be null if no authentication is available. This is useful
+if the SQL session scripts are used to provide tight control over database access.
+
+The SQL script can expand environment variables using the ``${variableName, defaultValue}`` syntax.
+For example, the following sets the current database user to the current GeoServer user,
+or to ``geoserver`` in case no user was authenticated::
+
+ SET SESSION AUTHORIZATION ${GSUSER,geoserver}
+
+Using SQL session scripts to control authorizations at the database level
+-------------------------------------------------------------------------
+
+GeoServer connects to a database via a connection pool, using the same rights as the user that
+is specified in the connection pool setup.
+In a setup that provides a variety of services and tables, the connection pool user must have
+a rather large set of rights, such as table selection (WMS), table insert/update/delete (WFS-T) and
+even table creation (data upload via RESTConfig, the WPS Import process, and any new processes leveraging
+direct database connections).
+
+What a user can do can be controlled by means of the GeoServer security subsystem, but in high security
+setups this might not be considered enough, and database level access control may be preferred instead.
+In these setups the connection pool user normally has limited access, such as simple read only access,
+while specific users are allowed to perform more operations.
+
+When setting up such a solution remember the following guidelines:
+
+* The connection pool user must be able to access all table metadata regardless of whether it is able
+  to actually perform a select on the tables (dictionary tables/describe functionality must always be accessible)
+* The connection pool user must see each and every column of tables and views; in other words, the
+  structure of the tables must not change as the current user changes
+* The database users and the GeoServer users must be kept in sync by some external tool; GeoServer
+  provides no out of the box facility for this
+* During GeoServer startup the code will access the database to perform some sanity checks; at that
+  moment there is no user authenticated in GeoServer, so the code will run under whatever
+  user was specified as the "default value" for the ``GSUSER`` variable.
+* The user that administers GeoServer (normally ``admin``, but it can be renamed, and other users can be
+  given the administration roles too) must also be a database user; all administrative access on the
+  GeoServer GUI will have that specific user controlling the session
+
+Typical use cases:
+
+* Give insert/update/delete rights only to users that must use WFS-T
+* Only allow the administrator to create new tables
+* Limit what rows of a table a user can see by using dynamic SQL views taking into account the
+ current user to decide what rows to return
+
+As a concrete example, if we want the PostgreSQL session to run with the credentials of the current
+GeoServer user, the following scripts can be used:
+
+.. figure:: images/postgresqlSession.png
+ :align: center
+
+ *Setting up session authorization for PostgreSQL*
+
+The first command makes the database session use either the current GeoServer user, or the ``geoserver``
+user if no authentication was available (anonymous user, or startup situation).
+The second command resets the session to the rights of the connection pool user.
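+
+For PostgreSQL, the two scripts would look roughly as follows (a sketch of what the figure above configures)::
+
+    -- session start script: impersonate the current GeoServer user
+    SET SESSION AUTHORIZATION ${GSUSER,geoserver}
+
+    -- session stop script: revert to the connection pool user's rights
+    RESET SESSION AUTHORIZATION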
+
+
diff --git a/doc/en/user/source/data/database/sqlview.rst b/doc/en/user/source/data/database/sqlview.rst
index 6e28ab3e23f..4e8e544ece6 100644
--- a/doc/en/user/source/data/database/sqlview.rst
+++ b/doc/en/user/source/data/database/sqlview.rst
@@ -1,227 +1,227 @@
-.. _sql_views:
-
-SQL Views
-=========
-
-The traditional way to access database data is to configure layers against either tables or database views.
-Starting with GeoServer 2.1.0, layers can also be defined as SQL Views.
-SQL Views allow executing a custom SQL query on each request to the layer.
-This avoids the need to create a database view for complex queries.
-
-Even more usefully, SQL View queries can be parameterized via string substitution.
-Parameter values can be supplied in both WMS and WFS requests.
-Default values can be supplied for parameters, and input values can be validated by Regular Expressions
-to eliminate the risk of SQL injection attacks.
-
-.. note::
-
- SQL Views are read-only, and thus cannot be updated by WFS-T transactions.
-
-Creating a SQL View
--------------------------
-
-In order to create a SQL View the administrator invokes the :guilabel:`Create new layer` page.
-When a database store is selected, the usual list of tables and views available for publication appears,
-A link :guilabel:`Configure new SQL view...` also appears:
-
-.. figure:: images/createsqlview.png
- :align: center
-
-Selecting the :guilabel:`Configure new SQL view...` link opens a new page where the SQL view query can be specified:
-
-.. figure:: images/createsql.png
- :align: center
-
-.. note::
-
- The query can be any SQL statement that is valid as a subquery in a FROM clause (that is, ``select * from () [as] vtable``).
- This is the case for most SQL statements, but in some databases special syntax may be needed to call stored procedures.
- Also, all the columns returned by the SQL statement must have names.
- In some databases alias names are required for function calls.
-
-When a valid SQL query has been entered, press the :guilabel:`Refresh` link in the **Attributes** table to get the list of the attribute columns determined from the query:
-
-.. figure:: images/sqlview-attributes.png
- :align: center
-
-GeoServer attempts to determine the geometry column type and the native SRID, but these should be verified and corrected if necessary.
-
-.. note::
-
- Having a correct SRID (spatial reference id) is essential for spatial queries to work.
- In many spatial databases the SRID is equal to the EPSG code for the specific spatial reference system, but this is not always the case (for instance, Oracle has a number of non-EPSG SRID codes).
-
-
-If stable feature ids are desired for the view's features, one or more columns providing a unique id for the features should be checked in the **Identifier** column.
-Always ensure these attributes generate a unique key, or filtering and WFS requests will not work correctly.
-
-Once the query and the attribute details are defined, press :guilabel:`Save`.
-The usual :guilabel:`New Layer` configuration page will appear.
-If further changes to the view are required, the page has a link to the SQL View editor at the bottom of the :guilabel:`Data` tab:
-
-.. figure:: images/sqlview-edit.png
- :align: center
-
-Once created, the SQL view layer is used in the same way as a conventional table-backed layer,
-with the one limitation of being read-only.
-
-.. warning:: Saving the SQL view definition here is not sufficient, the layer containing it must be saved as well for the change to have any effect.
- This is because the SQL view definition is actually just one component of the layer/featuretype/coverage attributes.
-
-Parameterizing SQL Views
-------------------------
-
-A parametric SQL view is based on a SQL query containing named parameters.
-The values for the parameters can be provided dynamically in WMS and WFS requests
-using the ``viewparams`` request parameter.
-Parameters can have default values specified,
-to handle the situation where they are not supplied in a request.
-Validation of supplied parameter values is supported by specifying validation regular expressions.
-Parameter values are only accepted if they match the regular expression defined for them.
-Appropriate parameter validation should always be used to avoid the risk of `SQL injection attacks `_.
-
-.. warning::
-
- SQL View parameter substitution should be used with caution, since improperly validated parameters open the risk of SQL injection attack.
- Where possible, consider using safer methods such as :ref:`dynamic filtering ` in the request, or :ref:`sld_variable_substitution`.
-
-
-Defining parameters
-^^^^^^^^^^^^^^^^^^^
-
-Within the SQL View query, parameter names are delimited by leading and trailing ``%`` signs.
-The parameters can occur anywhere within the query text,
-including such uses as within SQL string constants,
-in place of SQL keywords, or representing entire SQL clauses.
-
-Here is an example of a SQL View query for a layer called ``popstates`` with two parameters, ``low`` and ``high``:
-
-.. figure:: images/sqlview-parametricsql.png
- :align: center
-
-Each parameter needs to be defined with its name, an optional default value, and a validation expression.
-The :guilabel:`Guess parameters from SQL` link can be clicked to infer the query parameters automatically, or they can be entered manually.
-The result is a table filled with the parameter names, default values and validation expressions:
-
-.. figure:: images/sqlview-paramdefault.png
- :align: center
-
-In this case the default values should be specified, since the query cannot be executed without values for the parameters (because the expanded query ``select gid, state_name, the_geom from pgstates where persons between and`` is invalid SQL).
-Since the use of the parameters in the SQL query requires their values to be positive integer numbers, the validation regular expressions are specified to allow only numeric input (i.e. ``^[\d]+$``):
-
-.. figure:: images/sqlview-paramcustom.png
- :align: center
-
-Once the parameters have been defined,
-the **Attributes** :guilabel:`Refresh` link is clicked to parse the query and retrieve the attribute columns.
-The computed geometry type and column identifier details can be corrected if required.
-From this point on the workflow is the same as for a non-parameterized query.
-
-
-.. _using_a_parametric_sql_view:
-
-Using a parametric SQL View
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The SQL view parameters are specified by adding the ``viewparams`` parameter to the WMS ``GetMap``
-or the WFS ``GetFeature`` request.
-The ``viewparams`` argument is a list of ``key:value`` pairs, separated by semicolons:
-
- ``viewparams=p1:v1;p2:v2;...``
-
-If the values contain semicolons or commas these must be escaped with a backslash (e.g. ``\,`` and ``\;``).
-
-For example, the ``popstates`` SQL View layer can be displayed by invoking the :ref:`layerpreview`.
-Initially no parameter values are supplied, so the defaults are used and all the states are displayed.
-
-To display all states having more than 20 million inhabitants the following parameter is added to the ``GetMap`` request: ``&viewparams=low:20000000``
-
-.. figure:: images/sqlview-20millions.png
- :align: center
-
-To display all states having between 2 and 5 million inhabitants the view parameters are: ``&viewparams=low:2000000;high:5000000``
-
-.. figure:: images/sqlview-2m-5m.png
- :align: center
-
-
-Parameters can be provided for multiple layers by separating each parameter map with a comma:
-
- ``&viewparams=l1p1:v1;l1p2:v2,l2p1:v1;l2p2:v2,...``
-
-The number of parameter maps must match the number of layers (featuretypes) included in the request.
-
-Parameters and validation
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The value of a SQL View parameter can be an arbitrary string of text.
-The only constraint is that the attribute names and types returned by the view query must never change.
-This makes it possible to create views containing parameters representing complex SQL fragments.
-For example, using the view query ``select * from pgstates %where%`` allows specifying the WHERE clause of the query dynamically.
-However, this would likely require an empty validation expression.
-which presents a serious risk of `SQL injection attacks `_.
-This technique should only be used if access to the server is restricted to trusted clients.
-
-In general, SQL parameters must be used with care.
-They should always include validation regular expressions that accept only the intended parameter values.
-Note that while validation expressions should be constructed to prevent illegal values,
-they do not necessarily have to ensure the values are syntactically correct,
-since this will be checked by the database SQL parser.
-For example:
-
- * ``^[\d\.\+-eE]+$`` checks that a parameter value contains valid characters for floating-point numbers (including scientific notation), but does not check that the value is actually a valid number
- * ``[^;']+`` checks that a parameter value does not contain quotes or semicolons. This prevents common SQL injection attacks, but otherwise does not impose much limitation on the actual value
-
-Resources for Validation Regular expressions
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Defining effective validation regular expressions is important for security.
-Regular expressions are a complex topic that cannot be fully addressed here.
-The following are some resources for constructing regular expressions:
-
- * GeoServer uses the standard Java regular expression engine. The `Pattern class Javadocs `_ contain the full specification of the allowed syntax.
- * ``_ has many tutorials and examples of regular expressions.
- * The `myregexp `_ applet can be used to test regular expressions online.
-
-Place holder for the SQL WHERE clause
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The SQL ``WHERE`` clause produced by GeoServer using the context filters, e.g. the bounding box filter of a WMS query, will be added around the SQL view definition. This comes handy (better performance) when we have extra operations that can be done on top of the rows filtered with the GeoServer produced filter first.
-
-A typical use case for this functionality is the execution of analytic functions on top of the filtered results:
-
-.. code-block:: sql
-
- SELECT STATION_NAME,
- MEASUREMENT,
- MEASUREMENT_TYPE,
- LOCATION
- FROM
- (SELECT STATION_NAME,
- MEASUREMENT,
- MEASUREMENT_TYPE,
- LOCATION,
- ROW_NUMBER() OVER(PARTITION BY STATION_ID, MEASUREMENT_TYPE
- ORDER BY TIME DESC) AS RANK
- FROM
- (SELECT st.id AS STATION_ID,
- st.common_name AS STATION_NAME,
- ob.value AS MEASUREMENT,
- pr.param_name AS MEASUREMENT_TYPE,
- ob.time AS TIME,
- st.position AS LOCATION
- FROM meteo.meteo_stations st
- LEFT JOIN meteo.meteo_observations ob ON st.id = ob.station_id
- LEFT JOIN meteo.meteo_parameters pr ON ob.parameter_id = pr.id
-
- -- SQL WHERE clause place holder for GeoServer
- WHERE 1 = 1 :where_clause:) AS stations_filtered) AS stations
-
- WHERE RANK = 1;
-
-A few restrictions apply when using the explicit ``:where_clause:`` place holder:
-
- * it needs to be added in a position where all the attributes known by GeoServer are already present
- * the ``:where_clause:`` can only appear once
-
-When a ``WHERE`` clause place holder is present, GeoServer will always add an explicit ``AND`` at the beginning of the produced ``WHERE`` clause. This allows the injection of the produced ``WHERE`` in the middle of complex expressions if needed.
+.. _sql_views:
+
+SQL Views
+=========
+
+The traditional way to access database data is to configure layers against either tables or database views.
+Starting with GeoServer 2.1.0, layers can also be defined as SQL Views.
+SQL Views allow executing a custom SQL query on each request to the layer.
+This avoids the need to create a database view for complex queries.
+
+Even more usefully, SQL View queries can be parameterized via string substitution.
+Parameter values can be supplied in both WMS and WFS requests.
+Default values can be supplied for parameters, and input values can be validated by Regular Expressions
+to eliminate the risk of SQL injection attacks.
+
+.. note::
+
+ SQL Views are read-only, and thus cannot be updated by WFS-T transactions.
+
+Creating a SQL View
+-------------------------
+
+In order to create a SQL View the administrator invokes the :guilabel:`Create new layer` page.
+When a database store is selected, the usual list of tables and views available for publication appears,
+along with a :guilabel:`Configure new SQL view...` link:
+
+.. figure:: images/createsqlview.png
+ :align: center
+
+Selecting the :guilabel:`Configure new SQL view...` link opens a new page where the SQL view query can be specified:
+
+.. figure:: images/createsql.png
+ :align: center
+
+.. note::
+
+   The query can be any SQL statement that is valid as a subquery in a FROM clause (that is, ``select * from (<the view query>) [as] vtable``).
+   This is the case for most SQL statements, but in some databases special syntax may be needed to call stored procedures.
+   Also, all the columns returned by the SQL statement must have names.
+   In some databases alias names are required for function calls.
+
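+A simple (non-parameterized) example of a valid view query, using the ``pgstates`` sample table referenced later in this page::
+
+    select gid, state_name, the_geom from pgstates
+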
+When a valid SQL query has been entered, press the :guilabel:`Refresh` link in the **Attributes** table to get the list of the attribute columns determined from the query:
+
+.. figure:: images/sqlview-attributes.png
+ :align: center
+
+GeoServer attempts to determine the geometry column type and the native SRID, but these should be verified and corrected if necessary.
+
+.. note::
+
+ Having a correct SRID (spatial reference id) is essential for spatial queries to work.
+ In many spatial databases the SRID is equal to the EPSG code for the specific spatial reference system, but this is not always the case (for instance, Oracle has a number of non-EPSG SRID codes).
+
+
+If stable feature ids are desired for the view's features, one or more columns providing a unique id for the features should be checked in the **Identifier** column.
+Always ensure these attributes generate a unique key, or filtering and WFS requests will not work correctly.
+
+Once the query and the attribute details are defined, press :guilabel:`Save`.
+The usual :guilabel:`New Layer` configuration page will appear.
+If further changes to the view are required, the page has a link to the SQL View editor at the bottom of the :guilabel:`Data` tab:
+
+.. figure:: images/sqlview-edit.png
+ :align: center
+
+Once created, the SQL view layer is used in the same way as a conventional table-backed layer,
+with the one limitation of being read-only.
+
+.. warning:: Saving the SQL view definition here is not sufficient: the layer containing it must be saved as well for the change to have any effect.
+   This is because the SQL view definition is actually just one component of the layer/featuretype/coverage attributes.
+
+Parameterizing SQL Views
+------------------------
+
+A parametric SQL view is based on a SQL query containing named parameters.
+The values for the parameters can be provided dynamically in WMS and WFS requests
+using the ``viewparams`` request parameter.
+Parameters can have default values specified,
+to handle the situation where they are not supplied in a request.
+Validation of supplied parameter values is supported by specifying validation regular expressions.
+Parameter values are only accepted if they match the regular expression defined for them.
+Appropriate parameter validation should always be used to avoid the risk of `SQL injection attacks `_.
+
+.. warning::
+
+   SQL View parameter substitution should be used with caution, since improperly validated parameters open the door to SQL injection attacks.
+   Where possible, consider using safer methods such as :ref:`dynamic filtering ` in the request, or :ref:`sld_variable_substitution`.
+
+
+Defining parameters
+^^^^^^^^^^^^^^^^^^^
+
+Within the SQL View query, parameter names are delimited by leading and trailing ``%`` signs.
+The parameters can occur anywhere within the query text,
+including such uses as within SQL string constants,
+in place of SQL keywords, or representing entire SQL clauses.
+
+Here is an example of a SQL View query for a layer called ``popstates`` with two parameters, ``low`` and ``high``:
+
+.. figure:: images/sqlview-parametricsql.png
+ :align: center
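+
+In text form, the query shown in the figure is essentially::
+
+    select gid, state_name, the_geom from pgstates where persons between %low% and %high%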
+
+Each parameter needs to be defined with its name, an optional default value, and a validation expression.
+The :guilabel:`Guess parameters from SQL` link can be clicked to infer the query parameters automatically, or they can be entered manually.
+The result is a table filled with the parameter names, default values and validation expressions:
+
+.. figure:: images/sqlview-paramdefault.png
+ :align: center
+
+In this case default values should be specified, since the query cannot be executed without values for the parameters (the expanded query ``select gid, state_name, the_geom from pgstates where persons between and`` would be invalid SQL).
+Since the parameters must be positive integer numbers, the validation regular expressions are specified to allow only numeric input (i.e. ``^[\d]+$``):
+
+.. figure:: images/sqlview-paramcustom.png
+ :align: center
+
+Once the parameters have been defined,
+the **Attributes** :guilabel:`Refresh` link is clicked to parse the query and retrieve the attribute columns.
+The computed geometry type and column identifier details can be corrected if required.
+From this point on the workflow is the same as for a non-parameterized query.
+
+
+.. _using_a_parametric_sql_view:
+
+Using a parametric SQL View
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The SQL view parameters are specified by adding the ``viewparams`` parameter to the WMS ``GetMap``
+or the WFS ``GetFeature`` request.
+The ``viewparams`` argument is a list of ``key:value`` pairs, separated by semicolons:
+
+ ``viewparams=p1:v1;p2:v2;...``
+
+If the values contain semicolons or commas these must be escaped with a backslash (e.g. ``\,`` and ``\;``).
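+As an illustration, a value of ``10,000`` for a hypothetical parameter ``pop`` would be written as ``viewparams=pop:10\,000``.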
+
+For example, the ``popstates`` SQL View layer can be displayed by invoking the :ref:`layerpreview`.
+Initially no parameter values are supplied, so the defaults are used and all the states are displayed.
+
+To display all states having more than 20 million inhabitants the following parameter is added to the ``GetMap`` request: ``&viewparams=low:20000000``
+
+.. figure:: images/sqlview-20millions.png
+ :align: center
+
+To display all states having between 2 and 5 million inhabitants the view parameters are: ``&viewparams=low:2000000;high:5000000``
+
+.. figure:: images/sqlview-2m-5m.png
+ :align: center
+
+
+Parameters can be provided for multiple layers by separating each parameter map with a comma:
+
+ ``&viewparams=l1p1:v1;l1p2:v2,l2p1:v1;l2p2:v2,...``
+
+The number of parameter maps must match the number of layers (featuretypes) included in the request.
+
+Parameters and validation
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The value of a SQL View parameter can be an arbitrary string of text.
+The only constraint is that the attribute names and types returned by the view query must never change.
+This makes it possible to create views containing parameters representing complex SQL fragments.
+For example, using the view query ``select * from pgstates %where%`` allows specifying the WHERE clause of the query dynamically.
+However, this would likely require an empty validation expression,
+which presents a serious risk of `SQL injection attacks `_.
+This technique should only be used if access to the server is restricted to trusted clients.
+
+In general, SQL parameters must be used with care.
+They should always include validation regular expressions that accept only the intended parameter values.
+Note that while validation expressions should be constructed to prevent illegal values,
+they do not necessarily have to ensure the values are syntactically correct,
+since this will be checked by the database SQL parser.
+For example:
+
+ * ``^[\d\.\+\-eE]+$`` checks that a parameter value contains only characters valid in floating-point numbers (including scientific notation), but does not check that the value is actually a valid number
+ * ``[^;']+`` checks that a parameter value does not contain quotes or semicolons. This prevents common SQL injection attacks, but otherwise does not impose much limitation on the actual value
+
+Resources for Validation Regular expressions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Defining effective validation regular expressions is important for security.
+Regular expressions are a complex topic that cannot be fully addressed here.
+The following are some resources for constructing regular expressions:
+
+ * GeoServer uses the standard Java regular expression engine. The `Pattern class Javadocs `_ contain the full specification of the allowed syntax.
+ * ``_ has many tutorials and examples of regular expressions.
+ * The `myregexp `_ applet can be used to test regular expressions online.
+
+Place holder for the SQL WHERE clause
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The SQL ``WHERE`` clause produced by GeoServer from the context filters, e.g. the bounding box filter of a WMS query, will be added around the SQL view definition. This comes in handy (better performance) when extra operations need to be performed on top of the rows already filtered by the GeoServer-produced filter.
+
+A typical use case for this functionality is the execution of analytic functions on top of the filtered results:
+
+.. code-block:: sql
+
+ SELECT STATION_NAME,
+ MEASUREMENT,
+ MEASUREMENT_TYPE,
+ LOCATION
+ FROM
+ (SELECT STATION_NAME,
+ MEASUREMENT,
+ MEASUREMENT_TYPE,
+ LOCATION,
+ ROW_NUMBER() OVER(PARTITION BY STATION_ID, MEASUREMENT_TYPE
+ ORDER BY TIME DESC) AS RANK
+ FROM
+ (SELECT st.id AS STATION_ID,
+ st.common_name AS STATION_NAME,
+ ob.value AS MEASUREMENT,
+ pr.param_name AS MEASUREMENT_TYPE,
+ ob.time AS TIME,
+ st.position AS LOCATION
+ FROM meteo.meteo_stations st
+ LEFT JOIN meteo.meteo_observations ob ON st.id = ob.station_id
+ LEFT JOIN meteo.meteo_parameters pr ON ob.parameter_id = pr.id
+
+ -- SQL WHERE clause place holder for GeoServer
+ WHERE 1 = 1 :where_clause:) AS stations_filtered) AS stations
+
+ WHERE RANK = 1;
+
+A few restrictions apply when using the explicit ``:where_clause:`` place holder:
+
+ * it needs to be added in a position where all the attributes known by GeoServer are already present
+ * the ``:where_clause:`` can only appear once
+
+When a ``WHERE`` clause place holder is present, GeoServer will always add an explicit ``AND`` at the beginning of the produced ``WHERE`` clause. This allows the injection of the produced ``WHERE`` in the middle of complex expressions if needed.
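+
+As a rough sketch (the actual SQL depends on the request, the filters and the database dialect), a WMS bounding box filter could expand the place holder in the example above into something like::
+
+    WHERE 1 = 1 AND st.position && ST_MakeEnvelope(-10, 40, 10, 50, 4326)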
diff --git a/doc/en/user/source/data/raster/arcgrid.rst b/doc/en/user/source/data/raster/arcgrid.rst
index 1010e4e9756..019dcbf90b5 100644
--- a/doc/en/user/source/data/raster/arcgrid.rst
+++ b/doc/en/user/source/data/raster/arcgrid.rst
@@ -1,40 +1,40 @@
-.. _data_arcgrid:
-
-ArcGrid
-=======
-
-ArcGrid is a coverage file format created by ESRI.
-
-Adding an ArcGrid data store
-----------------------------
-
-By default, :guilabel:`ArcGrid` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
-
-.. figure:: images/arcgridcreate.png
- :align: center
-
- *ArcGrid in the list of raster data stores*
-
-Configuring a ArcGrid data store
---------------------------------
-
-.. figure:: images/arcgridconfigure.png
- :align: center
-
- *Configuring an ArcGrid data store*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - ``Workspace``
- -
- * - ``Data Source Name``
- -
- * - ``Description``
- -
- * - ``Enabled``
- -
- * - ``URL``
+.. _data_arcgrid:
+
+ArcGrid
+=======
+
+ArcGrid is a coverage file format created by ESRI.
+
+Adding an ArcGrid data store
+----------------------------
+
+By default, :guilabel:`ArcGrid` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
+
+.. figure:: images/arcgridcreate.png
+ :align: center
+
+ *ArcGrid in the list of raster data stores*
+
+Configuring a ArcGrid data store
+--------------------------------
+
+.. figure:: images/arcgridconfigure.png
+ :align: center
+
+ *Configuring an ArcGrid data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - ``Workspace``
+ -
+ * - ``Data Source Name``
+ -
+ * - ``Description``
+ -
+ * - ``Enabled``
+ -
+ * - ``URL``
-
\ No newline at end of file
diff --git a/doc/en/user/source/data/raster/geotiff.rst b/doc/en/user/source/data/raster/geotiff.rst
index d4bd73507d3..e5b41f6ac4e 100644
--- a/doc/en/user/source/data/raster/geotiff.rst
+++ b/doc/en/user/source/data/raster/geotiff.rst
@@ -1,47 +1,47 @@
-.. _data_geotiff:
-
-GeoTIFF
-=======
-
-A GeoTIFF is a georeferenced TIFF (Tagged Image File Format) file.
-
-Adding a GeoTIFF data store
----------------------------
-
-By default, :guilabel:`GeoTIFF` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
-
-.. figure:: images/geotiffcreate.png
- :align: center
-
- *GeoTIFF in the list of raster data stores*
-
-Configuring a GeoTIFF data store
---------------------------------
-
-.. figure:: images/geotiffconfigure.png
- :align: center
-
- *Configuring a GeoTIFF data store*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - ``Workspace``
- - Name of the workspace to contain the GeoTIFF store. This will also be the prefix of the raster layer created from the store.
- * - ``Data Source Name``
- - Name of the GeoTIFF as it will be known to GeoServer. This can be different from the filename. The combination of the workspace name and this name will be the full layer name (ex: world:landbase)
- * - ``Description``
- - A full free-form description of the GeoTIFF store.
- * - ``Enabled``
- - If checked, it enables the store. If unchecked (disabled), no data in the GeoTIFF will be served from GeoServer.
- * - ``URL``
- - Location of the GeoTIFF file. This can be an absolute path (such as :file:`file:C:\\Data\\landbase.tif`) or a path relative to GeoServer's data directory (such as :file:`file:data/landbase.tif`).
-
-.. note:: Notice that the GeoTiff plugin is able to handle internal/external overviews and internal/external masks.
-
-Custom CRS definition
-`````````````````````
-
-Creating an auxiliary ``.prj`` file that contains coordinate reference system information as described in the :ref:`crs_custom` chapter will override internal CRS tags that are included in the original GeoTIFF file. This can be used to work-around problematic source files without making modifications to the file.
+.. _data_geotiff:
+
+GeoTIFF
+=======
+
+A GeoTIFF is a georeferenced TIFF (Tagged Image File Format) file.
+
+Adding a GeoTIFF data store
+---------------------------
+
+By default, :guilabel:`GeoTIFF` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
+
+.. figure:: images/geotiffcreate.png
+ :align: center
+
+ *GeoTIFF in the list of raster data stores*
+
+Configuring a GeoTIFF data store
+--------------------------------
+
+.. figure:: images/geotiffconfigure.png
+ :align: center
+
+ *Configuring a GeoTIFF data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - ``Workspace``
+ - Name of the workspace to contain the GeoTIFF store. This will also be the prefix of the raster layer created from the store.
+ * - ``Data Source Name``
+ - Name of the GeoTIFF as it will be known to GeoServer. This can be different from the filename. The combination of the workspace name and this name will be the full layer name (ex: world:landbase)
+ * - ``Description``
+ - A full free-form description of the GeoTIFF store.
+ * - ``Enabled``
+ - If checked, it enables the store. If unchecked (disabled), no data in the GeoTIFF will be served from GeoServer.
+ * - ``URL``
+ - Location of the GeoTIFF file. This can be an absolute path (such as :file:`file:C:\\Data\\landbase.tif`) or a path relative to GeoServer's data directory (such as :file:`file:data/landbase.tif`).
+
+.. note:: The GeoTIFF plugin is able to handle internal/external overviews and internal/external masks.
+
+Custom CRS definition
+`````````````````````
+
+Creating an auxiliary ``.prj`` file that contains coordinate reference system information as described in the :ref:`crs_custom` chapter will override internal CRS tags that are included in the original GeoTIFF file. This can be used to work around problematic source files without modifying them.
diff --git a/doc/en/user/source/data/raster/imagepyramid.rst b/doc/en/user/source/data/raster/imagepyramid.rst
index 37569f5588c..3fb5db579a4 100644
--- a/doc/en/user/source/data/raster/imagepyramid.rst
+++ b/doc/en/user/source/data/raster/imagepyramid.rst
@@ -1,54 +1,54 @@
-.. _data_imagepyramid:
-
-ImagePyramid
-=============
-
-.. note:: GeoServer does not come built-in with support for Image Pyramid; it must be installed through an extension. Proceed to :ref:`imagepyramid_install` for installation details.
-
-An image pyramid is several layers of an image rendered at various image sizes, to be shown at different zoom levels.
-
-.. _imagepyramid_install:
-
-Installing the ImagePyramid extension
--------------------------------------
-
-#. Download the ImagePyramid extension from the `GeoServer download page
- `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding an ImagePyramid data store
----------------------------------
-
-Once the extension is properly installed :guilabel:`ImagePyramid` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
-
-.. figure:: images/imagepyramidcreate.png
- :align: center
-
- *ImagePyramid in the list of raster data stores*
-
-Configuring an ImagePyramid data store
---------------------------------------
-
-.. figure:: images/imagepyramidconfigure.png
- :align: center
-
- *Configuring an ImagePyramid data store*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - ``Workspace``
- -
- * - ``Data Source Name``
- -
- * - ``Description``
- -
- * - ``Enabled``
- -
- * - ``URL``
+.. _data_imagepyramid:
+
+ImagePyramid
+=============
+
+.. note:: GeoServer does not come built-in with support for Image Pyramid; it must be installed through an extension. Proceed to :ref:`imagepyramid_install` for installation details.
+
+An image pyramid is several layers of an image rendered at various image sizes, to be shown at different zoom levels.
+
+.. _imagepyramid_install:
+
+Installing the ImagePyramid extension
+-------------------------------------
+
+#. Download the ImagePyramid extension from the `GeoServer download page
+ `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding an ImagePyramid data store
+---------------------------------
+
+Once the extension is properly installed :guilabel:`ImagePyramid` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
+
+.. figure:: images/imagepyramidcreate.png
+ :align: center
+
+ *ImagePyramid in the list of raster data stores*
+
+Configuring an ImagePyramid data store
+--------------------------------------
+
+.. figure:: images/imagepyramidconfigure.png
+ :align: center
+
+ *Configuring an ImagePyramid data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - ``Workspace``
+ -
+ * - ``Data Source Name``
+ -
+ * - ``Description``
+ -
+ * - ``Enabled``
+ -
+ * - ``URL``
-
\ No newline at end of file
diff --git a/doc/en/user/source/data/raster/worldimage.rst b/doc/en/user/source/data/raster/worldimage.rst
index 57abaaa0a07..3c3c70ffeb5 100644
--- a/doc/en/user/source/data/raster/worldimage.rst
+++ b/doc/en/user/source/data/raster/worldimage.rst
@@ -1,40 +1,40 @@
-.. _data_worldimage:
-
-WorldImage
-==========
-
-A world file is a plain text file used to georeference raster map images. This file (often with an extension of ``.jgw`` or ``.tfw``) accompanies an associated image file (``.jpg`` or ``.tif``). Together, the world file and the corresponding image file is known as a WorldImage in GeoServer.
-
-Adding a WorldImage data store
-------------------------------
-
-By default, :guilabel:`WorldImage` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
-
-.. figure:: images/worldimagecreate.png
- :align: center
-
- *WorldImage in the list of raster data stores*
-
-Configuring a WorldImage data store
------------------------------------
-
-.. figure:: images/worldimageconfigure.png
- :align: center
-
- *Configuring a WorldImage data store*
-
-.. list-table::
- :widths: 20 80
-
- * - **Option**
- - **Description**
- * - ``Workspace``
- -
- * - ``Data Source Name``
- -
- * - ``Description``
- -
- * - ``Enabled``
- -
- * - ``URL``
+.. _data_worldimage:
+
+WorldImage
+==========
+
+A world file is a plain text file used to georeference raster map images. This file (often with an extension of ``.jgw`` or ``.tfw``) accompanies an associated image file (``.jpg`` or ``.tif``). Together, the world file and the corresponding image file is known as a WorldImage in GeoServer.
+
+Adding a WorldImage data store
+------------------------------
+
+By default, :guilabel:`WorldImage` will be an option in the :guilabel:`Raster Data Sources` list when creating a new data store.
+
+.. figure:: images/worldimagecreate.png
+ :align: center
+
+ *WorldImage in the list of raster data stores*
+
+Configuring a WorldImage data store
+-----------------------------------
+
+.. figure:: images/worldimageconfigure.png
+ :align: center
+
+ *Configuring a WorldImage data store*
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Option**
+ - **Description**
+ * - ``Workspace``
+ -
+ * - ``Data Source Name``
+ -
+ * - ``Description``
+ -
+ * - ``Enabled``
+ -
+ * - ``URL``
-
\ No newline at end of file
diff --git a/doc/en/user/source/data/vector/featurepregen.rst b/doc/en/user/source/data/vector/featurepregen.rst
index 4e47dc63f97..218ffe35b0a 100644
--- a/doc/en/user/source/data/vector/featurepregen.rst
+++ b/doc/en/user/source/data/vector/featurepregen.rst
@@ -1,36 +1,36 @@
-.. _data_featurepregen:
-
-Pregeneralized Features
-=======================
-
-.. note:: GeoServer does not come built-in with support for Pregeneralized Features; it must be installed through an extension.
-
-Installing the Pregeneralized Features extension
-------------------------------------------------
-
-#. Download the Pregeneralized Features extension from the `GeoServer download page
- `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding a Pregeneralized Features data store
--------------------------------------------
-
-If the extension is properly installed, :guilabel:`Generalized Data Store` will be listed as an option when creating a new data store.
-
-.. figure:: images/featurepregencreate.png
- :align: center
-
- *Generalized Data Store in the list of vector data stores*
-
-Configuring a Pregeneralized Features data store
-------------------------------------------------
-
-.. figure:: images/featurepregenconfigure.png
- :align: center
-
- *Configuring a Pregeneralized Features data store*
-
-For a detailed description, look at the :doc:`Tutorial`
+.. _data_featurepregen:
+
+Pregeneralized Features
+=======================
+
+.. note:: GeoServer does not come built-in with support for Pregeneralized Features; it must be installed through an extension.
+
+Installing the Pregeneralized Features extension
+------------------------------------------------
+
+#. Download the Pregeneralized Features extension from the `GeoServer download page
+ `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding a Pregeneralized Features data store
+-------------------------------------------
+
+If the extension is properly installed, :guilabel:`Generalized Data Store` will be listed as an option when creating a new data store.
+
+.. figure:: images/featurepregencreate.png
+ :align: center
+
+ *Generalized Data Store in the list of vector data stores*
+
+Configuring a Pregeneralized Features data store
+------------------------------------------------
+
+.. figure:: images/featurepregenconfigure.png
+ :align: center
+
+ *Configuring a Pregeneralized Features data store*
+
+For a detailed description, look at the :doc:`Tutorial`
diff --git a/doc/en/user/source/data/vector/gml.rst b/doc/en/user/source/data/vector/gml.rst
index 96bfe17c98b..bb15865a177 100644
--- a/doc/en/user/source/data/vector/gml.rst
+++ b/doc/en/user/source/data/vector/gml.rst
@@ -1,47 +1,47 @@
-.. _data_gml:
-
-GML
-===
-
-.. note:: GeoServer does not come built-in with support for GML; it must be installed through an extension. Proceed to :ref:`gml_install` for installation details.
-
-.. warning:: Currently the GML extension is unmaintained and carries unsupported status. While still usable, do not expect the same reliability as with other extension.
-
-Geographic Markup Language (GML) is a XML based format for representing vector based spatial data.
-
-
-Supported versions
-------------------
-
-Currently GML version 2 is supported.
-
-.. _gml_install:
-
-Installing the GML extension
-----------------------------
-
-#. Download the GML extension from the `GeoServer download page
- `_.
-
- .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
-
-#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
-
-Adding a GML data store
------------------------
-
-Once the extension is properly installed :guilabel:`GML` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
-
-.. figure:: images/gmlcreate.png
- :align: center
-
- *GML in the list of vector data stores*
-
-Configuring a GML data store
-----------------------------
-
-.. figure:: images/gmlconfigure.png
- :align: center
-
- *Configuring a GML data store*
+.. _data_gml:
+
+GML
+===
+
+.. note:: GeoServer does not come built-in with support for GML; it must be installed through an extension. Proceed to :ref:`gml_install` for installation details.
+
+.. warning:: Currently the GML extension is unmaintained and carries unsupported status. While still usable, do not expect the same reliability as with other extensions.
+
+Geography Markup Language (GML) is an XML-based format for representing vector-based spatial data.
+
+
+Supported versions
+------------------
+
+Currently GML version 2 is supported.
+
+.. _gml_install:
+
+Installing the GML extension
+----------------------------
+
+#. Download the GML extension from the `GeoServer download page
+ `_.
+
+ .. warning:: Make sure to match the version of the extension to the version of the GeoServer instance!
+
+#. Extract the contents of the archive into the ``WEB-INF/lib`` directory of the GeoServer installation.
+
+Adding a GML data store
+-----------------------
+
+Once the extension is properly installed :guilabel:`GML` will be an option in the :guilabel:`Vector Data Sources` list when creating a new data store.
+
+.. figure:: images/gmlcreate.png
+ :align: center
+
+ *GML in the list of vector data stores*
+
+Configuring a GML data store
+----------------------------
+
+.. figure:: images/gmlconfigure.png
+ :align: center
+
+ *Configuring a GML data store*
\ No newline at end of file
diff --git a/doc/en/user/source/extensions/excel.rst b/doc/en/user/source/extensions/excel.rst
index 46140b15636..23ce34497b3 100644
--- a/doc/en/user/source/extensions/excel.rst
+++ b/doc/en/user/source/extensions/excel.rst
@@ -1,50 +1,50 @@
-.. _excel_extension:
-
-Excel WFS Output Format
-=======================
-
-The GeoServer Excel plugin adds the ability to output WFS responses in either Excel 97-2003 (``.xls``) or Excel 2007 (``.xlsx``) formats.
-
-Installation
-------------
-
- 1. Download the Excel plugin for your version of GeoServer from the `download page `_.
- 2. Unzip the archive into the WEB-INF/lib directory of the GeoServer installation.
- 3. Restart GeoServer.
-
-Usage
------
-
-When making a WFS request, set the ``outputFormat`` to ``excel`` (for Excel 97-2003) or ``excel2007`` (for Excel 2007).
-
-Examples
---------
-
-Excel 97-2003 GET:
- http://localhost:8080/geoserver/wfs?request=GetFeature&version=1.1.0&typeName=topp:states&outputFormat=excel
-
-Excel 2007 GET:
- http://localhost:8080/geoserver/wfs?request=GetFeature&version=1.1.0&typeName=topp:states&outputFormat=excel2007
-
-**Excel 97-2003 POST**::
-
-
-
-
-
-Limitations
------------
-
-Excel 97-2003 files are stored in a binary format and are thus space-efficient, but have inherent size limitations (65,526 rows per sheet; 256 columns per sheet).
-
-Excel 2007 files are XML-based, and have much higher limits (1,048,576 rows per sheet; 16,384 columns per sheet).
-However, because they are text files Excel 2007 files are usually larger than Excel 97-2003 files.
-
-If the number of rows in a sheet or characters in a cell exceeds the limits of the chosen Excel file format, warning text is inserted to indicate the truncation.
+.. _excel_extension:
+
+Excel WFS Output Format
+=======================
+
+The GeoServer Excel plugin adds the ability to output WFS responses in either Excel 97-2003 (``.xls``) or Excel 2007 (``.xlsx``) formats.
+
+Installation
+------------
+
+ 1. Download the Excel plugin for your version of GeoServer from the `download page `_.
+ 2. Unzip the archive into the WEB-INF/lib directory of the GeoServer installation.
+ 3. Restart GeoServer.
+
+Usage
+-----
+
+When making a WFS request, set the ``outputFormat`` to ``excel`` (for Excel 97-2003) or ``excel2007`` (for Excel 2007).
+
+Examples
+--------
+
+Excel 97-2003 GET:
+ http://localhost:8080/geoserver/wfs?request=GetFeature&version=1.1.0&typeName=topp:states&outputFormat=excel
+
+Excel 2007 GET:
+ http://localhost:8080/geoserver/wfs?request=GetFeature&version=1.1.0&typeName=topp:states&outputFormat=excel2007
+
+**Excel 97-2003 POST**::
+
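+  <!-- an illustrative WFS 1.1.0 request body; the topp:states layer is taken from the GET examples above -->
+  <wfs:GetFeature service="WFS" version="1.1.0" outputFormat="excel"
+    xmlns:topp="http://www.openplans.org/topp"
+    xmlns:wfs="http://www.opengis.net/wfs"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
+    <wfs:Query typeName="topp:states"/>
+  </wfs:GetFeature>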
+
+
+
+
+Limitations
+-----------
+
+Excel 97-2003 files are stored in a binary format and are thus space-efficient, but have inherent size limitations (65,526 rows per sheet; 256 columns per sheet).
+
+Excel 2007 files are XML-based, and have much higher limits (1,048,576 rows per sheet; 16,384 columns per sheet).
+However, because they are text-based, Excel 2007 files are usually larger than Excel 97-2003 files.
+
+If the number of rows in a sheet or characters in a cell exceeds the limits of the chosen Excel file format, warning text is inserted to indicate the truncation.
diff --git a/doc/en/user/source/extensions/imagemap.rst b/doc/en/user/source/extensions/imagemap.rst
index 045136fc33f..67fc86c30d5 100644
--- a/doc/en/user/source/extensions/imagemap.rst
+++ b/doc/en/user/source/extensions/imagemap.rst
@@ -1,63 +1,63 @@
-.. _imagemap_extension:
-
-Imagemap
-========
-
-HTML ImageMaps have been used for a long time to create interactive images in a light way. Without using Flash, SVG or VML you can simply associate different links or tooltips to different regions of an image.
-Why can't we use this technique to achieve the same result on a GeoServer map?
-The idea is to combine a raster map (png, gif, jpeg, ...) with an HTML ImageMap overlay to add links, tooltips, or mouse events behavior to the map.
-
-An example of an ImageMap adding tooltips to a map:
-
-.. code-block:: xml
-
-
-
-
-An example of an ImageMap adding links to a map:
-
-.. code-block:: xml
-
-
-
-
-A more complex example adding interactive behaviour on mouse events:
-
-.. code-block:: xml
-
-
-
-
-To realize this in GeoServer some great community contributors developed an HTMLImageMap GetMapProducer for GeoServer, able to render an HTMLImageMap in response to a WMS GetMap request.
-
-The GetMapProducer is associated to the text/html mime type. It produces, for each requested layer, a section containing the geometries of the layer as distinct tags.
-Due to the limitations in the shape types supported by the tag, a single geometry can be split into multiple ones. This way almost any complex geometry can be rendered transforming it into simpler ones.
-
-To add interactive attributes we use styling. In particular, an SLD Rule containing a TextSymbolizer with a Label definition can be used to define dynamic values for the tags attributes. The Rule name will be used as the attribute name.
-
-As an example, to define a title attribute (associating a tooltip to the geometries of the layer) you can use a rule like the following one:
-
-.. code-block:: xml
-
-
- title
-
-
-
-
-
-To render multiple attributes, just define multiple rules, with different names (href, onmouseover, etc.)
-
-Styling support is not limited to TextSymbolizers, you can currently use other symbolizers to detail rendering. For example you can:
-
- * use a PointSymbolizer with a Size property to define point sizes.
- * use LineSymbolizer with a stroke-width CssParameter to create thick lines.
+.. _imagemap_extension:
+
+Imagemap
+========
+
+HTML ImageMaps have been used for a long time to create interactive images in a lightweight way. Without using Flash, SVG or VML you can simply associate different links or tooltips with different regions of an image.
+Why can't we use this technique to achieve the same result on a GeoServer map?
+The idea is to combine a raster map (png, gif, jpeg, ...) with an HTML ImageMap overlay to add links, tooltips, or mouse event behavior to the map.
+
+An example of an ImageMap adding tooltips to a map:
+
+.. code-block:: xml
+
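+   <!-- a hypothetical sketch: shapes, coordinates and region names are made up -->
+   <img src="map.png" usemap="#regions" alt="map"/>
+   <map name="regions">
+     <area shape="poly" coords="112,65,140,70,135,100,110,95" title="Colorado"/>
+     <area shape="poly" coords="200,120,230,118,228,150,202,152" title="Kansas"/>
+   </map>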
+
+
+
+An example of an ImageMap adding links to a map:
+
+.. code-block:: xml
+
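+   <!-- the same hypothetical map, with links instead of tooltips -->
+   <map name="regions">
+     <area shape="poly" coords="112,65,140,70,135,100,110,95" href="http://en.wikipedia.org/wiki/Colorado"/>
+   </map>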
+
+
+
+A more complex example adding interactive behaviour on mouse events:
+
+.. code-block:: xml
+
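+   <!-- the same hypothetical map; highlight() and clearHighlight() are user-supplied JavaScript functions -->
+   <map name="regions">
+     <area shape="poly" coords="112,65,140,70,135,100,110,95"
+           onmouseover="highlight('Colorado')" onmouseout="clearHighlight()"/>
+   </map>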
+
+
+
+To realize this in GeoServer, community contributors developed an HTMLImageMap GetMapProducer, able to render an HTML ImageMap in response to a WMS GetMap request.
+
+The GetMapProducer is associated with the text/html mime type. It produces, for each requested layer, a ``<map>`` section containing the geometries of the layer as distinct ``<area>`` tags.
+Due to the limitations in the shape types supported by the ``<area>`` tag, a single geometry can be split into multiple ones. This way almost any complex geometry can be rendered by transforming it into simpler ones.
+
+To add interactive attributes we use styling. In particular, an SLD Rule containing a TextSymbolizer with a Label definition can be used to define dynamic values for the ``<area>`` tag attributes. The Rule name will be used as the attribute name.
+
+As an example, to define a title attribute (associating a tooltip to the geometries of the layer) you can use a rule like the following one:
+
+.. code-block:: xml
+
+   <!-- the STATE_NAME property used in the Label is illustrative -->
+   <Rule>
+     <Name>title</Name>
+     <TextSymbolizer>
+       <Label>
+         <ogc:PropertyName>STATE_NAME</ogc:PropertyName>
+       </Label>
+     </TextSymbolizer>
+   </Rule>
+
+
+To render multiple attributes, just define multiple rules, with different names (href, onmouseover, etc.)
+
+Styling support is not limited to TextSymbolizers; you can also use other symbolizers to fine-tune the rendering. For example you can:
+
+ * use a PointSymbolizer with a Size property to define point sizes.
+ * use a LineSymbolizer with a stroke-width CssParameter to create thick lines.
diff --git a/doc/en/user/source/extensions/index.rst b/doc/en/user/source/extensions/index.rst
index e6e0328dacf..494d354b38f 100644
--- a/doc/en/user/source/extensions/index.rst
+++ b/doc/en/user/source/extensions/index.rst
@@ -1,43 +1,43 @@
-.. _extensions:
-
-Extensions
-==========
-
-Extensions are modules that add functionality to GeoServer. They are installed as add-ons to the base GeoServer installation.
-
-This section describes most of the extensions available for GeoServer. Other data formats can be found in the :ref:`data_vector`, :ref:`data_raster`, :ref:`data_database`, and :ref:`styling` sections.
-
-.. toctree::
- :maxdepth: 1
-
- authkey/index
- controlflow/index
- dxf/index
- excel
- grib/grib
- imagemap
- importer/index
- inspire/index
- jp2k/index
- libjpeg-turbo/index
- monitoring/index
- netcdf/netcdf
- netcdf-out/index
- ogr
- printing/index
- querylayer/index
- vectortiles/index
- xslt/index
- wcs20eo/index
- mongodb/index
- sldservice/index
- geofence/index
- geofence-server/index
- cas/index
- params-extractor/index
- gwc-s3/index
- wmts-multidimensional/index
- wps-download/index
- wps-jdbc/index
- mapml/index
-
+.. _extensions:
+
+Extensions
+==========
+
+Extensions are modules that add functionality to GeoServer. They are installed as add-ons to the base GeoServer installation.
+
+This section describes most of the extensions available for GeoServer. Other data formats can be found in the :ref:`data_vector`, :ref:`data_raster`, :ref:`data_database`, and :ref:`styling` sections.
+
+.. toctree::
+ :maxdepth: 1
+
+ authkey/index
+ controlflow/index
+ dxf/index
+ excel
+ grib/grib
+ imagemap
+ importer/index
+ inspire/index
+ jp2k/index
+ libjpeg-turbo/index
+ monitoring/index
+ netcdf/netcdf
+ netcdf-out/index
+ ogr
+ printing/index
+ querylayer/index
+ vectortiles/index
+ xslt/index
+ wcs20eo/index
+ mongodb/index
+ sldservice/index
+ geofence/index
+ geofence-server/index
+ cas/index
+ params-extractor/index
+ gwc-s3/index
+ wmts-multidimensional/index
+ wps-download/index
+ wps-jdbc/index
+ mapml/index
+
diff --git a/doc/en/user/source/extensions/inspire/installing.rst b/doc/en/user/source/extensions/inspire/installing.rst
index 45bdcfd6297..425b34f97ee 100644
--- a/doc/en/user/source/extensions/inspire/installing.rst
+++ b/doc/en/user/source/extensions/inspire/installing.rst
@@ -1,14 +1,14 @@
-.. _inspire_installing:
-
-Installing the INSPIRE extension
-================================
-
-The INSPIRE extension is a official extension available at `GeoServer download `_ pages (starting with GeoServer 2.3.2).
-
-#. Download the inspire zip release file from the download page of your version of GeoServer
-
-#. Extract the archive and copy the contents into the ``/WEB-INF/lib`` directory.
-
-#. Restart GeoServer.
-
-To verify that the extension was installed successfully, please see the next section on :ref:`inspire_using`.
+.. _inspire_installing:
+
+Installing the INSPIRE extension
+================================
+
+The INSPIRE extension is an official extension available at `GeoServer download `_ pages (starting with GeoServer 2.3.2).
+
+#. Download the INSPIRE zip release file from the download page for your version of GeoServer.
+
+#. Extract the archive and copy the contents into the ``/WEB-INF/lib`` directory (see the sketch after this list).
+
+#. Restart GeoServer.
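+
+On a Linux box the extract-and-copy step might look like the following sketch, where the zip file name and the web application path are illustrative::
+
+   unzip geoserver-inspire-plugin.zip -d /tmp/inspire
+   cp /tmp/inspire/*.jar <GEOSERVER_WEBAPP>/WEB-INF/lib/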
+
+To verify that the extension was installed successfully, please see the next section on :ref:`inspire_using`.
diff --git a/doc/en/user/source/extensions/inspire/using.rst b/doc/en/user/source/extensions/inspire/using.rst
index 60db98410d6..ff65c3ddc94 100644
--- a/doc/en/user/source/extensions/inspire/using.rst
+++ b/doc/en/user/source/extensions/inspire/using.rst
@@ -1,191 +1,191 @@
-.. _inspire_using:
-
-Using the INSPIRE extension
-===========================
-
-When the INSPIRE extension has been properly installed, the :ref:`services_webadmin_wms`, :ref:`services_webadmin_wfs` and :ref:`services_webadmin_wcs` sections of the :ref:`web_admin` will show an extra INSPIRE configuration section. If the data directory has not been configured with INSPIRE parameters before, this section will just contain a check box to enable the creation of an INSPIRE ExtendedCapabilities element.
-
-.. figure:: images/noinspire.png
- :align: center
-
-.. note:: If you do not see this content in the service configuration pages, the INSPIRE extension may not be installed properly. Reread the section on :ref:`inspire_installing` and verify that the correct file was saved to the correct directory.
-
-Extended WMS and WMTS configuration
------------------------------------
-
-INSPIRE-specific configuration is accessed on the main :ref:`services_webadmin_wms` or WMTS settings page in the :ref:`web_admin`. This is accessed by clicking on the :guilabel:`WMS` or :guilabel:`WMTS` link on the sidebar.
-
-.. note:: You must be logged in as an administrator to edit WMS or WMTS configuration.
-
-Once on the service configuration page, there will be a block titled :guilabel:`INSPIRE`. If you enable the checkbox shown above this section will have three additional settings:
-
-* :guilabel:`Default Language` combo box, for setting the Default
-* :guilabel:`Other Supported Languages` area for setting Supported Languages
-* :guilabel:`Service Metadata URL` field, a URL containing the location of the metadata associated with the service
-* :guilabel:`Service Metadata Type` combo box, for detailing whether the metadata came from a CSW (Catalog Service) or a standalone metadata file
-
-.. figure:: images/inspire.png
- :align: center
-
- *INSPIRE-related options*
-
-After clicking :guilabel:`Submit` on this page, any changes will be immediately reflected in the services (WMS 1.3.0 or WMTS 1.0.0) capabilities document.
-
-.. note:: The :guilabel:`Service Metadata URL` field is mandatory so you will not be allowed to submit a blank value.
-
-.. note:: The :guilabel:`Service Metadata Type` combo box only allows to select the appropriate MIME type for a CSW response or standalone metadata file or to omit a value altogether. If you think other values would be useful you could raise the issue on the :ref:`GeoServer mailing list `. In the meantime it is possible to manually edit the created configuration files as a workaround.
-
-Extended WMS and WMTS Capabilities
-----------------------------------
-
-.. note:: The INSPIRE extension only modifies the WMS 1.3.0 response, so please make sure that you are viewing the correct capabilities document.
-
-The WMS 1.3.0 and WMTS 1.0.0 capabilities document will contain two additional entries in the ``xsi:schemaLocation`` of the root ```` tag once the INSPIRE extension is installed:
-
-* ``http://inspire.ec.europa.eu/schemas/inspire_vs/1.0``
-* ``https://inspire.ec.europa.eu/schemas/inspire_vs/1.0/inspire_vs.xsd``
-
-If you have enabled the check box to create the INSPIRE ExtendedCapabilities element and entered the values described in the previous section then there will also be an additional ExtendedCapabilities block. This tag block shows up in between the tags for ```` and ````. It contains the following information:
-
-* Metadata URL and MIME type
-* Supported Language(s)
-* Response Language
-
-With the example values shown in the above configuration panel, this block would contain the following content::
-
-
-
-
- http://mysite.org/metadata.xml
-
-
- application/vnd.iso.19139+xml
-
-
-
-
- eng
-
- fre
-
-
- eng
-
-
-
-ISNPIRE recommends that every layer offered by a INSPIRE WMTS should use the InspireCRS84Quad grid set which is already configured in GeoServer, but is up to the user to select it when publishing a INSPIRE WMTS layer.
-
-Extended WFS and WCS configuration
-----------------------------------
-
-INSPIRE-specific configuration is accessed on the main :ref:`services_webadmin_wfs` and :ref:`services_webadmin_wcs` pages in the :ref:`web_admin`. These are accessed by clicking on the :guilabel:`WFS` and :guilabel:`WCS` links on the sidebar respectively.
-
-.. note:: You must be logged in as an administrator to edit WFS configuration.
-
-Once on the WFS or WCS configuration page, there will be a block titled :guilabel:`INSPIRE`. If you enable the checkbox shown above this section will have the following additional settings:
-
-* :guilabel:`Language` combo box, for setting the Supported, Default, and Response languages
-* :guilabel:`Other Supported Languages` area for setting Supported Languages
-* :guilabel:`Service Metadata URL` field, a URL containing the location of the metadata associated with the WFS or WCS
-* :guilabel:`Service Metadata Type` combo box, for detailing whether the metadata came from a CSW (Catalog Service) or a standalone metadata file
-* :guilabel:`Spatial dataset identifers` table, where you can specify a code (mandatory), a namespace (optional) and a metadata URL (optional) for each spatial data set the WFS or WCS is offering
-
-.. figure:: images/inspire_wfs.png
- :align: center
-
- *INSPIRE-related options*
-
-After clicking :guilabel:`Submit` on this page, any changes will be immediately reflected in the WFS 1.1 and WFS 2.0 or WCS 2.0 capabilities documents as appropriate.
-
-.. note:: The :guilabel:`Service Metadata URL` field and at least one :guilabel:`Spatial dataset identifers` entry are mandatory so you will not be allowed to submit the page without these.
-
-.. note:: The :guilabel:`Service Metadata Type` combo box only allows to select the appropriate MIME type for a CSW response or standalone metadata file or to omit a value altogether. If you think other values would be useful you could raise the issue on the :ref:`GeoServer mailing list `. In the meantime it is possible to manually edit the created configuration files as a workaround.
-
-Extended WFS and WCS Capabilities
----------------------------------
-
-.. note:: The INSPIRE directive is relevant to WFS 1.1 and 2.0 and WCS 2.0 only, so please make sure that you are viewing the correct capabilities document.
-
-The WFS and WCS capabilities documents will contain two additional entries in the ``xsi:schemaLocation`` of the root element tag once the INSPIRE extension is installed:
-
-* ``https://inspire.ec.europa.eu/schemas/common/1.0/common.xsd``
-* ``https://inspire.ec.europa.eu/schemas/inspire_dls/1.0/inspire_dls.xsd``
-
-If you have enabled the check box to create the INSPIRE ExtendedCapabilities element and entered the values described in the previous section then there will also be an additional ExtendedCapabilities block with the following information:
-
-* Metadata URL and MIME type
-* Supported Language(s)
-* Response Language
-* Spatial data identifier(s)
-
-With the example values shown in the above configuration panel, this block would contain the following content::
-
-
-
-
- http://mysite.org/csw?SERVICE=CSW&REQUEST=GetRecord
-
-
- application/vnd.iso.19139+xml
-
-
-
-
- eng
-
- fre
-
-
- eng
-
-
-
- fc929094-8a30-2617-e044-002128a47908
-
-
- http://metadata.mysite.org/ds
-
-
-
-
-The spatial data identifiers section is mandatory, but cannot be filled by default, it is your duty to provide at least one spatial dataset identifier (see the INSPIRE download service technical guidelines for more information).
-
-Internationalization support
-----------------------------
-
-GeoServer offers the ability to configure GetCapabilities response in multiple languages. Content in different laguages can be requested by using the request parameter `Language`, e.g. `Language=eng`. At the time of writing, the following services support the parameter: WFS 2.0, WMS 1.1 and 1.3, WCS 2.0.
-
-At the time of writing the `INSPIRE Schemas `_ only allow 23 choices for :guilabel:`DefaultLanguage`. The GeoServer INSPIRE extension allows some other languages to be chosen. If you choose one of these your capabilities document won't be Schema valid but, as discussed in :geos:`issue 7388 <7388>`, the INSPIRE Schemas seem to be at fault.
-
-The language list available from the UI is define in a classpath file named ``available_languages.properties`` with the following content::
-
- bul=bg
- cze=cs
- dan=da
- dut=nl
- eng=en
- est=et
- fin=fi
- fre=fr
- hrv=hr
- ice=is
- ger=de
- gle=ga
- gre=el
- gsw=de-CH
- hun=hu
- ita=it
- lav=lv
- lit=lt
- mlt=mt
- nor=nb
- pol=pl
- por=pt
- rum=ro
- slo=sk
- slv=sl
- spa=es
- swe=sv
-
-The entries of the above list represent the available INSPIRE language code matched with the corresponding ``ISO 639-1`` code. The GeoServer internationalization support is based on OWS 2.0, and thus using ISO codes internally. The INSPIRE module maps on the fly the INSPIRE names to ISO codes based on the above property file.
-The property file can be overridden by placing a properties file named ``available_languages.properties`` in the ``inspire`` directory inside the GeoServer data directory.
+.. _inspire_using:
+
+Using the INSPIRE extension
+===========================
+
+When the INSPIRE extension has been properly installed, the :ref:`services_webadmin_wms`, :ref:`services_webadmin_wfs` and :ref:`services_webadmin_wcs` sections of the :ref:`web_admin` will show an extra INSPIRE configuration section. If the data directory has not been configured with INSPIRE parameters before, this section will just contain a check box to enable the creation of an INSPIRE ExtendedCapabilities element.
+
+.. figure:: images/noinspire.png
+ :align: center
+
+.. note:: If you do not see this content in the service configuration pages, the INSPIRE extension may not be installed properly. Reread the section on :ref:`inspire_installing` and verify that the correct file was saved to the correct directory.
+
+Extended WMS and WMTS configuration
+-----------------------------------
+
+INSPIRE-specific configuration is accessed on the main :ref:`services_webadmin_wms` or WMTS settings page in the :ref:`web_admin`. This is accessed by clicking on the :guilabel:`WMS` or :guilabel:`WMTS` link on the sidebar.
+
+.. note:: You must be logged in as an administrator to edit WMS or WMTS configuration.
+
+Once on the service configuration page, there will be a block titled :guilabel:`INSPIRE`. If you enable the checkbox shown above, this section will show the following additional settings:
+
+* :guilabel:`Default Language` combo box, for setting the default language
+* :guilabel:`Other Supported Languages` area for setting Supported Languages
+* :guilabel:`Service Metadata URL` field, a URL containing the location of the metadata associated with the service
+* :guilabel:`Service Metadata Type` combo box, for detailing whether the metadata came from a CSW (Catalog Service) or a standalone metadata file
+
+.. figure:: images/inspire.png
+ :align: center
+
+ *INSPIRE-related options*
+
+After clicking :guilabel:`Submit` on this page, any changes will be immediately reflected in the services (WMS 1.3.0 or WMTS 1.0.0) capabilities document.
+
+.. note:: The :guilabel:`Service Metadata URL` field is mandatory so you will not be allowed to submit a blank value.
+
+.. note:: The :guilabel:`Service Metadata Type` combo box only allows you to select the appropriate MIME type for a CSW response or standalone metadata file, or to omit a value altogether. If you think other values would be useful you could raise the issue on the :ref:`GeoServer mailing list `. In the meantime it is possible to manually edit the created configuration files as a workaround.
+
+Extended WMS and WMTS Capabilities
+----------------------------------
+
+.. note:: The INSPIRE extension only modifies the WMS 1.3.0 response, so please make sure that you are viewing the correct capabilities document.
+
+The WMS 1.3.0 and WMTS 1.0.0 capabilities documents will contain two additional entries in the ``xsi:schemaLocation`` of the root element once the INSPIRE extension is installed:
+
+* ``http://inspire.ec.europa.eu/schemas/inspire_vs/1.0``
+* ``https://inspire.ec.europa.eu/schemas/inspire_vs/1.0/inspire_vs.xsd``
+
+If you have enabled the check box to create the INSPIRE ExtendedCapabilities element and entered the values described in the previous section, then there will also be an additional ExtendedCapabilities block. This block shows up between existing elements of the capabilities document and contains the following information:
+
+* Metadata URL and MIME type
+* Supported Language(s)
+* Response Language
+
+With the example values shown in the above configuration panel, this block would contain the following content::
+
+   <inspire_vs:ExtendedCapabilities>
+     <inspire_common:MetadataUrl>
+       <inspire_common:URL>http://mysite.org/metadata.xml</inspire_common:URL>
+       <inspire_common:MediaType>application/vnd.iso.19139+xml</inspire_common:MediaType>
+     </inspire_common:MetadataUrl>
+     <inspire_common:SupportedLanguages>
+       <inspire_common:DefaultLanguage>
+         <inspire_common:Language>eng</inspire_common:Language>
+       </inspire_common:DefaultLanguage>
+       <inspire_common:SupportedLanguage>
+         <inspire_common:Language>fre</inspire_common:Language>
+       </inspire_common:SupportedLanguage>
+     </inspire_common:SupportedLanguages>
+     <inspire_common:ResponseLanguage>
+       <inspire_common:Language>eng</inspire_common:Language>
+     </inspire_common:ResponseLanguage>
+   </inspire_vs:ExtendedCapabilities>
+
+INSPIRE recommends that every layer offered by an INSPIRE WMTS use the InspireCRS84Quad grid set, which is already configured in GeoServer, but it is up to the user to select it when publishing an INSPIRE WMTS layer.
+
+Extended WFS and WCS configuration
+----------------------------------
+
+INSPIRE-specific configuration is accessed on the main :ref:`services_webadmin_wfs` and :ref:`services_webadmin_wcs` pages in the :ref:`web_admin`. These are accessed by clicking on the :guilabel:`WFS` and :guilabel:`WCS` links on the sidebar respectively.
+
+.. note:: You must be logged in as an administrator to edit WFS configuration.
+
+Once on the WFS or WCS configuration page, there will be a block titled :guilabel:`INSPIRE`. If you enable the checkbox shown above, this section will show the following additional settings:
+
+* :guilabel:`Language` combo box, for setting the Supported, Default, and Response languages
+* :guilabel:`Other Supported Languages` area for setting Supported Languages
+* :guilabel:`Service Metadata URL` field, a URL containing the location of the metadata associated with the WFS or WCS
+* :guilabel:`Service Metadata Type` combo box, for detailing whether the metadata came from a CSW (Catalog Service) or a standalone metadata file
+* :guilabel:`Spatial dataset identifiers` table, where you can specify a code (mandatory), a namespace (optional) and a metadata URL (optional) for each spatial data set the WFS or WCS is offering
+
+.. figure:: images/inspire_wfs.png
+ :align: center
+
+ *INSPIRE-related options*
+
+After clicking :guilabel:`Submit` on this page, any changes will be immediately reflected in the WFS 1.1 and WFS 2.0 or WCS 2.0 capabilities documents as appropriate.
+
+.. note:: The :guilabel:`Service Metadata URL` field and at least one :guilabel:`Spatial dataset identifiers` entry are mandatory, so you will not be allowed to submit the page without these.
+
+.. note:: The :guilabel:`Service Metadata Type` combo box only allows you to select the appropriate MIME type for a CSW response or standalone metadata file, or to omit a value altogether. If you think other values would be useful you could raise the issue on the :ref:`GeoServer mailing list `. In the meantime it is possible to manually edit the created configuration files as a workaround.
+
+Extended WFS and WCS Capabilities
+---------------------------------
+
+.. note:: The INSPIRE directive is relevant to WFS 1.1 and 2.0 and WCS 2.0 only, so please make sure that you are viewing the correct capabilities document.
+
+The WFS and WCS capabilities documents will contain two additional entries in the ``xsi:schemaLocation`` of the root element tag once the INSPIRE extension is installed:
+
+* ``https://inspire.ec.europa.eu/schemas/common/1.0/common.xsd``
+* ``https://inspire.ec.europa.eu/schemas/inspire_dls/1.0/inspire_dls.xsd``
+
+If you have enabled the check box to create the INSPIRE ExtendedCapabilities element and entered the values described in the previous section then there will also be an additional ExtendedCapabilities block with the following information:
+
+* Metadata URL and MIME type
+* Supported Language(s)
+* Response Language
+* Spatial data identifier(s)
+
+With the example values shown in the above configuration panel, this block would contain the following content::
+
+   <inspire_dls:ExtendedCapabilities>
+     <inspire_common:MetadataUrl>
+       <inspire_common:URL>http://mysite.org/csw?SERVICE=CSW&amp;REQUEST=GetRecord</inspire_common:URL>
+       <inspire_common:MediaType>application/vnd.iso.19139+xml</inspire_common:MediaType>
+     </inspire_common:MetadataUrl>
+     <inspire_common:SupportedLanguages>
+       <inspire_common:DefaultLanguage>
+         <inspire_common:Language>eng</inspire_common:Language>
+       </inspire_common:DefaultLanguage>
+       <inspire_common:SupportedLanguage>
+         <inspire_common:Language>fre</inspire_common:Language>
+       </inspire_common:SupportedLanguage>
+     </inspire_common:SupportedLanguages>
+     <inspire_common:ResponseLanguage>
+       <inspire_common:Language>eng</inspire_common:Language>
+     </inspire_common:ResponseLanguage>
+     <inspire_dls:SpatialDataSetIdentifier metadataURL="http://metadata.mysite.org/ds">
+       <inspire_common:Code>fc929094-8a30-2617-e044-002128a47908</inspire_common:Code>
+     </inspire_dls:SpatialDataSetIdentifier>
+   </inspire_dls:ExtendedCapabilities>
+
+The spatial data identifiers section is mandatory but cannot be filled with a default, so it is up to you to provide at least one spatial dataset identifier (see the INSPIRE download service technical guidelines for more information).
+
+Internationalization support
+----------------------------
+
+GeoServer offers the ability to configure the GetCapabilities response in multiple languages. Content in different languages can be requested by using the request parameter `Language`, e.g. `Language=eng`. At the time of writing, the following services support the parameter: WFS 2.0, WMS 1.1 and 1.3, WCS 2.0.
+
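+For example, a French capabilities document could be requested as follows (the host, port and service shown are illustrative)::
+
+   http://localhost:8080/geoserver/ows?service=WFS&version=2.0.0&request=GetCapabilities&Language=fre
+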
+At the time of writing the `INSPIRE Schemas `_ only allow 23 choices for :guilabel:`DefaultLanguage`. The GeoServer INSPIRE extension allows some other languages to be chosen. If you choose one of these your capabilities document won't be Schema valid but, as discussed in :geos:`issue 7388 <7388>`, the INSPIRE Schemas seem to be at fault.
+
+The language list available from the UI is defined in a classpath file named ``available_languages.properties`` with the following content::
+
+ bul=bg
+ cze=cs
+ dan=da
+ dut=nl
+ eng=en
+ est=et
+ fin=fi
+ fre=fr
+ hrv=hr
+ ice=is
+ ger=de
+ gle=ga
+ gre=el
+ gsw=de-CH
+ hun=hu
+ ita=it
+ lav=lv
+ lit=lt
+ mlt=mt
+ nor=nb
+ pol=pl
+ por=pt
+ rum=ro
+ slo=sk
+ slv=sl
+ spa=es
+ swe=sv
+
+The entries of the above list represent the available INSPIRE language codes matched with the corresponding ``ISO 639-1`` codes. The GeoServer internationalization support is based on OWS 2.0, and thus uses ISO codes internally. The INSPIRE module maps the INSPIRE names to ISO codes on the fly based on the above property file.
+The property file can be overridden by placing a properties file named ``available_languages.properties`` in the ``inspire`` directory inside the GeoServer data directory.
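+
+As a minimal sketch, and assuming the override fully replaces the default list, a file exposing only English and Italian could look like this::
+
+   # <data_dir>/inspire/available_languages.properties
+   eng=en
+   ita=it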
diff --git a/doc/en/user/source/extensions/jp2k/index.rst b/doc/en/user/source/extensions/jp2k/index.rst
index c85f76ebccd..5c334425116 100644
--- a/doc/en/user/source/extensions/jp2k/index.rst
+++ b/doc/en/user/source/extensions/jp2k/index.rst
@@ -1,37 +1,37 @@
-.. _jp2k_extension:
-
-JP2K Plugin
-============
-
-GeoServer can leverage the JP2K Geotools plugin to read JP2K coverage formats.
-In case you have a Kakadu license and you have built your set of native libraries,
-you will be able to access the JP2K data with higher performances leveraging on it.
-Otherwise you will use the standard SUN's JP2K.
-See :geotools:`GeoTools JP2K Plugin ` for further information.
-
-
-Installing Kakadu
-*****************
-
-In order for GeoServer to leverage on the Kakadu libraries, the Kakadu binaries must be
-installed through your host system's OS.
-
-If you are on Windows, make sure that the Kakadu DLL files are on your PATH.
-If you are on Linux, be sure to set the LD_LIBRARY_PATH environment variable to be the folder
-where the SOs are extracted.
-
-
-Once these steps have been completed, restart GeoServer.
-If done correctly, new data formats will be in the Raster Data Sources list when creating a new data store:
-
-
-.. figure:: images/datasets.png
- :align: center
-
- *Raster Data Source*
-
-
-.. figure:: images/jp2k.png
- :align: center
-
- *Configuring a JP2K data store*
+.. _jp2k_extension:
+
+JP2K Plugin
+============
+
+GeoServer can leverage the GeoTools JP2K plugin to read JP2K coverage formats.
+If you have a Kakadu license and have built the native libraries,
+you will be able to access JP2K data with higher performance.
+Otherwise the standard Sun JP2K reader will be used.
+See :geotools:`GeoTools JP2K Plugin ` for further information.
+
+
+Installing Kakadu
+*****************
+
+In order for GeoServer to leverage the Kakadu libraries, the Kakadu binaries must be
+installed on the host operating system.
+
+If you are on Windows, make sure that the Kakadu DLL files are on your PATH.
+If you are on Linux, be sure to set the LD_LIBRARY_PATH environment variable to the folder
+where the shared libraries (.so files) are extracted, for example as shown below.
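+
+A minimal sketch, assuming the Kakadu shared libraries were extracted to an illustrative path::
+
+   export LD_LIBRARY_PATH=/opt/kakadu/lib:$LD_LIBRARY_PATH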
+
+
+Once these steps have been completed, restart GeoServer.
+If done correctly, new data formats will be in the Raster Data Sources list when creating a new data store:
+
+
+.. figure:: images/datasets.png
+ :align: center
+
+ *Raster Data Source*
+
+
+.. figure:: images/jp2k.png
+ :align: center
+
+ *Configuring a JP2K data store*
diff --git a/doc/en/user/source/extensions/monitoring/audit.rst b/doc/en/user/source/extensions/monitoring/audit.rst
index 6d25d5bfc98..9cf00de6757 100644
--- a/doc/en/user/source/extensions/monitoring/audit.rst
+++ b/doc/en/user/source/extensions/monitoring/audit.rst
@@ -1,137 +1,137 @@
-.. _monitor_audit:
-
-Audit Logging
-=============
-
-The history mode logs all requests into a database. This can put a very significant strain
-on the database and can lead to insertion issues as the request table begins to host
-millions of records.
-
-As an alternative to the history mode it's possible to enable the auditing logger, which will log
-the details of each request in a file, which is periodically rolled. Secondary applications can
-then process these log files and built ad-hoc summaries off line.
-
-Configuration
--------------
-
-The ``monitor.properties`` file can contain the following items to enable and configure file auditing::
-
- audit.enabled=true
- audit.path=/path/to/the/logs/directory
- audit.roll_limit=20
-
-The ``audit.enable`` is used to turn on the logger (it is off by default).
-The ``audit.path`` is the directory where the log files will be created.
-The ``audit.roll_limit`` is the number of requests logged into a file before rolling happens.
-The files are also automatically rolled at the beginning of each day.
-
-In clustered installations with a shared data directory the audit path will need to be different
-for each node. In this case it's possible to specify the audit path by using a JVM system variable,
-add the following to the JVM startup options and it will override whatever is specified in
-``monitor.properties``:
-
- -DGEOSERVER_AUDIT_PATH=/path/to/the/logs/directory
-
-Log Files
----------
-
-The log directory will contain a number of log files following the ``geoserver_audit_yyyymmdd_nn.log``
-pattern. The ``nn`` is increased at each roll of the file. The contents of the log directory will look like::
-
- geoserver_audit_20110811_2.log
- geoserver_audit_20110811_3.log
- geoserver_audit_20110811_4.log
- geoserver_audit_20110811_5.log
- geoserver_audit_20110811_6.log
- geoserver_audit_20110811_7.log
- geoserver_audit_20110811_8.log
-
-By default each log file contents will be a xml document looking like the following::
-
-
-
-
- WMS
- 1.1.1
- GetMap
-
- GeoSolutions:elba-deparea
- 4
- 0
- /GeoSolutions/wms
- LAYERS=GeoSolutions:elba-deparea&STYLES=&FORMAT=image/png&TILED=true&TILESORIGIN=9.916,42.312&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&EXCEPTIONS=application/vnd.ogc.se_inimage&SRS=EPSG:4326&BBOX=9.58375,42.64425,9.916,42.9765&WIDTH=256&HEIGHT=256
- GET
- 2011-08-11T20:19:28.277Z
- 2011-08-11T20:19:28.29Z
- 13
- 192.168.1.5
- 192.168.1.5
- demo1.geo-solutions.it
- admin
- 200
- 1670
- image/png
- false
-
- ...
-
-
-Customizing Log Contents
-------------------------
-
-The log contents are driven by three FreeMarker templates.
-
-``header.ftl`` is used once when a new log file is created to form the first few lines of the file.
-The default header template is::
-
-
-
-
-``content.ftl`` is used to write out the request details. The default template dumps all the known fields about the request::
-
- <#escape x as x?xml>
-
- ${service!""}
- ${owsVersion!""}
- ${operation!""}
- ${subOperation!""}
- ${resourcesList!""}
- ${resourcesProcessingTimeList!""}
- ${labellingProcessingTime!""}
- ${path!""}
- ${queryString!""}
- <#if bodyAsString??>
-
- ${bodyAsString}
-
- #if>
- ${httpMethod!""}
- ${startTime?datetime?iso_utc_ms}
- ${endTime?datetime?iso_utc_ms}
- ${totalTime}
- ${remoteAddr!""}
- ${remoteHost!""}
- ${host}
- ${remoteUser!""}
- ${responseStatus!""}
- ${responseLength?c}
- ${responseContentType!""}
- ${cacheResult!""}
- ${missReason!""}
- <#if error??>
- true
- ${errorMessage!""}
- <#else>
- false
- #if>
-
- #escape>
-
-
-``footer.ftl`` is executed just once when the log file is closed to build the last few lines of the file.
-The default footer template is::
-
-
-
-The administrator is free to provide alternate templates, they can be placed in the same directory
+.. _monitor_audit:
+
+Audit Logging
+=============
+
+The history mode logs all requests into a database. This can put a very significant strain
+on the database and can lead to insertion issues as the request table begins to host
+millions of records.
+
+As an alternative to the history mode it's possible to enable the auditing logger, which will log
+the details of each request to a file that is periodically rolled. Secondary applications can
+then process these log files and build ad-hoc summaries offline.
+
+Configuration
+-------------
+
+The ``monitor.properties`` file can contain the following items to enable and configure file auditing::
+
+ audit.enabled=true
+ audit.path=/path/to/the/logs/directory
+ audit.roll_limit=20
+
+The ``audit.enabled`` property is used to turn on the logger (it is off by default).
+The ``audit.path`` is the directory where the log files will be created.
+The ``audit.roll_limit`` is the number of requests logged into a file before rolling happens.
+The files are also automatically rolled at the beginning of each day.
+
+In clustered installations with a shared data directory the audit path will need to be different
+for each node. In this case it's possible to specify the audit path using a JVM system property:
+add the following to the JVM startup options and it will override whatever is specified in
+``monitor.properties``::
+
+ -DGEOSERVER_AUDIT_PATH=/path/to/the/logs/directory
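+
+For example, on a Tomcat-style install this could go in a per-node ``setenv.sh`` (the path shown is illustrative)::
+
+   export JAVA_OPTS="$JAVA_OPTS -DGEOSERVER_AUDIT_PATH=/var/log/geoserver/audit/node1"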
+
+Log Files
+---------
+
+The log directory will contain a number of log files following the ``geoserver_audit_yyyymmdd_nn.log``
+pattern. The ``nn`` is increased at each roll of the file. The contents of the log directory will look like::
+
+ geoserver_audit_20110811_2.log
+ geoserver_audit_20110811_3.log
+ geoserver_audit_20110811_4.log
+ geoserver_audit_20110811_5.log
+ geoserver_audit_20110811_6.log
+ geoserver_audit_20110811_7.log
+ geoserver_audit_20110811_8.log
+
+By default the contents of each log file will be an XML document looking like the following::
+
+
+
+
+ WMS
+ 1.1.1
+ GetMap
+
+ GeoSolutions:elba-deparea
+ 4
+ 0
+ /GeoSolutions/wms
+ LAYERS=GeoSolutions:elba-deparea&STYLES=&FORMAT=image/png&TILED=true&TILESORIGIN=9.916,42.312&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&EXCEPTIONS=application/vnd.ogc.se_inimage&SRS=EPSG:4326&BBOX=9.58375,42.64425,9.916,42.9765&WIDTH=256&HEIGHT=256
+ GET
+ 2011-08-11T20:19:28.277Z
+ 2011-08-11T20:19:28.29Z
+ 13
+ 192.168.1.5
+ 192.168.1.5
+ demo1.geo-solutions.it
+ admin
+ 200
+ 1670
+ image/png
+ false
+
+ ...
+
+
+Customizing Log Contents
+------------------------
+
+The log contents are driven by three FreeMarker templates.
+
+``header.ftl`` is used once when a new log file is created to form the first few lines of the file.
+The default header template is::
+
+
+
+
+``content.ftl`` is used to write out the request details. The default template dumps all the known fields about the request::
+
+ <#escape x as x?xml>
+
+ ${service!""}
+ ${owsVersion!""}
+ ${operation!""}
+ ${subOperation!""}
+ ${resourcesList!""}
+ ${resourcesProcessingTimeList!""}
+ ${labellingProcessingTime!""}
+ ${path!""}
+ ${queryString!""}
+ <#if bodyAsString??>
+
+ ${bodyAsString}
+
+ #if>
+ ${httpMethod!""}
+ ${startTime?datetime?iso_utc_ms}
+ ${endTime?datetime?iso_utc_ms}
+ ${totalTime}
+ ${remoteAddr!""}
+ ${remoteHost!""}
+ ${host}
+ ${remoteUser!""}
+ ${responseStatus!""}
+ ${responseLength?c}
+ ${responseContentType!""}
+ ${cacheResult!""}
+ ${missReason!""}
+ <#if error??>
+ true
+ ${errorMessage!""}
+ <#else>
+ false
+ #if>
+
+ #escape>
+
+
+``footer.ftl`` is executed just once when the log file is closed to build the last few lines of the file.
+The default footer template is::
+
+
+
+The administrator is free to provide alternate templates; they can be placed in the same directory
as ``monitor.properties``, with the same names as above. GeoServer will pick them up automatically.
\ No newline at end of file
diff --git a/doc/en/user/source/extensions/monitoring/index.rst b/doc/en/user/source/extensions/monitoring/index.rst
index 8c1bbb85ee9..d766b8a05a1 100644
--- a/doc/en/user/source/extensions/monitoring/index.rst
+++ b/doc/en/user/source/extensions/monitoring/index.rst
@@ -1,25 +1,25 @@
-.. _monitor_extension:
-
-Monitoring
-==========
-
-The monitor extension tracks requests made against a GeoServer instance. With the
-extension request data can be persisted to a database, used to generate simple reports
-, and routed to a customized request audit log.
-
-To get the extension proceed to :ref:`monitor_installation`. To learn more about how
-it works jump to the :ref:`monitor_overview` section.
-
-
-.. toctree::
- :maxdepth: 2
-
- installation/
- overview/
- reference/
- configuration/
- audit/
- query/
- geoip/
-
-
+.. _monitor_extension:
+
+Monitoring
+==========
+
+The monitor extension tracks requests made against a GeoServer instance. With the
+extension, request data can be persisted to a database, used to generate simple reports,
+and routed to a customized request audit log.
+
+To get the extension proceed to :ref:`monitor_installation`. To learn more about how
+it works jump to the :ref:`monitor_overview` section.
+
+
+.. toctree::
+ :maxdepth: 2
+
+ installation/
+ overview/
+ reference/
+ configuration/
+ audit/
+ query/
+ geoip/
+
+
diff --git a/doc/en/user/source/extensions/ogr.rst b/doc/en/user/source/extensions/ogr.rst
index a8c5449bd9e..7c12d220a3f 100644
--- a/doc/en/user/source/extensions/ogr.rst
+++ b/doc/en/user/source/extensions/ogr.rst
@@ -1,214 +1,214 @@
-.. _ogr_extension:
-
-OGR based WFS Output Format
-============================
-
-The ogr2ogr based output format leverages the availability of the ogr2ogr command to allow the generation of more output formats than GeoServer can natively produce.
-The basics idea is to dump to the file system a file that ogr2ogr can translate, invoke it, zip and return the output of the translation.
-
-Out of the box behaviour
-------------------------
-
-Out of the box the plugin assumes the following:
-
-* ogr2ogr is available in the path
-* the GDAL_DATA variable is pointing to the GDAL data directory (which stores the spatial reference information for GDAL)
-
-In the default configuration the following formats are supported:
-
-* MapInfo in TAB format
-* MapInfo in MIF format
-* Un-styled KML
-* CSV (without geometry data dumps)
-
-The list might be shorter if ogr2ogr has not been built with support for the above formats.
-
-Once installed in GeoServer four new GetFeature output formats will be available, in particular, ``OGR-TAB``, ``OGR-MIF``, ``OGR-KML``, ``OGR-CSV``.
-
-ogr2ogr conversion abilities
-----------------------------
-
-The ogr2ogr utility is usually able to convert more formats than the default setup of this output format allows for, but the exact list depends on how the utility was built from sources. To get a full list of the formats available by your ogr2ogr build just run::
-
- ogr2ogr --help
-
-and you'll get the full set of options usable by the program, along with the supported formats. For example, the above produces the following output using the FWTools 2.2.8 distribution (which includes ogr2ogr among other useful information and conversion tools)::
-
- Usage: ogr2ogr [--help-general] [-skipfailures] [-append] [-update] [-gt n]
- [-select field_list] [-where restricted_where]
- [-sql ]
- [-spat xmin ymin xmax ymax] [-preserve_fid] [-fid FID]
- [-a_srs srs_def] [-t_srs srs_def] [-s_srs srs_def]
- [-f format_name] [-overwrite] [[-dsco NAME=VALUE] ...]
- [-segmentize max_dist]
- dst_datasource_name src_datasource_name
- [-lco NAME=VALUE] [-nln name] [-nlt type] [layer [layer ...]]
-
- -f format_name: output file format name, possible values are:
- -f "ESRI Shapefile"
- -f "MapInfo File"
- -f "TIGER"
- -f "S57"
- -f "DGN"
- -f "Memory"
- -f "BNA"
- -f "CSV"
- -f "GML"
- -f "GPX"
- -f "KML"
- -f "GeoJSON"
- -f "Interlis 1"
- -f "Interlis 2"
- -f "GMT"
- -f "SQLite"
- -f "ODBC"
- -f "PostgreSQL"
- -f "MySQL"
- -f "Geoconcept"
- -append: Append to existing layer instead of creating new if it exists
- -overwrite: delete the output layer and recreate it empty
- -update: Open existing output datasource in update mode
- -select field_list: Comma-delimited list of fields from input layer to
- copy to the new layer (defaults to all)
- -where restricted_where: Attribute query (like SQL WHERE)
- -sql statement: Execute given SQL statement and save result.
- -skipfailures: skip features or layers that fail to convert
- -gt n: group n features per transaction (default 200)
- -spat xmin ymin xmax ymax: spatial query extents
- -segmentize max_dist: maximum distance between 2 nodes.
- Used to create intermediate points
- -dsco NAME=VALUE: Dataset creation option (format specific)
- -lco NAME=VALUE: Layer creation option (format specific)
- -nln name: Assign an alternate name to the new layer
- -nlt type: Force a geometry type for new layer. One of NONE, GEOMETRY,
- POINT, LINESTRING, POLYGON, GEOMETRYCOLLECTION, MULTIPOINT,
- MULTIPOLYGON, or MULTILINESTRING. Add "25D" for 3D layers.
- Default is type of source layer.
- -a_srs srs_def: Assign an output SRS
- -t_srs srs_def: Reproject/transform to this SRS on output
- -s_srs srs_def: Override source SRS
-
- Srs_def can be a full WKT definition (hard to escape properly),
- or a well known definition (ie. EPSG:4326) or a file with a WKT
- definition.
-
-The full list of formats that ogr2ogr is able to support is available on the `OGR site `_. Mind that this output format can handle only outputs that are file based and that do support creation. So, for example, you won't be able to use the Postgres output (since it's database based) or the ArcInfo binary coverage (creation not supported).
-
-Customisation
--------------
-
-If ogr2ogr is not available in the default path, the GDAL_DATA is not set, or if the output formats needs tweaking, a ``ogr2ogr.xml`` file can be put in the root of the GeoServer data directory to customize the output format.
-
-The default GeoServer configuration is equivalent to the following xml file:
-
-.. code-block:: xml
-
-
- ogr2ogr
-
-
-
- MapInfo File
- OGR-TAB
- .tab
-
-
- MapInfo File
- OGR-MIF
- .mif
-
-
-
-
- CSV
- OGR-CSV
- .csv
- true
- text/csv
-
-
- KML
- OGR-KML
- .kml
- true
- application/vnd.google-earth.kml
-
-
-
-
-The file showcases all possible usage of the configuration elements:
-
-* ``ogr2ogrLocation`` can be just ogr2ogr if the command is in the path, otherwise it should be the full path to the executable. For example, on a Windows box with FWTools installed it might be::
-
- c:\Programmi\FWTools2.2.8\bin\ogr2ogr.exe
-
-* ``gdalData`` must point to the GDAL data directory. For example, on a Windows box with FWTools installed it might be::
-
- c:\Programmi\FWTools2.2.8\data
-
-* ``Format`` defines a single format, which is defined by the following tags:
-
- * ``ogrFormat``: the name of the format to be passed to ogr2ogr with the -f option (it's case sensitive).
- * ``formatName``: is the name of the output format as advertised by GeoServer
- * ``fileExtension``: is the extension of the file generated after the translation, if any (can be omitted)
- * ``option``: can be used to add one or more options to the ogr2ogr command line. As you can see by the MIF example, each item must be contained in its own tag. You can get a full list of options by running ogr2ogr --help or by visiting the ogr2ogr web page. Also consider that each format supports specific creation options, listed in the description page for each format (for example, here is the MapInfo one).
- * ``singleFile`` (since 2.0.3): if true the output of the conversion is supposed to be a single file that can be streamed directly back without the need to wrap it into a zip file
- * ``mimeType`` (since 2.0.3): the mime type of the file returned when using ``singleFile``. If not specified ``application/octet-stream`` will be used as a default.
-
-OGR based WPS Output Format
-===========================
-
-The OGR based WPS output format provides the ability to turn feature collection (vector layer) output types into formats supported by OGR,
-using the same configuration and same machinery provided by the OGR WFS output format (which should also be installed for the WPS portion to work).
-
-Unlike the WFS case the WPS output formats are receiving different treatment in WPS responses depending on whether they are binary, text, or xml, when the Execute response
-style chosen by the client is "document":
-
-* Binary types need to be base64 encoded for XML embedding
-* Text types need to be included inside a CDATA section
-* XML types can be integrated in the response as-is
-
-In order to understand the nature of the output format a new optional configuration element, ````, can
-be added to the ``ogr2ogr.xml`` configuration file in order to specify the output nature.
-The possible values are ``binary``, ``text``, ``xml``, in case the value is missing, ``binary`` is assumed.
-Here is an example showing all possible combinations:
-
-.. code-block:: xml
-
-
- ogr2ogr
-
-
-
- MapInfo File
- OGR-TAB
- .tab
- binary
-
-
- MapInfo File
- OGR-MIF
- .mif
-
-
-
-
- CSV
- OGR-CSV
- .csv
- true
- text/csv
-
-
- text
-
-
- KML
- OGR-KML
- .kml
- true
- application/vnd.google-earth.kml
- xml
-
-
-
+.. _ogr_extension:
+
+OGR based WFS Output Format
+============================
+
+The ogr2ogr based output format leverages the availability of the ogr2ogr command to allow the generation of more output formats than GeoServer can natively produce.
+The basic idea is to dump to the file system a file that ogr2ogr can translate, invoke ogr2ogr, then zip and return the output of the translation.
+
+Out of the box behaviour
+------------------------
+
+Out of the box the plugin assumes the following:
+
+* ogr2ogr is available in the path
+* the GDAL_DATA variable is pointing to the GDAL data directory (which stores the spatial reference information for GDAL)
+
+In the default configuration the following formats are supported:
+
+* MapInfo in TAB format
+* MapInfo in MIF format
+* Un-styled KML
+* CSV (without geometry data dumps)
+
+The list might be shorter if ogr2ogr has not been built with support for the above formats.
+
+Once installed in GeoServer, four new GetFeature output formats will be available: ``OGR-TAB``, ``OGR-MIF``, ``OGR-KML`` and ``OGR-CSV``.
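+
+For example, a sketch of a GetFeature request using one of these formats (the host and layer name are illustrative)::
+
+   http://localhost:8080/geoserver/wfs?service=WFS&version=1.0.0&request=GetFeature&typeName=topp:states&outputFormat=OGR-KML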
+
+ogr2ogr conversion abilities
+----------------------------
+
+The ogr2ogr utility is usually able to convert more formats than the default setup of this output format allows for, but the exact list depends on how the utility was built from source. To get a full list of the formats supported by your ogr2ogr build just run::
+
+ ogr2ogr --help
+
+and you'll get the full set of options usable by the program, along with the supported formats. For example, the above produces the following output using the FWTools 2.2.8 distribution (which includes ogr2ogr among other useful information and conversion tools)::
+
+ Usage: ogr2ogr [--help-general] [-skipfailures] [-append] [-update] [-gt n]
+ [-select field_list] [-where restricted_where]
+ [-sql ]
+ [-spat xmin ymin xmax ymax] [-preserve_fid] [-fid FID]
+ [-a_srs srs_def] [-t_srs srs_def] [-s_srs srs_def]
+ [-f format_name] [-overwrite] [[-dsco NAME=VALUE] ...]
+ [-segmentize max_dist]
+ dst_datasource_name src_datasource_name
+ [-lco NAME=VALUE] [-nln name] [-nlt type] [layer [layer ...]]
+
+ -f format_name: output file format name, possible values are:
+ -f "ESRI Shapefile"
+ -f "MapInfo File"
+ -f "TIGER"
+ -f "S57"
+ -f "DGN"
+ -f "Memory"
+ -f "BNA"
+ -f "CSV"
+ -f "GML"
+ -f "GPX"
+ -f "KML"
+ -f "GeoJSON"
+ -f "Interlis 1"
+ -f "Interlis 2"
+ -f "GMT"
+ -f "SQLite"
+ -f "ODBC"
+ -f "PostgreSQL"
+ -f "MySQL"
+ -f "Geoconcept"
+ -append: Append to existing layer instead of creating new if it exists
+ -overwrite: delete the output layer and recreate it empty
+ -update: Open existing output datasource in update mode
+ -select field_list: Comma-delimited list of fields from input layer to
+ copy to the new layer (defaults to all)
+ -where restricted_where: Attribute query (like SQL WHERE)
+ -sql statement: Execute given SQL statement and save result.
+ -skipfailures: skip features or layers that fail to convert
+ -gt n: group n features per transaction (default 200)
+ -spat xmin ymin xmax ymax: spatial query extents
+ -segmentize max_dist: maximum distance between 2 nodes.
+ Used to create intermediate points
+ -dsco NAME=VALUE: Dataset creation option (format specific)
+ -lco NAME=VALUE: Layer creation option (format specific)
+ -nln name: Assign an alternate name to the new layer
+ -nlt type: Force a geometry type for new layer. One of NONE, GEOMETRY,
+ POINT, LINESTRING, POLYGON, GEOMETRYCOLLECTION, MULTIPOINT,
+ MULTIPOLYGON, or MULTILINESTRING. Add "25D" for 3D layers.
+ Default is type of source layer.
+ -a_srs srs_def: Assign an output SRS
+ -t_srs srs_def: Reproject/transform to this SRS on output
+ -s_srs srs_def: Override source SRS
+
+ Srs_def can be a full WKT definition (hard to escape properly),
+ or a well known definition (ie. EPSG:4326) or a file with a WKT
+ definition.
+
+The full list of formats that ogr2ogr is able to support is available on the `OGR site `_. Note that this output format can handle only outputs that are file-based and that do support creation. So, for example, you won't be able to use the Postgres output (since it's database-based) or the ArcInfo binary coverage (creation not supported).
+
+Customisation
+-------------
+
+If ogr2ogr is not available in the default path, the GDAL_DATA variable is not set, or the output formats need tweaking, an ``ogr2ogr.xml`` file can be put in the root of the GeoServer data directory to customize the output format.
+
+The default GeoServer configuration is equivalent to the following xml file:
+
+.. code-block:: xml
+
+   <OgrConfiguration>
+     <ogr2ogrLocation>ogr2ogr</ogr2ogrLocation>
+     <!-- optionally: <gdalData>/path/to/gdal/data</gdalData> -->
+     <formats>
+       <Format>
+         <ogrFormat>MapInfo File</ogrFormat>
+         <formatName>OGR-TAB</formatName>
+         <fileExtension>.tab</fileExtension>
+       </Format>
+       <Format>
+         <ogrFormat>MapInfo File</ogrFormat>
+         <formatName>OGR-MIF</formatName>
+         <fileExtension>.mif</fileExtension>
+         <!-- each ogr2ogr command line item goes in its own option tag -->
+         <option>-dsco</option>
+         <option>FORMAT=MIF</option>
+       </Format>
+       <Format>
+         <ogrFormat>CSV</ogrFormat>
+         <formatName>OGR-CSV</formatName>
+         <fileExtension>.csv</fileExtension>
+         <singleFile>true</singleFile>
+         <mimeType>text/csv</mimeType>
+       </Format>
+       <Format>
+         <ogrFormat>KML</ogrFormat>
+         <formatName>OGR-KML</formatName>
+         <fileExtension>.kml</fileExtension>
+         <singleFile>true</singleFile>
+         <mimeType>application/vnd.google-earth.kml</mimeType>
+       </Format>
+     </formats>
+   </OgrConfiguration>
+
+The file showcases all possible uses of the configuration elements:
+
+* ``ogr2ogrLocation`` can be just ogr2ogr if the command is in the path, otherwise it should be the full path to the executable. For example, on a Windows box with FWTools installed it might be::
+
+ c:\Programmi\FWTools2.2.8\bin\ogr2ogr.exe
+
+* ``gdalData`` must point to the GDAL data directory. For example, on a Windows box with FWTools installed it might be::
+
+ c:\Programmi\FWTools2.2.8\data
+
+* ``Format`` defines a single format, which is described by the following tags (a sketch of a custom entry follows this list):
+
+ * ``ogrFormat``: the name of the format to be passed to ogr2ogr with the -f option (it's case sensitive).
+ * ``formatName``: is the name of the output format as advertised by GeoServer
+ * ``fileExtension``: is the extension of the file generated after the translation, if any (can be omitted)
+ * ``option``: can be used to add one or more options to the ogr2ogr command line. As you can see by the MIF example, each item must be contained in its own tag. You can get a full list of options by running ogr2ogr --help or by visiting the ogr2ogr web page. Also consider that each format supports specific creation options, listed in the description page for each format (for example, here is the MapInfo one).
+ * ``singleFile`` (since 2.0.3): if true the output of the conversion is supposed to be a single file that can be streamed directly back without the need to wrap it into a zip file
+ * ``mimeType`` (since 2.0.3): the mime type of the file returned when using ``singleFile``. If not specified ``application/octet-stream`` will be used as a default.
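+
+For example, a sketch of a custom ``Format`` entry exposing a GeoJSON output (the format name, extension and MIME type are illustrative, and assume your ogr2ogr build lists a GeoJSON driver):
+
+.. code-block:: xml
+
+   <Format>
+     <ogrFormat>GeoJSON</ogrFormat>
+     <formatName>OGR-GEOJSON</formatName>
+     <fileExtension>.json</fileExtension>
+     <singleFile>true</singleFile>
+     <mimeType>application/json</mimeType>
+   </Format>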
+
+OGR based WPS Output Format
+===========================
+
+The OGR based WPS output format provides the ability to turn feature collection (vector layer) output types into formats supported by OGR,
+using the same configuration and same machinery provided by the OGR WFS output format (which should also be installed for the WPS portion to work).
+
+Unlike the WFS case, the WPS output formats receive different treatment in WPS responses depending on whether they are binary, text, or xml, when the Execute response
+style chosen by the client is "document":
+
+* Binary types need to be base64 encoded for XML embedding
+* Text types need to be included inside a CDATA section
+* XML types can be integrated in the response as-is
+
+In order to understand the nature of the output format a new optional configuration element, ````, can
+be added to the ``ogr2ogr.xml`` configuration file in order to specify the output nature.
+The possible values are ``binary``, ``text`` and ``xml``; in case the value is missing, ``binary`` is assumed.
+Here is an example showing all possible combinations:
+
+.. code-block:: xml
+
+
+ ogr2ogr
+
+
+
+ MapInfo File
+ OGR-TAB
+ .tab
+ binary
+
+
+ MapInfo File
+ OGR-MIF
+ .mif
+
+
+
+
+ CSV
+ OGR-CSV
+ .csv
+ true
+ text/csv
+
+
+ text
+
+
+ KML
+ OGR-KML
+ .kml
+ true
+ application/vnd.google-earth.kml
+ xml
+
+
+
diff --git a/doc/en/user/source/extensions/params-extractor/usage.rst b/doc/en/user/source/extensions/params-extractor/usage.rst
index de6e804093f..05b689d95e8 100644
--- a/doc/en/user/source/extensions/params-extractor/usage.rst
+++ b/doc/en/user/source/extensions/params-extractor/usage.rst
@@ -1,212 +1,212 @@
-.. _params_extractor_usage:
-
-Using the Parameters Extractor module
-=====================================
-
-This module allow us to entering specific request parameters as URL path fragments instead of using the query string.
-For example, we want to be able to apply a cql_filter using a URL in the following form::
-
- /geoserver////ows?service=WMS&version=1.3.0&request=GetMap
-
-As a simple example of usage, if the is something like::
-
- K_140M
-
-the URL would become::
-
- /geoserver///K_140M/ows?service=WMS&version=1.3.0&request=GetMap
-
-and this module will translate the URL to this new one::
-
- /geoserver///ows?service=WMS&version=1.3.0&request=GetMap&cql_filter=seq='K140M'
-
-This module is configured by a set of rules that will be applied to the incoming URLs. Note that a get capabilities result will include the original URL maintaining the extra filter.
-
-This module also gives the possibility to echo existing URL parameters to the result of a get capabilities result. As an example, by default the following get capabilities request (note the existing cql_filter parameter)::
-
- /geoserver/ows?service=wms&version=1.3.0&request=GetCapabilities&cql_filter=CFCC=%27D68%27
-
-will return a get capabilities document were the URLs will be of the type::
-
- /geoserver/ows?SERVICE=WMS&
-
-if this module is configured to echo an existing cql_filter parameter the result would be::
-
- /geoserver/ows?SERVICE=WMS&CQL_FILTER=CFCC%3D%27D68%27&
-
-This module is configured using three types of rules: echo parameter rules, basic rules and advanced rules. All of them can be managed in this module UI which is integrated in GeoServer UI.
-
-
-Echo Parameter Rules
------------------------------------
-
-Echo parameter rules are very simple, they allow us to define that a certain existing URL parameter should be echoed to a get capabilities result. This type of rules only required one mandatory parameter which is the name of the existing URL parameter that should be echoed to a get capabilities result.
-
-Example of an echo parameter rule:
-
-.. figure:: images/echo_rule.png
- :align: center
-
- *Example of a echo parameter rule defined in the UI*
-
-This rule will echo the cql_filter of this URL::
-
- /geoserver/ows?service=wms&version=1.3.0&request=GetCapabilities&cql_filter=CFCC=%27D68%27
-
-to the get capabilities result::
-
- /geoserver/ows?SERVICE=WMS&CQL_FILTER=CFCC%3D%27D68%27&
-
-Basic Rules
------------------------------------
-
-Basic rules allow us to handle simple uses cases where we only want to extract a parameter from the URL.
-
-A basic rule is defined by three mandatory attributes:
-
-.. list-table::
- :widths: 20 80
-
- * - **Attribute**
- - **Description**
- * - ``Position``
- - The position of the URL base path element to be selected
- * - ``Parameter``
- - The name of the parameter produced by this rule
- * - ``Transform``
- - Expression that defines the value of the parameter, use {PARAMETER} as a placeholder for the selected path element
-
-For commodity is also possible when defining this type of rules to configure that an existing parameter in the URL should be echoed to a get capabilities result.
-
-Example of a basic rule:
-
-.. figure:: images/basic_rule.png
- :align: center
-
- *Example of a basic rule defined in the UI*
-
-This rule will transform the URL::
-
- /geoserver/tiger/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap
-
-in::
-
- /geoserver/tiger/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27H11%27
-
-Advanced Rules
------------------------------------
-
-Advanced rules allow us to handle more complex uses cases where more flexibility is required.
-
-An advanced rule is defined by three mandatory attributes and four optional ones:
-
-.. list-table::
- :widths: 10 80 10
-
- * - **Attribute**
- - **Description**
- - **Mandatory**
- * - ``Match``
- - Regex match expression with groups, for example ^(?:/[^/]*){3}(/([^/]+)).*$ selects the URL base path third element
- - Yes
- * - ``Activation``
- - If defined this rule will only be applied to URLs that match this regex expression
- - No
- * - ``Parameter``
- - The name of the parameter produced by this rule
- - Yes
- * - ``Transform``
- - Expression that defines the value of the parameter, use $1 ... $n as placeholders for groups defined in the match expression
- - Yes
- * - ``Remove``
- - The match expression group to be removed from URL, by default 1
- - No
- * - ``Combine``
- - Defines how to combine parameter existing value ($1 existing value, $2 new value), by default the value is overridden
- - No
- * - ``Repeat``
- - If defined, Combine is applied not only once, but for every layer included in the LAYERS parameter, this allows filling parameters that require a value for each layer (e.g. STYLES or CQL_FILTER)
- - No
-
-For commodity is also possible when defining this type of rules to configure that an existing parameter in the URL should be echoed to a get capabilities result.
-
-Example of an advanced rule:
-
-.. figure:: images/advanced_rule.png
- :align: center
-
- *Example of an advanced rule defined in the UI*
-
-This rule will transform the URL::
-
- /geoserver/tiger/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27D68%27
-
-in::
-
- /geoserver/tiger/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27D68%27+or+CFCC%3D%27H11%27
-
-No that this rule will also echo an existing cql_filter parameter to the get capabilities result.
-
-Example of an advanced rule with repeat:
-
-.. figure:: images/advanced_rule_repeat.png
- :align: center
-
- *Example of an advanced rule with repeat defined in the UI*
-
-This rule will transform the URL::
-
- /geoserver/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&LAYERS=tiger,other
-
-in::
-
- /geoserver/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&LAYERS=tiger,otherCQL_FILTER=CFCC%3D%27D68%27%3BCFCC%3D%27H11%27
-
-Rules Management
------------------------------
-
-Rules can be managed and tested in the rules management UI. Besides the basic operations like add, remove and update is also possible to activate or deactivate rules. A deactivated rule will be ignored by this module.
-
-Follow a print screen of the rules management UI with all the rules previously defined:
-
-.. figure:: images/rules_management.png
- :align: center
-
- *Rules management UI*
-
-Note that the first rule (the advanced one) is not active.
-
-REST API
---------
-
-The rules and echo parameters can also be managed by means of a REST API found at
-``geoserver/rest/params-extractor``. Documentation for it is available in
-:api:`Swagger format `
-
-Intercepting the security filters chain
----------------------------------------
-By default, the params-extractor module does not interact with the security authentication filters.
-This is because the params-extractor filter is called later in the GeoServer filters chain.
-
-If you want params-extractor to work before the security filter chain, you have to configure it as
-a standard servlet filter in the GeoServer WEB-INF/web.xml file.
-
-This can be done adding the following to your current web.xml (immediately after the ``Set Character Encoding`` filter) and restarting GeoServer:
-
- .. code-block:: xml
-
-
-
- ...
-
- ExtractParams
- org.geoserver.params.extractor.Filter
-
- ...
-
- ExtractParams
- /*
-
- ...
-
+.. _params_extractor_usage:
+
+Using the Parameters Extractor module
+=====================================
+
+This module allows us to enter specific request parameters as URL path fragments instead of using the query string.
+For example, we want to be able to apply a cql_filter using a URL in the following form::
+
+ /geoserver/<workspace>/<layer>/<filter>/ows?service=WMS&version=1.3.0&request=GetMap
+
+As a simple example of usage, if the <filter> path fragment is something like::
+
+ K_140M
+
+the URL would become::
+
+ /geoserver/<workspace>/<layer>/K_140M/ows?service=WMS&version=1.3.0&request=GetMap
+
+and this module will translate the URL to this new one::
+
+ /geoserver/<workspace>/<layer>/ows?service=WMS&version=1.3.0&request=GetMap&cql_filter=seq='K140M'
+
+This module is configured by a set of rules that will be applied to the incoming URLs. Note that a get capabilities result will include the original URL maintaining the extra filter.
+
+This module also gives the possibility to echo existing URL parameters to a get capabilities result. As an example, by default the following get capabilities request (note the existing cql_filter parameter)::
+
+ /geoserver/ows?service=wms&version=1.3.0&request=GetCapabilities&cql_filter=CFCC=%27D68%27
+
+will return a get capabilities document where the URLs will be of the type::
+
+ /geoserver/ows?SERVICE=WMS&
+
+if this module is configured to echo an existing cql_filter parameter, the result would be::
+
+ /geoserver/ows?SERVICE=WMS&CQL_FILTER=CFCC%3D%27D68%27&
+
+This module is configured using three types of rules: echo parameter rules, basic rules and advanced rules. All of them can be managed in this module's UI, which is integrated into the GeoServer UI.
+
+
+Echo Parameter Rules
+-----------------------------------
+
+Echo parameter rules are very simple: they allow us to define that a certain existing URL parameter should be echoed to a get capabilities result. This type of rule only requires one mandatory parameter, which is the name of the existing URL parameter that should be echoed to a get capabilities result.
+
+Example of an echo parameter rule:
+
+.. figure:: images/echo_rule.png
+ :align: center
+
+ *Example of an echo parameter rule defined in the UI*
+
+This rule will echo the cql_filter of this URL::
+
+ /geoserver/ows?service=wms&version=1.3.0&request=GetCapabilities&cql_filter=CFCC=%27D68%27
+
+to the get capabilities result::
+
+ /geoserver/ows?SERVICE=WMS&CQL_FILTER=CFCC%3D%27D68%27&
+
+Basic Rules
+-----------------------------------
+
+Basic rules allow us to handle simple use cases where we only want to extract a parameter from the URL.
+
+A basic rule is defined by three mandatory attributes:
+
+.. list-table::
+ :widths: 20 80
+
+ * - **Attribute**
+ - **Description**
+ * - ``Position``
+ - The position of the URL base path element to be selected
+ * - ``Parameter``
+ - The name of the parameter produced by this rule
+ * - ``Transform``
+ - Expression that defines the value of the parameter, use {PARAMETER} as a placeholder for the selected path element
+
+For convenience, when defining this type of rule it is also possible to configure that an existing URL parameter should be echoed to a get capabilities result.
+
+Example of a basic rule:
+
+.. figure:: images/basic_rule.png
+ :align: center
+
+ *Example of a basic rule defined in the UI*
+
+This rule will transform the URL::
+
+ /geoserver/tiger/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap
+
+into::
+
+ /geoserver/tiger/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27H11%27
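+
+The following snippet sketches what a basic rule does with the ``Position``, ``Parameter`` and ``Transform`` attributes (illustrative Python, not the module's actual code; the position value refers to the path elements of this example URL):
+
+.. code-block:: python
+
+   from urllib.parse import urlencode
+
+   def apply_basic_rule(path, query, position, parameter, transform):
+       """Take the path element at `position` (1-based), turn it into a query
+       parameter using the transform template and drop it from the path."""
+       parts = path.strip("/").split("/")
+       value = parts.pop(position - 1)
+       query = dict(query)
+       query[parameter] = transform.replace("{PARAMETER}", value)
+       return "/" + "/".join(parts) + "?" + urlencode(query)
+
+   print(apply_basic_rule("/geoserver/tiger/wms/H11",
+                          {"SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetMap"},
+                          position=4, parameter="CQL_FILTER", transform="CFCC='{PARAMETER}'"))
+   # /geoserver/tiger/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27H11%27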
+
+Advanced Rules
+-----------------------------------
+
+Advanced rules handle more complex use cases where more flexibility is required.
+
+An advanced rule is defined by three mandatory attributes and four optional ones:
+
+.. list-table::
+ :widths: 10 80 10
+
+ * - **Attribute**
+ - **Description**
+ - **Mandatory**
+ * - ``Match``
+ - Regex match expression with groups; for example ``^(?:/[^/]*){3}(/([^/]+)).*$`` selects the third element of the URL base path
+ - Yes
+ * - ``Activation``
+ - If defined this rule will only be applied to URLs that match this regex expression
+ - No
+ * - ``Parameter``
+ - The name of the parameter produced by this rule
+ - Yes
+ * - ``Transform``
+ - Expression that defines the value of the parameter, use $1 ... $n as placeholders for groups defined in the match expression
+ - Yes
+ * - ``Remove``
+ - The match expression group to be removed from the URL, by default 1
+ - No
+ * - ``Combine``
+ - Defines how to combine the parameter with its existing value ($1 existing value, $2 new value); by default the existing value is overridden
+ - No
+ * - ``Repeat``
+ - If defined, Combine is applied not just once but for every layer included in the LAYERS parameter; this allows filling parameters that require a value for each layer (e.g. STYLES or CQL_FILTER)
+ - No
+
+For convenience, when defining this type of rule it is also possible to configure that an existing URL parameter should be echoed to a get capabilities result.
+
+Example of an advanced rule:
+
+.. figure:: images/advanced_rule.png
+ :align: center
+
+ *Example of an advanced rule defined in the UI*
+
+This rule will transform the URL::
+
+ /geoserver/tiger/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27D68%27
+
+into::
+
+ /geoserver/tiger/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27D68%27+or+CFCC%3D%27H11%27
+
+Note that this rule will also echo an existing cql_filter parameter to the get capabilities result.
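+
+To make the match/transform/remove/combine mechanics more concrete, here is a minimal sketch of the rule shown above (illustrative Python, not the module's actual code; the combine template is an assumption matching the result above):
+
+.. code-block:: python
+
+   import re
+   from urllib.parse import parse_qs, urlencode
+
+   MATCH = re.compile(r"^(?:/[^/]*){3}(/([^/]+)).*$")
+
+   def apply_advanced_rule(path, query_string, parameter="CQL_FILTER",
+                           transform="CFCC='$2'", combine="$1 or $2"):
+       m = MATCH.match(path)
+       if not m:
+           return path, query_string
+       new_path = path.replace(m.group(1), "", 1)            # Remove group 1 ("/H11")
+       value = transform.replace("$2", m.group(2))           # "CFCC='H11'"
+       params = parse_qs(query_string)
+       existing = params.get(parameter, [None])[0]
+       if existing is not None:                              # Combine with the existing value
+           value = combine.replace("$1", existing).replace("$2", value)
+       params[parameter] = [value]
+       return new_path, urlencode(params, doseq=True)
+
+   print(apply_advanced_rule("/geoserver/tiger/wms/H11",
+                             "SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC='D68'"))
+   # ('/geoserver/tiger/wms', 'SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&CQL_FILTER=CFCC%3D%27D68%27+or+CFCC%3D%27H11%27')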
+
+Example of an advanced rule with repeat:
+
+.. figure:: images/advanced_rule_repeat.png
+ :align: center
+
+ *Example of an advanced rule with repeat defined in the UI*
+
+This rule will transform the URL::
+
+ /geoserver/wms/H11?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&LAYERS=tiger,other
+
+into::
+
+ /geoserver/wms?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&LAYERS=tiger,other&CQL_FILTER=CFCC%3D%27D68%27%3BCFCC%3D%27H11%27
+
+Rules Management
+-----------------------------
+
+Rules can be managed and tested in the rules management UI. Besides the basic operations like add, remove and update, it is also possible to activate or deactivate rules. A deactivated rule is ignored by this module.
+
+Below is a screenshot of the rules management UI with all the rules previously defined:
+
+.. figure:: images/rules_management.png
+ :align: center
+
+ *Rules management UI*
+
+Note that the first rule (the advanced one) is not active.
+
+REST API
+--------
+
+The rules and echo parameters can also be managed by means of a REST API found at
+``geoserver/rest/params-extractor``. Documentation for it is available in
+:api:`Swagger format `
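+
+For scripted management, the rules resource can be accessed with any HTTP client; for example (the exact resource names and representations are best checked in the Swagger document referenced above, the ones below are assumptions):
+
+.. code-block:: python
+
+   import requests
+
+   BASE = "http://localhost:8080/geoserver/rest/params-extractor"
+
+   # List the configured rules as JSON, authenticating as an administrator
+   resp = requests.get(f"{BASE}/rules.json", auth=("admin", "geoserver"))
+   resp.raise_for_status()
+   print(resp.json())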
+
+Intercepting the security filters chain
+---------------------------------------
+By default, the params-extractor module does not interact with the security authentication filters.
+This is because the params-extractor filter is called later in the GeoServer filters chain.
+
+If you want params-extractor to work before the security filter chain, you have to configure it as
+a standard servlet filter in the GeoServer WEB-INF/web.xml file.
+
+This can be done by adding the following to your current web.xml (immediately after the ``Set Character Encoding`` filter) and restarting GeoServer:
+
+ .. code-block:: xml
+
+
+    <web-app>
+      ...
+      <filter>
+        <filter-name>ExtractParams</filter-name>
+        <filter-class>org.geoserver.params.extractor.Filter</filter-class>
+      </filter>
+      ...
+      <filter-mapping>
+        <filter-name>ExtractParams</filter-name>
+        <url-pattern>/*</url-pattern>
+      </filter-mapping>
+      ...
+    </web-app>
\ No newline at end of file
diff --git a/doc/en/user/source/extensions/wmts-multidimensional/usage.rst b/doc/en/user/source/extensions/wmts-multidimensional/usage.rst
index 2d2e1d5d55b..5877c00bffe 100644
--- a/doc/en/user/source/extensions/wmts-multidimensional/usage.rst
+++ b/doc/en/user/source/extensions/wmts-multidimensional/usage.rst
@@ -1,531 +1,531 @@
-.. _wmts_multidminensional_usage:
-
-WMTS Multidimensional usage
-===========================
-
-All described operations including is optional parameters and other extensions were implemented, only the the REST interfaces for the domain discovery operations were not implemented.
-
-The ``GetFeature`` operation only supports the profile GML 3.1 as feature info format ("application/gml+xml; version=3.1") and the ``GetHistogram`` operation only supports ``text/xml`` as output format.
-
-
-This module support well defined dimensions like elevation and time and also custom dimensions.
-
-GetCapabilities
----------------
-
-The default behavior of WMTS is to list in the capabilities document all the values available in a certain dimension, something like this:
-
-.. code-block:: xml
-
-
- elevation
- 0.0
- 0.0
- 200.0
- 400.0
- 600.0
- 800.0
- 1000.0
- 1200.0
- 1400.0
- 1600.0
- 1800.0
- 2000.0
- 3000.0
- 4000.0
- 5000.0
- 6000.0
- 7000.0
- 8000.0
- 9000.0
-
-
-This module will instead take into account the presentation mode selected by the user:
-
-.. figure:: images/layer_dimensions.png
- :align: center
-
- *Configuration of a layer dimensions.*
-
-With the presentation mode select to ``Continuous interval`` or ``Resolution and interval`` we will instead see something like this:
-
-.. code-block:: xml
-
-
- elevation
- 0.0
- 0.0--9000.0
-
-
-Descriptions for the new introduced operations and associated formats will also be added to the capabilities document.
-
-Operations
-----------
-
-This module adds three new operations to the WMTS service that are described in detail in this `document `_:
-
-.. list-table::
- :widths: 20 80
- :header-rows: 1
-
- * - Operation
- - Description
- * - DescribeDomains
- - Describes all the dimension domains in a compact document, along with the restricted bounding box of the two dimensional space intercepted by the request.
- * - GetDomainValues
- - Allows to page through domain values (supplements DescribeDomains in case the domain has too many values, and the client still wants to get all of them, one page at a time)
- * - GetHistogram
- - Given a scattered domain description and an interval, this operation divides the interval in regular buckets, and provides an item count for each bucket.
- * - GetFeature
- - Enumerate the actual dimension possible values combinations, returns a list of features along with dimension values using the same formats as the feature info operation ("application/gml+xml; version=3.1").
-
-Note that currently there is no restful implementations of this operations.
-
-DescribeDomains
-^^^^^^^^^^^^^^^
-
-This operation is useful to understand which domains are available in our layer dimensions and how they relate to each other. The parameters available for this operation are:
-
-.. list-table::
- :widths: 20 10 70
- :header-rows: 1
-
- * - Name
- - Mandatory
- - Description
- * - Service=WMTS
- - Yes
- - Service type identifier
- * - Request=DescribeDomains
- - Yes
- - Operation name
- * - Version=1.0.0
- - Yes
- - Standard and schema version for this operation
- * - Layer
- - Yes
- - Layer identifier
- * - TileMatrixSet
- - Yes
- - Tile matrix set identifier
- * - bbox=minx,miny,maxx,maxy
- - No
- - Bounding box corners (lower left, upper right) in CRS units
- * - DimensionIdentifier
- - No
- - At most one per dimension, a range described as min/max, restricting the domain of this dimension
- * - Domains
- - No
- - A comma separated list of domain names to be returned, in case only a subset is required. The space domain is identified by "bbox".
- * - ExpandLimit
- - No
- - A numerical value, greater or equal to zero. If the number of unique domain values is below ``ExpandLimit`` then the domain with be represented in full, as
- a comma separated list of values, otherwise in compact form, as ``start--end``. The server assumes a built-in limit of 200 in case not specified,
- and allows client to specify a value up to 10000, values can be tuned via the user interface, in the WMTS panel (server defaults) and on a layer
- by layer basis.
-
-.. figure:: images/expandLimitConfig.png
- :align: center
-
- *Configuration domain expansion limits.*
-
-
-
-The ``bbox`` parameter allows the client to restrict the ``DescribeDomains`` operation to a certain spatial area, by default the layer extent will be used.
-
-The ``DimensionIdentifier`` parameter can be used to restrict the domain values of a certain dimension, this is useful to answer questions like which elevations values are available in a specific day.
-
-A simple ``DescribeDomains`` request will look like this:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326
-
-and the result will be similar to this:
-
-.. code-block:: xml
-
-
-
-
-
-
- elevation
- 0.0,200.0,400.0,600.0,800.0,1000.0
- 6
-
-
- REFERENCE_TIME
- 2016-02-23T00:00:00.000Z,2016-02-24T00:00:00.000Z
- 2
-
-
- time
- 2016-02-23T03:00:00.000Z,2016-02-23T06:00:00.000Z
- 2
-
-
-
-From the information above we can see that we have three dimensions ``time``, ``elevation`` and ``REFERENCE_TIME`` and the respective domains values.
-
-Now let's see how elevations relate to time dimension by asking which elevations under 500.0 meters are available at time 2016-02-23T03:00:00.000Z:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z
-
-the result will be similar to this:
-
-.. code-block:: xml
-
-
-
-
-
-
- elevation
- 200.0
- 1
-
-
- REFERENCE_TIME
- 2016-02-23T00:00:00.000Z
- 1
-
-
- time
- 2016-02-23T03:00:00.000Z
- 1
-
-
-
-So for time 2016-02-23T03:00:00.000Z there is only values measured at 200.0 meters.
-
-In case only the space domain is of interest, the following request will do:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z&domains=bbox
-
-and the result will be similar to this:
-
-.. code-block:: xml
-
-
-
-
-
-
-
-GetDomainValues
-^^^^^^^^^^^^^^^
-
-This operation is useful to page through the values of a given domain, in case the "multidimensional" area of interest
-is too large for DescribeDomain to return them in a single shot.
-
-.. list-table::
- :widths: 20 10 70
- :header-rows: 1
-
- * - Name
- - Mandatory
- - Description
- * - Service=WMTS
- - Yes
- - Service type identifier
- * - Request=GetDomainValues
- - Yes
- - Operation name
- * - Version=1.0.0
- - Yes
- - Standard and schema version for this operation
- * - Layer
- - Yes
- - Layer identifier
- * - bbox=minx,miny,maxx,maxy
- - No
- - Bounding box corners (lower left, upper right) in CRS units
- * - DimensionIdentifier
- - No
- - At most one per dimension, a range described as min/max, restricting the domain of this dimension
- * - Domain
- - Yes
- - Name of the domain whose values will be returned (one cannot use "bbox", only single value dimensions can be enumerated by GetDomainValues, e.g., time, elevation).
- * - FromValue
- - No
- - Sets the beginning of domain enumeration, for paging purposes. It's not included in the result
- * - Sort
- - No
- - Can be "asc" or "desc", determines if the enumeration is from low to high, or from high to low
- * - Limit
- - No
- - Maximum number of values returned by this call. The server assumes a built-in limit of 1000 in case not specified,
- and allows client to specify a value up to 10000.
-
-For example, let's say a "elevation" domain has values 1,2,3 and 5, and that we are paging through
-it by pages of 2 elements. The client will start without providing a "fromValue", and will then continue
-using the last value of the previous page as a reference:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 1.0,2.0
- 2
-
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=2
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 2.0
- 3.0,5.0
- 2
-
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=5
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 5.0
-
- 0
-
-
-For elevations it might not be uncommon to iterate backwards, from the top-most elevation down to the lowest value. The interaction
-between client and server migth then look as follows:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&sort=desc
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 5.0,3.0
- 2
-
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=3&sort=desc
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 3.0
- 2.0,1.0
- 2
-
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=1&sort=desc
-
-.. code-block:: xml
-
-
- elevation
- 2
- asc
- 1.0
-
- 0
-
-
-The paging approach might seem odd for those used to using "limit" and "offset". The main reason it's done
-this way it's performance, paging through unique values via limit and offset means that the data source
-has to compute and collect the unique values that are not needed (the ones in previous pages) in order to
-find the ones in the current page. With large domains (typical of time series) this quickly becomes too
-slow for interactive usage, as one moves forward in the domain.
-
-By giving a starting point, the unneeded data points can be skipped via index and the distinct value
-computation can be performed only on the current page data, stopping it as soon as the desired number
-of results has been computed. With an index on the dimension being queries, this results in nearly
-constant response times, regardless of the page being requested.
-
-GetHistogram
-^^^^^^^^^^^^
-
-This operation can be used to provide information about the data distribution between the minimum and maximum values of a certain dimension.
-
-The parameters available for this operation are:
-
-.. list-table::
- :widths: 20 10 70
- :header-rows: 1
-
- * - Name
- - Mandatory
- - Description
- * - Service=WMTS
- - Yes
- - Service type identifier
- * - Request=GetHistogram
- - Yes
- - Operation name
- * - Version=1.0.0
- - Yes
- - Standard and schema version for this operation
- * - Layer
- - Yes
- - Layer identifier
- * - TileMatrixSet
- - Yes
- - Tile matrix set identifier
- * - BBOX=minx,miny,maxx,maxy
- - No
- - Bounding box corners (lower left, upper right) in CRS units
- * - DimensionIdentifier
- - No
- - At most one per dimension, a range described as min/max, restricting the domain of this dimension
- * - Histogram
- - Yes
- - Name of the dimension for which the histogram will be computed
- * - Resolution
- - No
- - Suggested size of the histogram bucket. Cannot be provided for enumerated dimensions, will use the period syntax for time (e.g. PT1H), a number for numeric dimensions, or auto to leave the decision to the server
- * - Format
- - No
- - The desired output format, default is text/html.
-
-The parameters common to the ``DescribeDomains`` operation work as already described above. Currently only the ``text/xml`` output format is supported.
-
-The following example request the histogram for time dimension with a resolution of 8 hours restricting elevations between 500.0 and 1000.0 meters:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=GetHistogram&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&histogram=time&resolution=PT8H&elevation=500.0/1000.0
-
-and the result will be similar to this:
-
-.. code-block:: xml
-
-
- time
- 2016-02-23T00:00:00.000Z/2016-02-25T00:00:00.000Z/PT8H
- 240,0,240,0,0,240
-
-
-Looking at the result we can conclude that measurements between 500.0 and 1000.0 meters are typically done during the night.
-
-The bucket matching is setup so that each one contains its first value, but not its last value (which is contained in the next bucket instead).
-This is important to understand the results. Say we have a dataset with regular elevations, from 0 to 100 with a step of 10, and the
-request calls for elevations between 0 and 20. Then the results will look something like follows:
-
-.. code-block:: xml
-
-
- elevation
- 0/30/10
- 5,3,8
-
-
-That is, there values catch the intervals [0,10[, [10, 20[, and [20, 30[ (to have a bucket for the images/features
-having elevation exactly matching 20). This will happen only if an extreme value if found, the same request
-filtering on elevations between 0 and 15 will return this instead:
-
-.. code-block:: xml
-
-
- elevation
- 0/20/10
- 5,3
-
-
-GetFeature
-^^^^^^^^^^
-
-This operation is capable to enumerate the actual possible values combinations. The output of this operation is similar to the output of the ``WFS 2.0 GetFeature`` operation which is a list of features along with dimension values using the same formats as the feature info operation. This output can be used to draw the features on a map for example.
-
-The parameters available for this operation are:
-
-.. list-table::
- :widths: 20 10 70
- :header-rows: 1
-
- * - Name
- - Mandatory
- - Description
- * - Service=WMTS
- - Yes
- - Service type identifier
- * - Request=GetFeature
- - Yes
- - Operation name
- * - Version=1.0.0
- - Yes
- - Standard and schema version for this operation
- * - Layer
- - Yes
- - Layer identifier
- * - TileMatrixSet
- - Yes
- - Tile matrix set identifier
- * - BBOX=minx,miny,maxx,maxy
- - No
- - Bounding box corners (lower left, upper right) in CRS units
- * - DimensionIdentifier
- - No
- - At most one per dimension, a range described as min/max, restricting the domain of this dimension
- * - Format
- - Yes
- - The desired output format
-
-The parameters common to the ``DescribeDomains`` operation work as already described above. Currently only the ``application/gml+xml; version=3.1`` output format is supported.
-
-Using the same restrictions parameters we used for the second request used as an example for the ``DescribeDomains`` operation a ``GetFeature`` request will look like this:
-
-.. code-block:: guess
-
- http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=GetFeature&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z
-
-and the result will be similar to this:
-
-.. code-block:: xml
-
-
-
-
-
-
-
- -180.125 -90.125 -180.125 89.875 179.875 89.875 179.875 -90.125 -180.125 -90.125
-
-
-
-
- 200.0
- 2016-02-23T03:00:00.000Z
- 2016-02-23T00:00:00.000Z
-
-
-
-Note how this result correlate with the correspondent ``DescribeDomains`` operation result.
+.. _wmts_multidminensional_usage:
+
+WMTS Multidimensional usage
+===========================
+
+All described operations, including their optional parameters and other extensions, are implemented; only the REST interfaces for the domain discovery operations are not implemented.
+
+The ``GetFeature`` operation only supports the profile GML 3.1 as feature info format ("application/gml+xml; version=3.1") and the ``GetHistogram`` operation only supports ``text/xml`` as output format.
+
+
+This module supports well-known dimensions such as elevation and time, as well as custom dimensions.
+
+GetCapabilities
+---------------
+
+The default behavior of WMTS is to list in the capabilities document all the values available in a certain dimension, something like this:
+
+.. code-block:: xml
+
+   <Dimension>
+     <ows:Identifier>elevation</ows:Identifier>
+     <Default>0.0</Default>
+     <Value>0.0</Value>
+     <Value>200.0</Value>
+     <Value>400.0</Value>
+     <Value>600.0</Value>
+     <Value>800.0</Value>
+     <Value>1000.0</Value>
+     <Value>1200.0</Value>
+     <Value>1400.0</Value>
+     <Value>1600.0</Value>
+     <Value>1800.0</Value>
+     <Value>2000.0</Value>
+     <Value>3000.0</Value>
+     <Value>4000.0</Value>
+     <Value>5000.0</Value>
+     <Value>6000.0</Value>
+     <Value>7000.0</Value>
+     <Value>8000.0</Value>
+     <Value>9000.0</Value>
+   </Dimension>
+
+This module will instead take into account the presentation mode selected by the user:
+
+.. figure:: images/layer_dimensions.png
+ :align: center
+
+ *Configuration of a layer's dimensions.*
+
+With the presentation mode set to ``Continuous interval`` or ``Resolution and interval`` we will instead see something like this:
+
+.. code-block:: xml
+
+   <Dimension>
+     <ows:Identifier>elevation</ows:Identifier>
+     <Default>0.0</Default>
+     <Value>0.0--9000.0</Value>
+   </Dimension>
+
+Descriptions of the newly introduced operations and associated formats are also added to the capabilities document.
+
+Operations
+----------
+
+This module adds four new operations to the WMTS service, described in detail in this `document `_:
+
+.. list-table::
+ :widths: 20 80
+ :header-rows: 1
+
+ * - Operation
+ - Description
+ * - DescribeDomains
+ - Describes all the dimension domains in a compact document, along with the restricted bounding box of the two dimensional space intercepted by the request.
+ * - GetDomainValues
+ - Allows paging through domain values (supplements DescribeDomains in case a domain has too many values and the client still wants to get all of them, one page at a time)
+ * - GetHistogram
+ - Given a scattered domain description and an interval, this operation divides the interval in regular buckets, and provides an item count for each bucket.
+ * - GetFeature
+ - Enumerates the actual possible dimension value combinations, returning a list of features along with dimension values, using the same formats as the feature info operation ("application/gml+xml; version=3.1").
+
+Note that currently there are no RESTful implementations of these operations.
+
+DescribeDomains
+^^^^^^^^^^^^^^^
+
+This operation is useful to understand which domains are available in our layer dimensions and how they relate to each other. The parameters available for this operation are:
+
+.. list-table::
+ :widths: 20 10 70
+ :header-rows: 1
+
+ * - Name
+ - Mandatory
+ - Description
+ * - Service=WMTS
+ - Yes
+ - Service type identifier
+ * - Request=DescribeDomains
+ - Yes
+ - Operation name
+ * - Version=1.0.0
+ - Yes
+ - Standard and schema version for this operation
+ * - Layer
+ - Yes
+ - Layer identifier
+ * - TileMatrixSet
+ - Yes
+ - Tile matrix set identifier
+ * - bbox=minx,miny,maxx,maxy
+ - No
+ - Bounding box corners (lower left, upper right) in CRS units
+ * - DimensionIdentifier
+ - No
+ - At most one per dimension, a range described as min/max, restricting the domain of this dimension
+ * - Domains
+ - No
+ - A comma separated list of domain names to be returned, in case only a subset is required. The space domain is identified by "bbox".
+ * - ExpandLimit
+ - No
+ - A numerical value, greater than or equal to zero. If the number of unique domain values is below ``ExpandLimit`` then the domain will be represented in full, as
+ a comma separated list of values, otherwise in compact form, as ``start--end``. The server assumes a built-in limit of 200 in case it is not specified,
+ and allows clients to specify a value up to 10000. These values can be tuned via the user interface, in the WMTS panel (server defaults) and on a layer
+ by layer basis.
+
+.. figure:: images/expandLimitConfig.png
+ :align: center
+
+ *Configuration of domain expansion limits.*
+
+
+
+The ``bbox`` parameter allows the client to restrict the ``DescribeDomains`` operation to a certain spatial area; by default the layer extent is used.
+
+The ``DimensionIdentifier`` parameter can be used to restrict the domain values of a certain dimension; this is useful to answer questions such as which elevation values are available on a specific day.
+
+A simple ``DescribeDomains`` request will look like this:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326
+
+and the result will be similar to this:
+
+.. code-block:: xml
+
+
+
+
+
+
+ elevation
+ 0.0,200.0,400.0,600.0,800.0,1000.0
+ 6
+
+
+ REFERENCE_TIME
+ 2016-02-23T00:00:00.000Z,2016-02-24T00:00:00.000Z
+ 2
+
+
+ time
+ 2016-02-23T03:00:00.000Z,2016-02-23T06:00:00.000Z
+ 2
+
+
+
+From the information above we can see that we have three dimensions, ``time``, ``elevation`` and ``REFERENCE_TIME``, and their respective domain values.
+
+Now let's see how elevations relate to the time dimension by asking which elevations under 500.0 meters are available at time 2016-02-23T03:00:00.000Z:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z
+
+the result will be similar to this:
+
+.. code-block:: xml
+
+
+
+
+
+
+ elevation
+ 200.0
+ 1
+
+
+ REFERENCE_TIME
+ 2016-02-23T00:00:00.000Z
+ 1
+
+
+ time
+ 2016-02-23T03:00:00.000Z
+ 1
+
+
+
+So for time 2016-02-23T03:00:00.000Z there are only values measured at 200.0 meters.
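+
+Scripting such a request is straightforward, since the dimension restrictions are just additional query string parameters named after the dimension. A minimal sketch (the endpoint and layer name are the same examples used above):
+
+.. code-block:: python
+
+   import requests
+
+   params = {
+       "REQUEST": "DescribeDomains",
+       "Version": "1.0.0",
+       "Layer": "some_layer",
+       "TileMatrixSet": "EPSG:4326",
+       "elevation": "0/500",                      # restrict the elevation dimension
+       "time": "2016-02-23T03:00:00.000Z",        # restrict the time dimension
+   }
+   resp = requests.get("http://localhost:8080/geoserver/gwc/service/wmts", params=params)
+   resp.raise_for_status()
+   print(resp.text)                               # the restricted domains document shown above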
+
+In case only the space domain is of interest, the following request will do:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=DescribeDomains&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z&domains=bbox
+
+and the result will be similar to this:
+
+.. code-block:: xml
+
+
+
+
+
+
+
+GetDomainValues
+^^^^^^^^^^^^^^^
+
+This operation is useful to page through the values of a given domain, in case the "multidimensional" area of interest
+is too large for DescribeDomains to return them in a single shot.
+
+.. list-table::
+ :widths: 20 10 70
+ :header-rows: 1
+
+ * - Name
+ - Mandatory
+ - Description
+ * - Service=WMTS
+ - Yes
+ - Service type identifier
+ * - Request=GetDomainValues
+ - Yes
+ - Operation name
+ * - Version=1.0.0
+ - Yes
+ - Standard and schema version for this operation
+ * - Layer
+ - Yes
+ - Layer identifier
+ * - bbox=minx,miny,maxx,maxy
+ - No
+ - Bounding box corners (lower left, upper right) in CRS units
+ * - DimensionIdentifier
+ - No
+ - At most one per dimension, a range described as min/max, restricting the domain of this dimension
+ * - Domain
+ - Yes
+ - Name of the domain whose values will be returned (one cannot use "bbox", only single value dimensions can be enumerated by GetDomainValues, e.g., time, elevation).
+ * - FromValue
+ - No
+ - Sets the beginning of domain enumeration, for paging purposes. It's not included in the result
+ * - Sort
+ - No
+ - Can be "asc" or "desc", determines if the enumeration is from low to high, or from high to low
+ * - Limit
+ - No
+ - Maximum number of values returned by this call. The server assumes a built-in limit of 1000 in case it is not specified,
+ and allows clients to specify a value up to 10000.
+
+For example, let's say an "elevation" domain has values 1, 2, 3 and 5, and that we are paging through
+it in pages of 2 elements. The client will start without providing a "fromValue", and will then continue
+using the last value of the previous page as a reference:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 1.0,2.0
+ 2
+
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=2
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 2.0
+ 3.0,5.0
+ 2
+
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=5
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 5.0
+
+ 0
+
+
+For elevations it might not be uncommon to iterate backwards, from the top-most elevation down to the lowest value. The interaction
+between client and server might then look as follows:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&sort=desc
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 5.0,3.0
+ 2
+
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=3&sort=desc
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 3.0
+ 2.0,1.0
+ 2
+
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?request=GetDomainValues&Version=1.0.0&Layer=sampleLayer&domain=elevation&limit=2&fromValue=1&sort=desc
+
+.. code-block:: xml
+
+
+ elevation
+ 2
+ asc
+ 1.0
+
+ 0
+
+
+The paging approach might seem odd for those used to using "limit" and "offset". The main reason it's done
+this way is performance: paging through unique values via limit and offset means that the data source
+has to compute and collect unique values that are not needed (the ones in previous pages) in order to
+find the ones in the current page. With large domains (typical of time series) this quickly becomes too
+slow for interactive usage, as one moves forward in the domain.
+
+By giving a starting point, the unneeded data points can be skipped via index and the distinct value
+computation can be performed only on the current page data, stopping it as soon as the desired number
+of results has been computed. With an index on the dimension being queried, this results in nearly
+constant response times, regardless of the page being requested.
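+
+A typical client-side paging loop therefore looks like the following sketch (illustrative Python; the response element names are assumptions based on the examples above):
+
+.. code-block:: python
+
+   import requests
+   import xml.etree.ElementTree as ET
+
+   WMTS = "http://localhost:8080/geoserver/gwc/service/wmts"
+
+   def iterate_domain(layer, domain, limit=1000, sort="asc"):
+       """Yield every value of `domain`, one page at a time, passing the last
+       value of the previous page as fromValue until an empty page is returned."""
+       from_value = None
+       while True:
+           params = {"REQUEST": "GetDomainValues", "Version": "1.0.0",
+                     "Layer": layer, "domain": domain, "limit": limit, "sort": sort}
+           if from_value is not None:
+               params["fromValue"] = from_value
+           doc = ET.fromstring(requests.get(WMTS, params=params).content)
+           # Look up the element holding the comma separated page of values
+           domain_text = next((el.text or "" for el in doc.iter()
+                               if el.tag.rsplit("}", 1)[-1] == "Domain"), "")
+           values = [v for v in domain_text.split(",") if v]
+           if not values:
+               return
+           yield from values
+           from_value = values[-1]
+
+   for elevation in iterate_domain("sampleLayer", "elevation", limit=2):
+       print(elevation)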
+
+GetHistogram
+^^^^^^^^^^^^
+
+This operation can be used to provide information about the data distribution between the minimum and maximum values of a certain dimension.
+
+The parameters available for this operation are:
+
+.. list-table::
+ :widths: 20 10 70
+ :header-rows: 1
+
+ * - Name
+ - Mandatory
+ - Description
+ * - Service=WMTS
+ - Yes
+ - Service type identifier
+ * - Request=GetHistogram
+ - Yes
+ - Operation name
+ * - Version=1.0.0
+ - Yes
+ - Standard and schema version for this operation
+ * - Layer
+ - Yes
+ - Layer identifier
+ * - TileMatrixSet
+ - Yes
+ - Tile matrix set identifier
+ * - BBOX=minx,miny,maxx,maxy
+ - No
+ - Bounding box corners (lower left, upper right) in CRS units
+ * - DimensionIdentifier
+ - No
+ - At most one per dimension, a range described as min/max, restricting the domain of this dimension
+ * - Histogram
+ - Yes
+ - Name of the dimension for which the histogram will be computed
+ * - Resolution
+ - No
+ - Suggested size of the histogram bucket. Cannot be provided for enumerated dimensions, will use the period syntax for time (e.g. PT1H), a number for numeric dimensions, or auto to leave the decision to the server
+ * - Format
+ - No
+ - The desired output format, default is text/html.
+
+The parameters common to the ``DescribeDomains`` operation work as already described above. Currently only the ``text/xml`` output format is supported.
+
+The following example requests the histogram for the time dimension with a resolution of 8 hours, restricting elevations between 500.0 and 1000.0 meters:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=GetHistogram&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&histogram=time&resolution=PT8H&elevation=500.0/1000.0
+
+and the result will be similar to this:
+
+.. code-block:: xml
+
+
+ time
+ 2016-02-23T00:00:00.000Z/2016-02-25T00:00:00.000Z/PT8H
+ 240,0,240,0,0,240
+
+
+Looking at the result we can conclude that measurements between 500.0 and 1000.0 meters are typically done during the night.
+
+The bucket matching is set up so that each bucket contains its first value, but not its last value (which is contained in the next bucket instead).
+This is important for understanding the results. Say we have a dataset with regular elevations, from 0 to 100 with a step of 10, and the
+request calls for elevations between 0 and 20. Then the results will look something like the following:
+
+.. code-block:: xml
+
+
+ elevation
+ 0/30/10
+ 5,3,8
+
+
+That is, the three values cover the intervals [0,10[, [10,20[ and [20,30[ (the extra bucket is there so that images/features
+with an elevation exactly matching 20 are still counted). This happens only if such an extreme value is found; the same request
+filtering on elevations between 0 and 15 will return this instead:
+
+.. code-block:: xml
+
+
+ elevation
+ 0/20/10
+ 5,3
+
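+The left-closed, right-open bucketing rule above can be reproduced with a few lines of Python (a plain illustration of the rule, not the server code):
+
+.. code-block:: python
+
+   import math
+
+   def bucketize(values, lower, upper, resolution):
+       """Each bucket is [start, start + resolution[; a value sitting exactly on
+       the edge of the last bucket gets one extra bucket so it is still counted."""
+       values = [v for v in values if lower <= v <= upper]
+       n = max(1, math.ceil((max(values) - lower) / resolution))
+       if max(values) == lower + n * resolution:
+           n += 1
+       counts = [0] * n
+       for v in values:
+           counts[int((v - lower) // resolution)] += 1
+       return f"{lower}/{lower + n * resolution}/{resolution}", counts
+
+   # elevations 0..100 with step 10; 5, 3 and 8 features at 0, 10 and 20 respectively
+   data = [0] * 5 + [10] * 3 + [20] * 8
+   print(bucketize(data, 0, 20, 10))   # ('0/30/10', [5, 3, 8])
+   print(bucketize(data, 0, 15, 10))   # ('0/20/10', [5, 3])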
+
+GetFeature
+^^^^^^^^^^
+
+This operation enumerates the actual possible dimension value combinations. Its output is similar to that of the ``WFS 2.0 GetFeature`` operation: a list of features along with dimension values, using the same formats as the feature info operation. This output can be used, for example, to draw the features on a map.
+
+The parameters available for this operation are:
+
+.. list-table::
+ :widths: 20 10 70
+ :header-rows: 1
+
+ * - Name
+ - Mandatory
+ - Description
+ * - Service=WMTS
+ - Yes
+ - Service type identifier
+ * - Request=GetFeature
+ - Yes
+ - Operation name
+ * - Version=1.0.0
+ - Yes
+ - Standard and schema version for this operation
+ * - Layer
+ - Yes
+ - Layer identifier
+ * - TileMatrixSet
+ - Yes
+ - Tile matrix set identifier
+ * - BBOX=minx,miny,maxx,maxy
+ - No
+ - Bounding box corners (lower left, upper right) in CRS units
+ * - DimensionIdentifier
+ - No
+ - At most one per dimension, a range described as min/max, restricting the domain of this dimension
+ * - Format
+ - Yes
+ - The desired output format
+
+The parameters common to the ``DescribeDomains`` operation work as already described above. Currently only the ``application/gml+xml; version=3.1`` output format is supported.
+
+Using the same restriction parameters used in the second ``DescribeDomains`` example above, a ``GetFeature`` request will look like this:
+
+.. code-block:: guess
+
+ http://localhost:8080/geoserver/gwc/service/wmts?REQUEST=GetFeature&Version=1.0.0&Layer=some_layer&TileMatrixSet=EPSG:4326&elevation=0/500&time=2016-02-23T03:00:00.000Z
+
+and the result will be similar to this:
+
+.. code-block:: xml
+
+
+
+
+
+
+
+ -180.125 -90.125 -180.125 89.875 179.875 89.875 179.875 -90.125 -180.125 -90.125
+
+
+
+
+ 200.0
+ 2016-02-23T03:00:00.000Z
+ 2016-02-23T00:00:00.000Z
+
+
+
+Note how this result correlates with the corresponding ``DescribeDomains`` operation result.
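+
+As with the other operations, the request is easy to script; the sketch below uses the same example endpoint and layer, and only hints at parsing the returned collection (the ``gml:featureMember`` wrapper is an assumption about the GML 3.1 output):
+
+.. code-block:: python
+
+   import requests
+   import xml.etree.ElementTree as ET
+
+   params = {
+       "REQUEST": "GetFeature",
+       "Version": "1.0.0",
+       "Layer": "some_layer",
+       "TileMatrixSet": "EPSG:4326",
+       "elevation": "0/500",
+       "time": "2016-02-23T03:00:00.000Z",
+       "format": "application/gml+xml; version=3.1",
+   }
+   resp = requests.get("http://localhost:8080/geoserver/gwc/service/wmts", params=params)
+   resp.raise_for_status()
+
+   doc = ET.fromstring(resp.content)
+   members = doc.findall(".//{http://www.opengis.net/gml}featureMember")
+   print(len(members), "feature(s) returned")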
diff --git a/doc/en/user/source/filter/filter_reference.rst b/doc/en/user/source/filter/filter_reference.rst
index b2715dea770..10efedefa10 100644
--- a/doc/en/user/source/filter/filter_reference.rst
+++ b/doc/en/user/source/filter/filter_reference.rst
@@ -1,405 +1,405 @@
-.. _filter_fe_reference:
-
-Filter Encoding Reference
-=========================
-
-This is a reference for the **Filter Encoding** language
-implemented in GeoServer.
-The Filter Encoding language uses an XML-based syntax.
-It is defined by the `OGC Filter Encoding standard `_.
-
-Filters are used to select features or other objects from the context in which they are evaluated.
-They are similar in functionality to the SQL "WHERE" clause.
-A filter is specified using a **condition**.
-
-.. _filter_condition:
-
-Condition
----------
-
-A condition is a single :ref:`filter_predicate` element,
-or a combination of conditions by :ref:`filter_logical`.
-
-.. _filter_predicate:
-
-Predicate
----------
-
-Predicates are boolean-valued expressions which compute relationships between values.
-A predicate is specified by using a **comparison operator** or a **spatial operator**.
-The operators are used to compare properties of the features being filtered
-to other feature properties or to literal data.
-
-Comparison operators
-^^^^^^^^^^^^^^^^^^^^
-
-Comparison operators are used to specify conditions on non-spatial attributes.
-
-Binary Comparison operators
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The **binary comparison operators** are:
-
- * ````
- * ````
- * ````
- * ````
- * ````
- * ````
-
-They contain the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - :ref:`filter_expression`
- - Yes
- - The first value to compare.
- Often a ````.
- * - :ref:`filter_expression`
- - Yes
- - The second value to compare
-
-Binary comparison operator elements may include an optional ``matchCase`` attribute,
-with the value ``true`` or ``false``.
-If this attribute is ``true`` (the default), string comparisons are case-sensitive.
-If the attribute is ``false`` strings comparisons do not check case.
-
-PropertyIsLike operator
-~~~~~~~~~~~~~~~~~~~~~~~
-
-The ```` operator matches a string property value against a text **pattern**.
-It contains the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - ````
- - Yes
- - Contains a string specifying the name of the property to test
- * - ````
- - Yes
- - Contains a pattern string to be matched
-
-The pattern is specified by a sequence of regular characters and
-three special pattern characters.
-The pattern characters are defined by the following *required* attributes of the ```` element:
-
- * ``wildCard`` specifies the pattern character which matches any sequence of zero or more string characters
- * ``singleChar`` specifies the pattern character which matches any single string character
- * ``escapeChar`` specifies the escape character which can be used to escape the pattern characters
-
-PropertyIsNull operator
-~~~~~~~~~~~~~~~~~~~~~~~
-
-The ```` operator tests whether a property value is null.
-It contains the element:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - ````
- - Yes
- - contains a string specifying the name of the property to be tested
-
-PropertyIsBetweeen operator
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The ```` operator tests whether an expression value lies within a range
-given by a lower and upper bound (inclusive).
-It contains the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - :ref:`filter_expression`
- - Yes
- - The value to test
- * - ````
- - Yes
- - Contains an :ref:`filter_expression` giving the lower bound of the range
- * - ````
- - Yes
- - Contains an :ref:`filter_expression` giving the upper bound of the range
-
-
-Spatial operators
-^^^^^^^^^^^^^^^^^
-
-Spatial operators are used to specify conditions on the geometric attributes of a feature.
-The following spatial operators are available:
-
-Topological operators
-~~~~~~~~~~~~~~~~~~~~~
-
-These operators test topological spatial relationships using the standard OGC Simple Features predicates:
-
- * ```` - Tests whether two geometries intersect
- * ```` - Tests whether two geometries are disjoint (do not interact)
- * ```` - Tests whether a geometry contains another one
- * ```` - Tests whether a geometry is within another one
- * ```` - Tests whether two geometries touch
- * ```` - Tests whether two geometries cross
- * ```` - Tests whether two geometries overlap
- * ```` - Tests whether two geometries are topologically equal
-
-These contains the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - ````
- - Yes
- - Contains a string specifying the name of the geometry-valued property to be tested.
- * - *GML Geometry*
- - Yes
- - A GML literal value specifying the geometry to test against
-
-Distance operators
-~~~~~~~~~~~~~~~~~~
-
-These operators test distance relationships between a geometry property and a geometry literal:
-
- * ````
- * ````
-
-They contain the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - ````
- - Yes
- - Contains a string specifying the name of the property to be tested.
- If omitted, the *default geometry attribute* is assumed.
- * - *GML Geometry*
- - Yes
- - A literal value specifying a geometry to compute the distance to.
- This may be either a geometry or an envelope in GML 3 format
- * - ````
- - Yes
- - Contains the numeric value for the distance tolerance.
- The element may include an optional ``units`` attribute.
-
-
-Bounding Box operator
-~~~~~~~~~~~~~~~~~~~~~
-
-The ```` operator tests whether a geometry-valued property intersects a fixed bounding box.
-It contains the elements:
-
-.. list-table::
- :widths: 25 15 60
-
- * - **Element**
- - **Required?**
- - **Description**
- * - ````
- - No
- - Contains a string specifying the name of the property to be tested.
- If omitted, the *default geometry attribute* is assumed.
- * - ````
- - Yes
- - A GML Box literal value specifying the bounding box to test against
-
-
-Examples
-~~~~~~~~
-
-* This filter selects features with a geometry that intersects the point (1,1).
-
-.. code-block:: xml
-
-
- GEOMETRY
-
- 1 1
-
-
-
-* This filter selects features with a geometry that overlaps a polygon.
-
-.. code-block:: xml
-
-
- Geometry
-
-
-
- ...
-
-
-
-
-
-* This filter selects features with a geometry that intersects
- the geographic extent [-10,0 : 10,10].
-
-.. code-block:: xml
-
-
- GEOMETRY
-
-
- -100
-
-
- 1010
-
-
-
-
-
-.. _filter_logical:
-
-Logical operators
------------------
-
-Logical operators are used to specify
-logical combinations of :ref:`filter_condition` elements
-(which may be either :ref:`filter_predicate` elements or other **logical operators**).
-They may be nested to any depth.
-
-The following logical operators are available:
-
- * ```` - computes the logical conjunction of the operands
- * ```` - computes the logical disjunction of the operands
-
-The content for ```` and ```` is two operands given by :ref:`filter_condition` elements.
-
- * ```` - computes the logical negation of the operand
-
-The content for ```` is a single operand given by a :ref:`filter_condition` element.
-
-Examples
-^^^^^^^^
-
-* This filter uses ```` to combine a comparison predicate and a spatial predicate:
-
-.. code-block:: xml
-
-
-
- NAME
- New York
-
-
- GEOMETRY
-
-
- 1 1
-
-
-
-
-
-
-.. _filter_expression:
-
-Expression
-----------
-
-**Filter expressions** specify constant, variable or computed data values.
-An expression is formed from one of the following elements
-(some of which contain sub-expressions,
-meaning that expressions may be of arbitrary depth):
-
-Arithmetic operators
-^^^^^^^^^^^^^^^^^^^^
-
-The **arithmetic operator** elements compute arithmetic operations on numeric values.
-
- * ```` - adds the two operands
- * ```` - subtracts the second operand from the first
- * ```` - multiplies the two operands
- * ``
`` - divides the first operand by the second
-
-Each arithmetic operator element contains two :ref:`filter_expression` elements
-providing the operands.
-
-Function
-^^^^^^^^
-
-The ```` element specifies a filter function to be evaluated.
-The required ``name`` attribute gives the function name.
-The element contains a sequence of zero or more
-:ref:`filter_expression` elements providing the values of the function arguments.
-
-See the :ref:`filter_function_reference` for details of the functions provided by GeoServer.
-
-Property Value
-^^^^^^^^^^^^^^
-
-The ```` element refers to the value of a feature attribute.
-It contains a **string** or an **XPath expression** specifying the attribute name.
-
-Literal
-^^^^^^^
-
-The ```` element specifies a constant value.
-It contains data of one of the following types:
-
-.. list-table::
- :widths: 25 75
-
- * - **Type**
- - **Description**
- * - Numeric
- - A string representing a numeric value (integer or decimal).
- * - Boolean
- - A boolean value of ``true`` or ``false``.
- * - String
- - A string value.
- XML-incompatible text may be included by using
- **character entities** or ```` delimiters.
- * - Date
- - A string representing a date.
- * - Geometry
- - An element specifying a geometry in GML3 format.
-
-WFS 2.0 namespaces
-------------------
-
-WFS 2.0 does not depend on any one GML version and thus requires an explicit namespace and schemaLocation for GML.
-In a GET request, namespaces can be placed on a Filter element (that is, ``filter=`` the block below, URL-encoded):
-
-.. code-block:: xml
-
-
-
-
- sf:the_geom
-
-
-
- 590431 4915204 590430
- 4915205 590429 4915204 590430
- 4915203 590431 4915204
-
-
-
-
-
-
+.. _filter_fe_reference:
+
+Filter Encoding Reference
+=========================
+
+This is a reference for the **Filter Encoding** language
+implemented in GeoServer.
+The Filter Encoding language uses an XML-based syntax.
+It is defined by the `OGC Filter Encoding standard `_.
+
+Filters are used to select features or other objects from the context in which they are evaluated.
+They are similar in functionality to the SQL "WHERE" clause.
+A filter is specified using a **condition**.
+
+.. _filter_condition:
+
+Condition
+---------
+
+A condition is a single :ref:`filter_predicate` element,
+or a combination of conditions by :ref:`filter_logical`.
+
+.. _filter_predicate:
+
+Predicate
+---------
+
+Predicates are boolean-valued expressions which compute relationships between values.
+A predicate is specified by using a **comparison operator** or a **spatial operator**.
+The operators are used to compare properties of the features being filtered
+to other feature properties or to literal data.
+
+Comparison operators
+^^^^^^^^^^^^^^^^^^^^
+
+Comparison operators are used to specify conditions on non-spatial attributes.
+
+Binary Comparison operators
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The **binary comparison operators** are:
+
+ * ``<PropertyIsEqualTo>``
+ * ``<PropertyIsNotEqualTo>``
+ * ``<PropertyIsLessThan>``
+ * ``<PropertyIsLessThanOrEqualTo>``
+ * ``<PropertyIsGreaterThan>``
+ * ``<PropertyIsGreaterThanOrEqualTo>``
+
+They contain the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - :ref:`filter_expression`
+ - Yes
+ - The first value to compare.
+ Often a ``<PropertyName>``.
+ * - :ref:`filter_expression`
+ - Yes
+ - The second value to compare
+
+Binary comparison operator elements may include an optional ``matchCase`` attribute,
+with the value ``true`` or ``false``.
+If this attribute is ``true`` (the default), string comparisons are case-sensitive.
+If the attribute is ``false``, string comparisons do not check case.
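+
+For instance, a case-insensitive equality test can be written as follows (a minimal sketch; the property name and literal are illustrative):
+
+.. code-block:: python
+
+   import xml.etree.ElementTree as ET
+
+   property_is_equal_to = """
+   <Filter xmlns="http://www.opengis.net/ogc">
+     <PropertyIsEqualTo matchCase="false">
+       <PropertyName>NAME</PropertyName>
+       <Literal>new york</Literal>
+     </PropertyIsEqualTo>
+   </Filter>
+   """
+
+   ET.fromstring(property_is_equal_to)   # raises if the filter is not well-formed XML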
+
+PropertyIsLike operator
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``<PropertyIsLike>`` operator matches a string property value against a text **pattern**.
+It contains the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - ``<PropertyName>``
+ - Yes
+ - Contains a string specifying the name of the property to test
+ * - ``<Literal>``
+ - Yes
+ - Contains a pattern string to be matched
+
+The pattern is specified by a sequence of regular characters and
+three special pattern characters.
+The pattern characters are defined by the following *required* attributes of the ``<PropertyIsLike>`` element (a usage sketch follows this list):
+
+ * ``wildCard`` specifies the pattern character which matches any sequence of zero or more string characters
+ * ``singleChar`` specifies the pattern character which matches any single string character
+ * ``escapeChar`` specifies the escape character which can be used to escape the pattern characters
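+
+A minimal usage sketch (the wildcard characters below are choices, not fixed by the standard, and the WFS layer name is illustrative):
+
+.. code-block:: python
+
+   import requests
+
+   # Matches every feature whose NAME starts with "New"
+   property_is_like = """
+   <Filter xmlns="http://www.opengis.net/ogc">
+     <PropertyIsLike wildCard="*" singleChar="." escapeChar="!">
+       <PropertyName>NAME</PropertyName>
+       <Literal>New*</Literal>
+     </PropertyIsLike>
+   </Filter>
+   """
+
+   resp = requests.get("http://localhost:8080/geoserver/wfs", params={
+       "service": "WFS", "version": "1.1.0", "request": "GetFeature",
+       "typeName": "topp:states", "filter": property_is_like.strip(),
+   })
+   print(resp.status_code)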
+
+PropertyIsNull operator
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``<PropertyIsNull>`` operator tests whether a property value is null.
+It contains the element:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - ``<PropertyName>``
+ - Yes
+ - Contains a string specifying the name of the property to be tested
+
+PropertyIsBetween operator
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``<PropertyIsBetween>`` operator tests whether an expression value lies within a range
+given by a lower and upper bound (inclusive).
+It contains the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - :ref:`filter_expression`
+ - Yes
+ - The value to test
+ * - ``<LowerBoundary>``
+ - Yes
+ - Contains an :ref:`filter_expression` giving the lower bound of the range
+ * - ``<UpperBoundary>``
+ - Yes
+ - Contains an :ref:`filter_expression` giving the upper bound of the range
+
+
+Spatial operators
+^^^^^^^^^^^^^^^^^
+
+Spatial operators are used to specify conditions on the geometric attributes of a feature.
+The following spatial operators are available:
+
+Topological operators
+~~~~~~~~~~~~~~~~~~~~~
+
+These operators test topological spatial relationships using the standard OGC Simple Features predicates:
+
+ * ``<Intersects>`` - Tests whether two geometries intersect
+ * ``<Disjoint>`` - Tests whether two geometries are disjoint (do not interact)
+ * ``<Contains>`` - Tests whether a geometry contains another one
+ * ``<Within>`` - Tests whether a geometry is within another one
+ * ``<Touches>`` - Tests whether two geometries touch
+ * ``<Crosses>`` - Tests whether two geometries cross
+ * ``<Overlaps>`` - Tests whether two geometries overlap
+ * ``<Equals>`` - Tests whether two geometries are topologically equal
+
+These contain the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - ``<PropertyName>``
+ - Yes
+ - Contains a string specifying the name of the geometry-valued property to be tested.
+ * - *GML Geometry*
+ - Yes
+ - A GML literal value specifying the geometry to test against
+
+Distance operators
+~~~~~~~~~~~~~~~~~~
+
+These operators test distance relationships between a geometry property and a geometry literal:
+
+ * ``<DWithin>``
+ * ``<Beyond>``
+
+They contain the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - ``<PropertyName>``
+ - Yes
+ - Contains a string specifying the name of the property to be tested.
+ If omitted, the *default geometry attribute* is assumed.
+ * - *GML Geometry*
+ - Yes
+ - A literal value specifying a geometry to compute the distance to.
+ This may be either a geometry or an envelope in GML 3 format
+ * - ``<Distance>``
+ - Yes
+ - Contains the numeric value for the distance tolerance.
+ The element may include an optional ``units`` attribute.
+
+
+Bounding Box operator
+~~~~~~~~~~~~~~~~~~~~~
+
+The ``<BBOX>`` operator tests whether a geometry-valued property intersects a fixed bounding box.
+It contains the elements:
+
+.. list-table::
+ :widths: 25 15 60
+
+ * - **Element**
+ - **Required?**
+ - **Description**
+ * - ``<PropertyName>``
+ - No
+ - Contains a string specifying the name of the property to be tested.
+ If omitted, the *default geometry attribute* is assumed.
+ * - ``<gml:Box>``
+ - Yes
+ - A GML Box literal value specifying the bounding box to test against
+
+
+Examples
+~~~~~~~~
+
+* This filter selects features with a geometry that intersects the point (1,1).
+
+.. code-block:: xml
+
+   <Intersects>
+     <PropertyName>GEOMETRY</PropertyName>
+     <gml:Point>
+       <gml:coordinates>1 1</gml:coordinates>
+     </gml:Point>
+   </Intersects>
+
+
+* This filter selects features with a geometry that overlaps a polygon.
+
+.. code-block:: xml
+
+
+ Geometry
+
+
+
+ ...
+
+
+
+
+
+* This filter selects features with a geometry that intersects
+ the geographic extent [-10,0 : 10,10].
+
+.. code-block:: xml
+
+
+ GEOMETRY
+
+
+ -100
+
+
+ 1010
+
+
+
+
+
+.. _filter_logical:
+
+Logical operators
+-----------------
+
+Logical operators are used to specify
+logical combinations of :ref:`filter_condition` elements
+(which may be either :ref:`filter_predicate` elements or other **logical operators**).
+They may be nested to any depth.
+
+The following logical operators are available:
+
+ * ``<And>`` - computes the logical conjunction of the operands
+ * ``<Or>`` - computes the logical disjunction of the operands
+
+The content for ``<And>`` and ``<Or>`` is two operands given by :ref:`filter_condition` elements.
+
+ * ``<Not>`` - computes the logical negation of the operand
+
+The content for ``<Not>`` is a single operand given by a :ref:`filter_condition` element.
+
+Examples
+^^^^^^^^
+
+* This filter uses ``<And>`` to combine a comparison predicate and a spatial predicate:
+
+.. code-block:: xml
+
+
+
+ NAME
+ New York
+
+
+ GEOMETRY
+
+
+ 1 1
+
+
+
+
+
+
+.. _filter_expression:
+
+Expression
+----------
+
+**Filter expressions** specify constant, variable or computed data values.
+An expression is formed from one of the following elements
+(some of which contain sub-expressions,
+meaning that expressions may be of arbitrary depth):
+
+Arithmetic operators
+^^^^^^^^^^^^^^^^^^^^
+
+The **arithmetic operator** elements compute arithmetic operations on numeric values.
+
+ * ``<Add>`` - adds the two operands
+ * ``<Sub>`` - subtracts the second operand from the first
+ * ``<Mul>`` - multiplies the two operands
+ * ``<Div>`` - divides the first operand by the second
+
+Each arithmetic operator element contains two :ref:`filter_expression` elements
+providing the operands.
+
+Function
+^^^^^^^^
+
+The ``<Function>`` element specifies a filter function to be evaluated.
+The required ``name`` attribute gives the function name.
+The element contains a sequence of zero or more
+:ref:`filter_expression` elements providing the values of the function arguments.
+
+See the :ref:`filter_function_reference` for details of the functions provided by GeoServer.
+
+Property Value
+^^^^^^^^^^^^^^
+
+The ``<PropertyName>`` element refers to the value of a feature attribute.
+It contains a **string** or an **XPath expression** specifying the attribute name.
+
+Literal
+^^^^^^^
+
+The ``<Literal>`` element specifies a constant value.
+It contains data of one of the following types:
+
+.. list-table::
+ :widths: 25 75
+
+ * - **Type**
+ - **Description**
+ * - Numeric
+ - A string representing a numeric value (integer or decimal).
+ * - Boolean
+ - A boolean value of ``true`` or ``false``.
+ * - String
+ - A string value.
+ XML-incompatible text may be included by using
+ **character entities** or ``<![CDATA[ ]]>`` delimiters.
+ * - Date
+ - A string representing a date.
+ * - Geometry
+ - An element specifying a geometry in GML3 format.
+
+WFS 2.0 namespaces
+------------------
+
+WFS 2.0 does not depend on any one GML version and thus requires an explicit namespace and schemaLocation for GML.
+In a GET request, namespaces can be placed on a Filter element (that is, ``filter=`` the block below, URL-encoded):
+
+.. code-block:: xml
+
+