(function webpackUniversalModuleDefinition(root, factory) {
	if(typeof exports === 'object' && typeof module === 'object')
		module.exports = factory();
	else if(typeof define === 'function' && define.amd)
		define([], factory);
	else if(typeof exports === 'object')
		exports["GeoRaster"] = factory();
	else
		root["GeoRaster"] = factory();
})(typeof self !== 'undefined' ? self : this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ 	// The module cache
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if(installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId,
/******/ 			l: false,
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/
/******/ 	// expose the modules object (__webpack_modules__)
/******/ 	__webpack_require__.m = modules;
/******/
/******/ 	// expose the module cache
/******/ 	__webpack_require__.c = installedModules;
/******/
/******/ 	// define getter function for harmony exports
/******/ 	__webpack_require__.d = function(exports, name, getter) {
/******/ 		if(!__webpack_require__.o(exports, name)) {
/******/ 			Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ 		}
/******/ 	};
/******/
/******/ 	// define __esModule on exports
/******/ 	__webpack_require__.r = function(exports) {
/******/ 		if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ 			Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ 		}
/******/ 		Object.defineProperty(exports, '__esModule', { value: true });
/******/ 	};
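/******/
/******/ 	// For reference, the harmony modules bundled below register their exports through
/******/ 	// the two helpers above; excerpted (reformatted) from the basedecoder.js module
/******/ 	// further down:
/******/ 	//
/******/ 	//   __webpack_require__.r(__webpack_exports__);
/******/ 	//   __webpack_require__.d(__webpack_exports__, "default", function() { return BaseDecoder; });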
/******/
/******/ 	// create a fake namespace object
/******/ 	// mode & 1: value is a module id, require it
/******/ 	// mode & 2: merge all properties of value into the ns
/******/ 	// mode & 4: return value when already ns object
/******/ 	// mode & 8|1: behave like require
/******/ 	__webpack_require__.t = function(value, mode) {
/******/ 		if(mode & 1) value = __webpack_require__(value);
/******/ 		if(mode & 8) return value;
/******/ 		if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ 		var ns = Object.create(null);
/******/ 		__webpack_require__.r(ns);
/******/ 		Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ 		if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ 		return ns;
/******/ 	};
/******/
/******/ 	// getDefaultExport function for compatibility with non-harmony modules
/******/ 	__webpack_require__.n = function(module) {
/******/ 		var getter = module && module.__esModule ?
/******/ 			function getDefault() { return module['default']; } :
/******/ 			function getModuleExports() { return module; };
/******/ 		__webpack_require__.d(getter, 'a', getter);
/******/ 		return getter;
/******/ 	};
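/******/
/******/ 	// For reference, CommonJS dependencies are wrapped with the helper above so that the
/******/ 	// returned getter's `.a` property resolves to the default export (or the CommonJS
/******/ 	// exports object); excerpted from the deflate.js module further down:
/******/ 	//
/******/ 	//   var pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0___default =
/******/ 	//     /*#__PURE__*/__webpack_require__.n(pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__);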
/******/
/******/ 	// Object.prototype.hasOwnProperty.call
/******/ 	__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ 	// __webpack_public_path__
/******/ 	__webpack_require__.p = "";
/******/
/******/
/******/ 	// Load entry module and return exports
/******/ 	return __webpack_require__(__webpack_require__.s = "./src/index.js");
/******/ })
/************************************************************************/
/******/ ({
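
/* Note: the object below maps each bundled source path to its wrapper function
   (function(module, exports, __webpack_require__) { ... }); each wrapper body is
   inlined as an eval() string ending in a //# sourceURL comment so devtools can
   map it back to the original file. */
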
/***/ "./node_modules/callsites/index.js":
/*!*****************************************!*\
  !*** ./node_modules/callsites/index.js ***!
  \*****************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";
eval("\n\nconst callsites = () => {\n\tconst _prepareStackTrace = Error.prepareStackTrace;\n\tError.prepareStackTrace = (_, stack) => stack;\n\tconst stack = new Error().stack.slice(1);\n\tError.prepareStackTrace = _prepareStackTrace;\n\treturn stack;\n};\n\nmodule.exports = callsites;\n// TODO: Remove this for the next major release\nmodule.exports.default = callsites;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/callsites/index.js?");

/***/ }),

/***/ "./node_modules/core-util-is/lib/util.js":
/*!***********************************************!*\
  !*** ./node_modules/core-util-is/lib/util.js ***!
  \***********************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

118eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// NOTE: These type checking functions intentionally don't use `instanceof`\n// because it is fragile and can be easily faked with `Object.create()`.\n\nfunction isArray(arg) {\n if (Array.isArray) {\n return Array.isArray(arg);\n }\n return objectToString(arg) === '[object Array]';\n}\nexports.isArray = isArray;\n\nfunction isBoolean(arg) {\n return typeof arg === 'boolean';\n}\nexports.isBoolean = isBoolean;\n\nfunction isNull(arg) {\n return arg === null;\n}\nexports.isNull = isNull;\n\nfunction isNullOrUndefined(arg) {\n return arg == null;\n}\nexports.isNullOrUndefined = isNullOrUndefined;\n\nfunction isNumber(arg) {\n return typeof arg === 'number';\n}\nexports.isNumber = isNumber;\n\nfunction isString(arg) {\n return typeof arg === 'string';\n}\nexports.isString = isString;\n\nfunction isSymbol(arg) {\n return typeof arg === 'symbol';\n}\nexports.isSymbol = isSymbol;\n\nfunction isUndefined(arg) {\n return arg === void 0;\n}\nexports.isUndefined = isUndefined;\n\nfunction isRegExp(re) {\n return objectToString(re) === '[object RegExp]';\n}\nexports.isRegExp = isRegExp;\n\nfunction isObject(arg) {\n return typeof arg === 'object' && arg !== null;\n}\nexports.isObject = isObject;\n\nfunction isDate(d) {\n return objectToString(d) === '[object Date]';\n}\nexports.isDate = isDate;\n\nfunction isError(e) {\n return (objectToString(e) === '[object Error]' || e instanceof Error);\n}\nexports.isError = isError;\n\nfunction isFunction(arg) {\n return typeof arg === 'function';\n}\nexports.isFunction = isFunction;\n\nfunction isPrimitive(arg) {\n return arg === null ||\n typeof arg === 'boolean' ||\n typeof arg === 'number' ||\n typeof arg === 'string' ||\n typeof arg === 'symbol' || // ES6 symbol\n typeof arg === 'undefined';\n}\nexports.isPrimitive = isPrimitive;\n\nexports.isBuffer = __webpack_require__(/*! buffer */ \"buffer\").Buffer.isBuffer;\n\nfunction objectToString(o) {\n return Object.prototype.toString.call(o);\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/core-util-is/lib/util.js?");

/***/ }),

/***/ "./node_modules/cross-fetch/dist/node-ponyfill.js":
/*!********************************************************!*\
  !*** ./node_modules/cross-fetch/dist/node-ponyfill.js ***!
  \********************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

eval("const nodeFetch = __webpack_require__(/*! node-fetch */ \"./node_modules/node-fetch/lib/index.mjs\")\nconst realFetch = nodeFetch.default || nodeFetch\n\nconst fetch = function (url, options) {\n // Support schemaless URIs on the server for parity with the browser.\n // Ex: //github.com/ -> https://github.com/\n if (/^\\/\\//.test(url)) {\n url = 'https:' + url\n }\n return realFetch.call(this, url, options)\n}\n\nfetch.ponyfill = true\n\nmodule.exports = exports = fetch\nexports.fetch = fetch\nexports.Headers = nodeFetch.Headers\nexports.Request = nodeFetch.Request\nexports.Response = nodeFetch.Response\n\n// Needed for TypeScript consumers without esModuleInterop.\nexports.default = fetch\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/cross-fetch/dist/node-ponyfill.js?");

/***/ }),

/***/ "./node_modules/georaster-to-canvas/index.js":
/*!***************************************************!*\
  !*** ./node_modules/georaster-to-canvas/index.js ***!
  \***************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
141eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return toCanvas; });\n/* global ImageData */\n\nfunction toImageData(georaster, canvasWidth, canvasHeight) {\n if (georaster.values) {\n const { noDataValue, mins, ranges, values } = georaster;\n const numBands = values.length;\n const xRatio = georaster.width / canvasWidth;\n const yRatio = georaster.height / canvasHeight;\n const data = new Uint8ClampedArray(canvasWidth * canvasHeight * 4);\n for (let rowIndex = 0; rowIndex < canvasHeight; rowIndex++) {\n for (let columnIndex = 0; columnIndex < canvasWidth; columnIndex++) {\n const rasterRowIndex = Math.round(rowIndex * yRatio);\n const rasterColumnIndex = Math.round(columnIndex * xRatio);\n const pixelValues = values.map(band => {\n try {\n return band[rasterRowIndex][rasterColumnIndex];\n } catch (error) {\n console.error(error);\n }\n });\n const haveDataForAllBands = pixelValues.every(value => value !== undefined && value !== noDataValue);\n if (haveDataForAllBands) {\n const i = (rowIndex * (canvasWidth * 4)) + 4 * columnIndex;\n if (numBands === 1) {\n const pixelValue = Math.round(pixelValues[0]);\n const scaledPixelValue = Math.round((pixelValue - mins[0]) / ranges[0] * 255);\n data[i] = scaledPixelValue;\n data[i + 1] = scaledPixelValue;\n data[i + 2] = scaledPixelValue;\n data[i + 3] = 255;\n } else if (numBands === 3) {\n try {\n const [r, g, b] = pixelValues;\n data[i] = r;\n data[i + 1] = g;\n data[i + 2] = b;\n data[i + 3] = 255;\n } catch (error) {\n console.error(error);\n }\n } else if (numBands === 4) {\n try {\n const [r, g, b, a] = pixelValues;\n data[i] = r;\n data[i + 1] = g;\n data[i + 2] = b;\n data[i + 3] = a;\n } catch (error) {\n console.error(error);\n }\n }\n }\n }\n }\n return new ImageData(data, canvasWidth, canvasHeight);\n }\n}\n\nfunction toCanvas(georaster, options) {\n if (typeof ImageData === \"undefined\") {\n throw `toCanvas is not supported in your environment`;\n } else {\n const canvas = document.createElement(\"CANVAS\");\n const canvasHeight = options && options.height ? Math.min(georaster.height, options.height) : Math.min(georaster.height, 100);\n const canvasWidth = options && options.width ? Math.min(georaster.width, options.width) : Math.min(georaster.width, 100);\n canvas.height = canvasHeight;\n canvas.width = canvasWidth;\n canvas.style.minHeight = \"200px\";\n canvas.style.minWidth = \"400px\";\n canvas.style.maxWidth = \"100%\";\n const context = canvas.getContext(\"2d\");\n const imageData = toImageData(georaster, canvasWidth, canvasHeight);\n context.putImageData(imageData, 0, 0);\n return canvas;\n }\n}\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/georaster-to-canvas/index.js?");

/***/ }),

/***/ "./node_modules/geotiff-palette/index.js":
/*!***********************************************!*\
  !*** ./node_modules/geotiff-palette/index.js ***!
  \***********************************************/
/*! no static exports found */
/***/ (function(module, exports) {

152eval("const getPalette = (image, { debug = false } = { debug: false }) => {\n if (debug) console.log(\"starting getPalette with image\", image);\n const { fileDirectory } = image;\n const {\n BitsPerSample,\n ColorMap,\n ImageLength,\n ImageWidth,\n PhotometricInterpretation,\n SampleFormat,\n SamplesPerPixel\n } = fileDirectory;\n\n if (!ColorMap) {\n throw new Error(\"[geotiff-palette]: the image does not contain a color map, so we can't make a palette.\");\n }\n\n const count = Math.pow(2, BitsPerSample);\n if (debug) console.log(\"[geotiff-palette]: count:\", count);\n\n const bandSize = ColorMap.length / 3;\n if (debug) console.log(\"[geotiff-palette]: bandSize:\", bandSize);\n\n if (bandSize !== count) {\n throw new Error(\"[geotiff-palette]: can't handle situations where the color map has more or less values than the number of possible values in a raster\");\n }\n\n const greenOffset = bandSize;\n const redOffset = greenOffset + bandSize;\n\n const result = [];\n for (let i = 0; i < count; i++) {\n // colorMap[mapIndex] / 65536 * 256 equals colorMap[mapIndex] / 256\n // because (1 / 2^16) * (2^8) equals 1 / 2^8\n result.push([\n Math.floor(ColorMap[i] / 256), // red\n Math.floor(ColorMap[greenOffset + i] / 256), // green\n Math.floor(ColorMap[redOffset + i] / 256), // blue\n 255 // alpha value is always 255\n ]);\n }\n if (debug) console.log(\"[geotiff-palette]: result is \", result);\n return result;\n}\n\nmodule.exports = { getPalette };\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff-palette/index.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/basedecoder.js":
/*!*************************************************************!*\
  !*** ./node_modules/geotiff/src/compression/basedecoder.js ***!
  \*************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return BaseDecoder; });\n/* harmony import */ var _predictor__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../predictor */ \"./node_modules/geotiff/src/predictor.js\");\n\n\nclass BaseDecoder {\n decode(fileDirectory, buffer) {\n const decoded = this.decodeBlock(buffer);\n const predictor = fileDirectory.Predictor || 1;\n if (predictor !== 1) {\n const isTiled = !fileDirectory.StripOffsets;\n const tileWidth = isTiled ? fileDirectory.TileWidth : fileDirectory.ImageWidth;\n const tileHeight = isTiled ? fileDirectory.TileLength : (\n fileDirectory.RowsPerStrip || fileDirectory.ImageLength\n );\n return Object(_predictor__WEBPACK_IMPORTED_MODULE_0__[\"applyPredictor\"])(\n decoded, predictor, tileWidth, tileHeight, fileDirectory.BitsPerSample,\n fileDirectory.PlanarConfiguration,\n );\n }\n return decoded;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/basedecoder.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/deflate.js":
/*!*********************************************************!*\
  !*** ./node_modules/geotiff/src/compression/deflate.js ***!
  \*********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return DeflateDecoder; });\n/* harmony import */ var pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! pako/lib/inflate */ \"./node_modules/pako/lib/inflate.js\");\n/* harmony import */ var pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./basedecoder */ \"./node_modules/geotiff/src/compression/basedecoder.js\");\n\n\n\nclass DeflateDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_1__[\"default\"] {\n decodeBlock(buffer) {\n return Object(pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__[\"inflate\"])(new Uint8Array(buffer)).buffer;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/deflate.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/index.js":
/*!*******************************************************!*\
  !*** ./node_modules/geotiff/src/compression/index.js ***!
  \*******************************************************/
/*! exports provided: getDecoder */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
188eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getDecoder\", function() { return getDecoder; });\n/* harmony import */ var _raw__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./raw */ \"./node_modules/geotiff/src/compression/raw.js\");\n/* harmony import */ var _lzw__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./lzw */ \"./node_modules/geotiff/src/compression/lzw.js\");\n/* harmony import */ var _jpeg__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./jpeg */ \"./node_modules/geotiff/src/compression/jpeg.js\");\n/* harmony import */ var _deflate__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./deflate */ \"./node_modules/geotiff/src/compression/deflate.js\");\n/* harmony import */ var _packbits__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./packbits */ \"./node_modules/geotiff/src/compression/packbits.js\");\n\n\n\n\n\n\nfunction getDecoder(fileDirectory) {\n switch (fileDirectory.Compression) {\n case undefined:\n case 1: // no compression\n return new _raw__WEBPACK_IMPORTED_MODULE_0__[\"default\"]();\n case 5: // LZW\n return new _lzw__WEBPACK_IMPORTED_MODULE_1__[\"default\"]();\n case 6: // JPEG\n throw new Error('old style JPEG compression is not supported.');\n case 7: // JPEG\n return new _jpeg__WEBPACK_IMPORTED_MODULE_2__[\"default\"](fileDirectory);\n case 8: // Deflate as recognized by Adobe\n case 32946: // Deflate GDAL default\n return new _deflate__WEBPACK_IMPORTED_MODULE_3__[\"default\"]();\n case 32773: // packbits\n return new _packbits__WEBPACK_IMPORTED_MODULE_4__[\"default\"]();\n default:\n throw new Error(`Unknown compression method identifier: ${fileDirectory.Compression}`);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/index.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/jpeg.js":
/*!******************************************************!*\
  !*** ./node_modules/geotiff/src/compression/jpeg.js ***!
  \******************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
200eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return JpegDecoder; });\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./basedecoder */ \"./node_modules/geotiff/src/compression/basedecoder.js\");\n\n\n/* -*- tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- /\n/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */\n/*\n Copyright 2011 notmasteryet\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n http://www.apache.org/licenses/LICENSE-2.0\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*/\n\n// - The JPEG specification can be found in the ITU CCITT Recommendation T.81\n// (www.w3.org/Graphics/JPEG/itu-t81.pdf)\n// - The JFIF specification can be found in the JPEG File Interchange Format\n// (www.w3.org/Graphics/JPEG/jfif3.pdf)\n// - The Adobe Application-Specific JPEG markers in the Supporting the DCT Filters\n// in PostScript Level 2, Technical Note #5116\n// (partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf)\n\n\nconst dctZigZag = new Int32Array([\n 0,\n 1, 8,\n 16, 9, 2,\n 3, 10, 17, 24,\n 32, 25, 18, 11, 4,\n 5, 12, 19, 26, 33, 40,\n 48, 41, 34, 27, 20, 13, 6,\n 7, 14, 21, 28, 35, 42, 49, 56,\n 57, 50, 43, 36, 29, 22, 15,\n 23, 30, 37, 44, 51, 58,\n 59, 52, 45, 38, 31,\n 39, 46, 53, 60,\n 61, 54, 47,\n 55, 62,\n 63,\n]);\n\nconst dctCos1 = 4017; // cos(pi/16)\nconst dctSin1 = 799; // sin(pi/16)\nconst dctCos3 = 3406; // cos(3*pi/16)\nconst dctSin3 = 2276; // sin(3*pi/16)\nconst dctCos6 = 1567; // cos(6*pi/16)\nconst dctSin6 = 3784; // sin(6*pi/16)\nconst dctSqrt2 = 5793; // sqrt(2)\nconst dctSqrt1d2 = 2896;// sqrt(2) / 2\n\nfunction buildHuffmanTable(codeLengths, values) {\n let k = 0;\n const code = [];\n let length = 16;\n while (length > 0 && !codeLengths[length - 1]) {\n --length;\n }\n code.push({ children: [], index: 0 });\n\n let p = code[0];\n let q;\n for (let i = 0; i < length; i++) {\n for (let j = 0; j < codeLengths[i]; j++) {\n p = code.pop();\n p.children[p.index] = values[k];\n while (p.index > 0) {\n p = code.pop();\n }\n p.index++;\n code.push(p);\n while (code.length <= i) {\n code.push(q = { children: [], index: 0 });\n p.children[p.index] = q.children;\n p = q;\n }\n k++;\n }\n if (i + 1 < length) {\n // p here points to last code\n code.push(q = { children: [], index: 0 });\n p.children[p.index] = q.children;\n p = q;\n }\n }\n return code[0].children;\n}\n\nfunction decodeScan(data, initialOffset,\n frame, components, resetInterval,\n spectralStart, spectralEnd,\n successivePrev, successive) {\n const { mcusPerLine, progressive } = frame;\n\n const startOffset = initialOffset;\n let offset = initialOffset;\n let bitsData = 0;\n let bitsCount = 0;\n function readBit() {\n if (bitsCount > 0) {\n bitsCount--;\n return (bitsData >> bitsCount) & 1;\n }\n bitsData = data[offset++];\n if (bitsData === 0xFF) {\n const nextByte = data[offset++];\n if (nextByte) {\n throw new Error(`unexpected marker: ${((bitsData << 8) | nextByte).toString(16)}`);\n }\n // unstuff 0\n }\n bitsCount = 7;\n 
return bitsData >>> 7;\n }\n function decodeHuffman(tree) {\n let node = tree;\n let bit;\n while ((bit = readBit()) !== null) { // eslint-disable-line no-cond-assign\n node = node[bit];\n if (typeof node === 'number') {\n return node;\n }\n if (typeof node !== 'object') {\n throw new Error('invalid huffman sequence');\n }\n }\n return null;\n }\n function receive(initialLength) {\n let length = initialLength;\n let n = 0;\n while (length > 0) {\n const bit = readBit();\n if (bit === null) {\n return undefined;\n }\n n = (n << 1) | bit;\n --length;\n }\n return n;\n }\n function receiveAndExtend(length) {\n const n = receive(length);\n if (n >= 1 << (length - 1)) {\n return n;\n }\n return n + (-1 << length) + 1;\n }\n function decodeBaseline(component, zz) {\n const t = decodeHuffman(component.huffmanTableDC);\n const diff = t === 0 ? 0 : receiveAndExtend(t);\n component.pred += diff;\n zz[0] = component.pred;\n let k = 1;\n while (k < 64) {\n const rs = decodeHuffman(component.huffmanTableAC);\n const s = rs & 15;\n const r = rs >> 4;\n if (s === 0) {\n if (r < 15) {\n break;\n }\n k += 16;\n } else {\n k += r;\n const z = dctZigZag[k];\n zz[z] = receiveAndExtend(s);\n k++;\n }\n }\n }\n function decodeDCFirst(component, zz) {\n const t = decodeHuffman(component.huffmanTableDC);\n const diff = t === 0 ? 0 : (receiveAndExtend(t) << successive);\n component.pred += diff;\n zz[0] = component.pred;\n }\n function decodeDCSuccessive(component, zz) {\n zz[0] |= readBit() << successive;\n }\n let eobrun = 0;\n function decodeACFirst(component, zz) {\n if (eobrun > 0) {\n eobrun--;\n return;\n }\n let k = spectralStart;\n const e = spectralEnd;\n while (k <= e) {\n const rs = decodeHuffman(component.huffmanTableAC);\n const s = rs & 15;\n const r = rs >> 4;\n if (s === 0) {\n if (r < 15) {\n eobrun = receive(r) + (1 << r) - 1;\n break;\n }\n k += 16;\n } else {\n k += r;\n const z = dctZigZag[k];\n zz[z] = receiveAndExtend(s) * (1 << successive);\n k++;\n }\n }\n }\n let successiveACState = 0;\n let successiveACNextValue;\n function decodeACSuccessive(component, zz) {\n let k = spectralStart;\n const e = spectralEnd;\n let r = 0;\n while (k <= e) {\n const z = dctZigZag[k];\n const direction = zz[z] < 0 ? -1 : 1;\n switch (successiveACState) {\n case 0: { // initial state\n const rs = decodeHuffman(component.huffmanTableAC);\n const s = rs & 15;\n r = rs >> 4;\n if (s === 0) {\n if (r < 15) {\n eobrun = receive(r) + (1 << r);\n successiveACState = 4;\n } else {\n r = 16;\n successiveACState = 1;\n }\n } else {\n if (s !== 1) {\n throw new Error('invalid ACn encoding');\n }\n successiveACNextValue = receiveAndExtend(s);\n successiveACState = r ? 2 : 3;\n }\n continue; // eslint-disable-line no-continue\n }\n case 1: // skipping r zero items\n case 2:\n if (zz[z]) {\n zz[z] += (readBit() << successive) * direction;\n } else {\n r--;\n if (r === 0) {\n successiveACState = successiveACState === 2 ? 
3 : 0;\n }\n }\n break;\n case 3: // set value for a zero item\n if (zz[z]) {\n zz[z] += (readBit() << successive) * direction;\n } else {\n zz[z] = successiveACNextValue << successive;\n successiveACState = 0;\n }\n break;\n case 4: // eob\n if (zz[z]) {\n zz[z] += (readBit() << successive) * direction;\n }\n break;\n default:\n break;\n }\n k++;\n }\n if (successiveACState === 4) {\n eobrun--;\n if (eobrun === 0) {\n successiveACState = 0;\n }\n }\n }\n function decodeMcu(component, decodeFunction, mcu, row, col) {\n const mcuRow = (mcu / mcusPerLine) | 0;\n const mcuCol = mcu % mcusPerLine;\n const blockRow = (mcuRow * component.v) + row;\n const blockCol = (mcuCol * component.h) + col;\n decodeFunction(component, component.blocks[blockRow][blockCol]);\n }\n function decodeBlock(component, decodeFunction, mcu) {\n const blockRow = (mcu / component.blocksPerLine) | 0;\n const blockCol = mcu % component.blocksPerLine;\n decodeFunction(component, component.blocks[blockRow][blockCol]);\n }\n\n const componentsLength = components.length;\n let component;\n let i;\n let j;\n let k;\n let n;\n let decodeFn;\n if (progressive) {\n if (spectralStart === 0) {\n decodeFn = successivePrev === 0 ? decodeDCFirst : decodeDCSuccessive;\n } else {\n decodeFn = successivePrev === 0 ? decodeACFirst : decodeACSuccessive;\n }\n } else {\n decodeFn = decodeBaseline;\n }\n\n let mcu = 0;\n let marker;\n let mcuExpected;\n if (componentsLength === 1) {\n mcuExpected = components[0].blocksPerLine * components[0].blocksPerColumn;\n } else {\n mcuExpected = mcusPerLine * frame.mcusPerColumn;\n }\n\n const usedResetInterval = resetInterval || mcuExpected;\n\n while (mcu < mcuExpected) {\n // reset interval stuff\n for (i = 0; i < componentsLength; i++) {\n components[i].pred = 0;\n }\n eobrun = 0;\n\n if (componentsLength === 1) {\n component = components[0];\n for (n = 0; n < usedResetInterval; n++) {\n decodeBlock(component, decodeFn, mcu);\n mcu++;\n }\n } else {\n for (n = 0; n < usedResetInterval; n++) {\n for (i = 0; i < componentsLength; i++) {\n component = components[i];\n const { h, v } = component;\n for (j = 0; j < v; j++) {\n for (k = 0; k < h; k++) {\n decodeMcu(component, decodeFn, mcu, j, k);\n }\n }\n }\n mcu++;\n\n // If we've reached our expected MCU's, stop decoding\n if (mcu === mcuExpected) {\n break;\n }\n }\n }\n\n // find marker\n bitsCount = 0;\n marker = (data[offset] << 8) | data[offset + 1];\n if (marker < 0xFF00) {\n throw new Error('marker was not found');\n }\n\n if (marker >= 0xFFD0 && marker <= 0xFFD7) { // RSTx\n offset += 2;\n } else {\n break;\n }\n }\n\n return offset - startOffset;\n}\n\nfunction buildComponentData(frame, component) {\n const lines = [];\n const { blocksPerLine, blocksPerColumn } = component;\n const samplesPerLine = blocksPerLine << 3;\n const R = new Int32Array(64);\n const r = new Uint8Array(64);\n\n // A port of poppler's IDCT method which in turn is taken from:\n // Christoph Loeffler, Adriaan Ligtenberg, George S. Moschytz,\n // \"Practical Fast 1-D DCT Algorithms with 11 Multiplications\",\n // IEEE Intl. Conf. 
on Acoustics, Speech & Signal Processing, 1989,\n // 988-991.\n function quantizeAndInverse(zz, dataOut, dataIn) {\n const qt = component.quantizationTable;\n let v0;\n let v1;\n let v2;\n let v3;\n let v4;\n let v5;\n let v6;\n let v7;\n let t;\n const p = dataIn;\n let i;\n\n // dequant\n for (i = 0; i < 64; i++) {\n p[i] = zz[i] * qt[i];\n }\n\n // inverse DCT on rows\n for (i = 0; i < 8; ++i) {\n const row = 8 * i;\n\n // check for all-zero AC coefficients\n if (p[1 + row] === 0 && p[2 + row] === 0 && p[3 + row] === 0\n && p[4 + row] === 0 && p[5 + row] === 0 && p[6 + row] === 0\n && p[7 + row] === 0) {\n t = ((dctSqrt2 * p[0 + row]) + 512) >> 10;\n p[0 + row] = t;\n p[1 + row] = t;\n p[2 + row] = t;\n p[3 + row] = t;\n p[4 + row] = t;\n p[5 + row] = t;\n p[6 + row] = t;\n p[7 + row] = t;\n continue; // eslint-disable-line no-continue\n }\n\n // stage 4\n v0 = ((dctSqrt2 * p[0 + row]) + 128) >> 8;\n v1 = ((dctSqrt2 * p[4 + row]) + 128) >> 8;\n v2 = p[2 + row];\n v3 = p[6 + row];\n v4 = ((dctSqrt1d2 * (p[1 + row] - p[7 + row])) + 128) >> 8;\n v7 = ((dctSqrt1d2 * (p[1 + row] + p[7 + row])) + 128) >> 8;\n v5 = p[3 + row] << 4;\n v6 = p[5 + row] << 4;\n\n // stage 3\n t = (v0 - v1 + 1) >> 1;\n v0 = (v0 + v1 + 1) >> 1;\n v1 = t;\n t = ((v2 * dctSin6) + (v3 * dctCos6) + 128) >> 8;\n v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 128) >> 8;\n v3 = t;\n t = (v4 - v6 + 1) >> 1;\n v4 = (v4 + v6 + 1) >> 1;\n v6 = t;\n t = (v7 + v5 + 1) >> 1;\n v5 = (v7 - v5 + 1) >> 1;\n v7 = t;\n\n // stage 2\n t = (v0 - v3 + 1) >> 1;\n v0 = (v0 + v3 + 1) >> 1;\n v3 = t;\n t = (v1 - v2 + 1) >> 1;\n v1 = (v1 + v2 + 1) >> 1;\n v2 = t;\n t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;\n v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;\n v7 = t;\n t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;\n v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;\n v6 = t;\n\n // stage 1\n p[0 + row] = v0 + v7;\n p[7 + row] = v0 - v7;\n p[1 + row] = v1 + v6;\n p[6 + row] = v1 - v6;\n p[2 + row] = v2 + v5;\n p[5 + row] = v2 - v5;\n p[3 + row] = v3 + v4;\n p[4 + row] = v3 - v4;\n }\n\n // inverse DCT on columns\n for (i = 0; i < 8; ++i) {\n const col = i;\n\n // check for all-zero AC coefficients\n if (p[(1 * 8) + col] === 0 && p[(2 * 8) + col] === 0 && p[(3 * 8) + col] === 0\n && p[(4 * 8) + col] === 0 && p[(5 * 8) + col] === 0 && p[(6 * 8) + col] === 0\n && p[(7 * 8) + col] === 0) {\n t = ((dctSqrt2 * dataIn[i + 0]) + 8192) >> 14;\n p[(0 * 8) + col] = t;\n p[(1 * 8) + col] = t;\n p[(2 * 8) + col] = t;\n p[(3 * 8) + col] = t;\n p[(4 * 8) + col] = t;\n p[(5 * 8) + col] = t;\n p[(6 * 8) + col] = t;\n p[(7 * 8) + col] = t;\n continue; // eslint-disable-line no-continue\n }\n\n // stage 4\n v0 = ((dctSqrt2 * p[(0 * 8) + col]) + 2048) >> 12;\n v1 = ((dctSqrt2 * p[(4 * 8) + col]) + 2048) >> 12;\n v2 = p[(2 * 8) + col];\n v3 = p[(6 * 8) + col];\n v4 = ((dctSqrt1d2 * (p[(1 * 8) + col] - p[(7 * 8) + col])) + 2048) >> 12;\n v7 = ((dctSqrt1d2 * (p[(1 * 8) + col] + p[(7 * 8) + col])) + 2048) >> 12;\n v5 = p[(3 * 8) + col];\n v6 = p[(5 * 8) + col];\n\n // stage 3\n t = (v0 - v1 + 1) >> 1;\n v0 = (v0 + v1 + 1) >> 1;\n v1 = t;\n t = ((v2 * dctSin6) + (v3 * dctCos6) + 2048) >> 12;\n v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 2048) >> 12;\n v3 = t;\n t = (v4 - v6 + 1) >> 1;\n v4 = (v4 + v6 + 1) >> 1;\n v6 = t;\n t = (v7 + v5 + 1) >> 1;\n v5 = (v7 - v5 + 1) >> 1;\n v7 = t;\n\n // stage 2\n t = (v0 - v3 + 1) >> 1;\n v0 = (v0 + v3 + 1) >> 1;\n v3 = t;\n t = (v1 - v2 + 1) >> 1;\n v1 = (v1 + v2 + 1) >> 1;\n v2 = t;\n t = ((v4 * dctSin3) + (v7 * 
dctCos3) + 2048) >> 12;\n v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;\n v7 = t;\n t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;\n v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;\n v6 = t;\n\n // stage 1\n p[(0 * 8) + col] = v0 + v7;\n p[(7 * 8) + col] = v0 - v7;\n p[(1 * 8) + col] = v1 + v6;\n p[(6 * 8) + col] = v1 - v6;\n p[(2 * 8) + col] = v2 + v5;\n p[(5 * 8) + col] = v2 - v5;\n p[(3 * 8) + col] = v3 + v4;\n p[(4 * 8) + col] = v3 - v4;\n }\n\n // convert to 8-bit integers\n for (i = 0; i < 64; ++i) {\n const sample = 128 + ((p[i] + 8) >> 4);\n if (sample < 0) {\n dataOut[i] = 0;\n } else if (sample > 0XFF) {\n dataOut[i] = 0xFF;\n } else {\n dataOut[i] = sample;\n }\n }\n }\n\n for (let blockRow = 0; blockRow < blocksPerColumn; blockRow++) {\n const scanLine = blockRow << 3;\n for (let i = 0; i < 8; i++) {\n lines.push(new Uint8Array(samplesPerLine));\n }\n for (let blockCol = 0; blockCol < blocksPerLine; blockCol++) {\n quantizeAndInverse(component.blocks[blockRow][blockCol], r, R);\n\n let offset = 0;\n const sample = blockCol << 3;\n for (let j = 0; j < 8; j++) {\n const line = lines[scanLine + j];\n for (let i = 0; i < 8; i++) {\n line[sample + i] = r[offset++];\n }\n }\n }\n }\n return lines;\n}\n\nclass JpegStreamReader {\n constructor() {\n this.jfif = null;\n this.adobe = null;\n\n this.quantizationTables = [];\n this.huffmanTablesAC = [];\n this.huffmanTablesDC = [];\n this.resetFrames();\n }\n\n resetFrames() {\n this.frames = [];\n }\n\n parse(data) {\n let offset = 0;\n // const { length } = data;\n function readUint16() {\n const value = (data[offset] << 8) | data[offset + 1];\n offset += 2;\n return value;\n }\n function readDataBlock() {\n const length = readUint16();\n const array = data.subarray(offset, offset + length - 2);\n offset += array.length;\n return array;\n }\n function prepareComponents(frame) {\n let maxH = 0;\n let maxV = 0;\n let component;\n let componentId;\n for (componentId in frame.components) {\n if (frame.components.hasOwnProperty(componentId)) {\n component = frame.components[componentId];\n if (maxH < component.h) {\n maxH = component.h;\n }\n if (maxV < component.v) {\n maxV = component.v;\n }\n }\n }\n const mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / maxH);\n const mcusPerColumn = Math.ceil(frame.scanLines / 8 / maxV);\n for (componentId in frame.components) {\n if (frame.components.hasOwnProperty(componentId)) {\n component = frame.components[componentId];\n const blocksPerLine = Math.ceil(Math.ceil(frame.samplesPerLine / 8) * component.h / maxH);\n const blocksPerColumn = Math.ceil(Math.ceil(frame.scanLines / 8) * component.v / maxV);\n const blocksPerLineForMcu = mcusPerLine * component.h;\n const blocksPerColumnForMcu = mcusPerColumn * component.v;\n const blocks = [];\n for (let i = 0; i < blocksPerColumnForMcu; i++) {\n const row = [];\n for (let j = 0; j < blocksPerLineForMcu; j++) {\n row.push(new Int32Array(64));\n }\n blocks.push(row);\n }\n component.blocksPerLine = blocksPerLine;\n component.blocksPerColumn = blocksPerColumn;\n component.blocks = blocks;\n }\n }\n frame.maxH = maxH;\n frame.maxV = maxV;\n frame.mcusPerLine = mcusPerLine;\n frame.mcusPerColumn = mcusPerColumn;\n }\n\n let fileMarker = readUint16();\n if (fileMarker !== 0xFFD8) { // SOI (Start of Image)\n throw new Error('SOI not found');\n }\n\n fileMarker = readUint16();\n while (fileMarker !== 0xFFD9) { // EOI (End of image)\n switch (fileMarker) {\n case 0xFF00: break;\n case 0xFFE0: // APP0 (Application Specific)\n case 0xFFE1: // 
APP1\n case 0xFFE2: // APP2\n case 0xFFE3: // APP3\n case 0xFFE4: // APP4\n case 0xFFE5: // APP5\n case 0xFFE6: // APP6\n case 0xFFE7: // APP7\n case 0xFFE8: // APP8\n case 0xFFE9: // APP9\n case 0xFFEA: // APP10\n case 0xFFEB: // APP11\n case 0xFFEC: // APP12\n case 0xFFED: // APP13\n case 0xFFEE: // APP14\n case 0xFFEF: // APP15\n case 0xFFFE: { // COM (Comment)\n const appData = readDataBlock();\n\n if (fileMarker === 0xFFE0) {\n if (appData[0] === 0x4A && appData[1] === 0x46 && appData[2] === 0x49\n && appData[3] === 0x46 && appData[4] === 0) { // 'JFIF\\x00'\n this.jfif = {\n version: { major: appData[5], minor: appData[6] },\n densityUnits: appData[7],\n xDensity: (appData[8] << 8) | appData[9],\n yDensity: (appData[10] << 8) | appData[11],\n thumbWidth: appData[12],\n thumbHeight: appData[13],\n thumbData: appData.subarray(14, 14 + (3 * appData[12] * appData[13])),\n };\n }\n }\n // TODO APP1 - Exif\n if (fileMarker === 0xFFEE) {\n if (appData[0] === 0x41 && appData[1] === 0x64 && appData[2] === 0x6F\n && appData[3] === 0x62 && appData[4] === 0x65 && appData[5] === 0) { // 'Adobe\\x00'\n this.adobe = {\n version: appData[6],\n flags0: (appData[7] << 8) | appData[8],\n flags1: (appData[9] << 8) | appData[10],\n transformCode: appData[11],\n };\n }\n }\n break;\n }\n\n case 0xFFDB: { // DQT (Define Quantization Tables)\n const quantizationTablesLength = readUint16();\n const quantizationTablesEnd = quantizationTablesLength + offset - 2;\n while (offset < quantizationTablesEnd) {\n const quantizationTableSpec = data[offset++];\n const tableData = new Int32Array(64);\n if ((quantizationTableSpec >> 4) === 0) { // 8 bit values\n for (let j = 0; j < 64; j++) {\n const z = dctZigZag[j];\n tableData[z] = data[offset++];\n }\n } else if ((quantizationTableSpec >> 4) === 1) { // 16 bit\n for (let j = 0; j < 64; j++) {\n const z = dctZigZag[j];\n tableData[z] = readUint16();\n }\n } else {\n throw new Error('DQT: invalid table spec');\n }\n this.quantizationTables[quantizationTableSpec & 15] = tableData;\n }\n break;\n }\n\n case 0xFFC0: // SOF0 (Start of Frame, Baseline DCT)\n case 0xFFC1: // SOF1 (Start of Frame, Extended DCT)\n case 0xFFC2: { // SOF2 (Start of Frame, Progressive DCT)\n readUint16(); // skip data length\n const frame = {\n extended: (fileMarker === 0xFFC1),\n progressive: (fileMarker === 0xFFC2),\n precision: data[offset++],\n scanLines: readUint16(),\n samplesPerLine: readUint16(),\n components: {},\n componentsOrder: [],\n };\n\n const componentsCount = data[offset++];\n let componentId;\n // let maxH = 0;\n // let maxV = 0;\n for (let i = 0; i < componentsCount; i++) {\n componentId = data[offset];\n const h = data[offset + 1] >> 4;\n const v = data[offset + 1] & 15;\n const qId = data[offset + 2];\n frame.componentsOrder.push(componentId);\n frame.components[componentId] = {\n h,\n v,\n quantizationIdx: qId,\n };\n offset += 3;\n }\n prepareComponents(frame);\n this.frames.push(frame);\n break;\n }\n\n case 0xFFC4: { // DHT (Define Huffman Tables)\n const huffmanLength = readUint16();\n for (let i = 2; i < huffmanLength;) {\n const huffmanTableSpec = data[offset++];\n const codeLengths = new Uint8Array(16);\n let codeLengthSum = 0;\n for (let j = 0; j < 16; j++, offset++) {\n codeLengths[j] = data[offset];\n codeLengthSum += codeLengths[j];\n }\n const huffmanValues = new Uint8Array(codeLengthSum);\n for (let j = 0; j < codeLengthSum; j++, offset++) {\n huffmanValues[j] = data[offset];\n }\n i += 17 + codeLengthSum;\n\n if ((huffmanTableSpec >> 4) === 0) {\n 
this.huffmanTablesDC[huffmanTableSpec & 15] = buildHuffmanTable(\n codeLengths, huffmanValues,\n );\n } else {\n this.huffmanTablesAC[huffmanTableSpec & 15] = buildHuffmanTable(\n codeLengths, huffmanValues,\n );\n }\n }\n break;\n }\n\n case 0xFFDD: // DRI (Define Restart Interval)\n readUint16(); // skip data length\n this.resetInterval = readUint16();\n break;\n\n case 0xFFDA: { // SOS (Start of Scan)\n readUint16(); // skip length\n const selectorsCount = data[offset++];\n const components = [];\n const frame = this.frames[0];\n for (let i = 0; i < selectorsCount; i++) {\n const component = frame.components[data[offset++]];\n const tableSpec = data[offset++];\n component.huffmanTableDC = this.huffmanTablesDC[tableSpec >> 4];\n component.huffmanTableAC = this.huffmanTablesAC[tableSpec & 15];\n components.push(component);\n }\n const spectralStart = data[offset++];\n const spectralEnd = data[offset++];\n const successiveApproximation = data[offset++];\n const processed = decodeScan(data, offset,\n frame, components, this.resetInterval,\n spectralStart, spectralEnd,\n successiveApproximation >> 4, successiveApproximation & 15);\n offset += processed;\n break;\n }\n\n case 0xFFFF: // Fill bytes\n if (data[offset] !== 0xFF) { // Avoid skipping a valid marker.\n offset--;\n }\n break;\n\n default:\n if (data[offset - 3] === 0xFF\n && data[offset - 2] >= 0xC0 && data[offset - 2] <= 0xFE) {\n // could be incorrect encoding -- last 0xFF byte of the previous\n // block was eaten by the encoder\n offset -= 3;\n break;\n }\n throw new Error(`unknown JPEG marker ${fileMarker.toString(16)}`);\n }\n fileMarker = readUint16();\n }\n }\n\n getResult() {\n const { frames } = this;\n if (this.frames.length === 0) {\n throw new Error('no frames were decoded');\n } else if (this.frames.length > 1) {\n console.warn('more than one frame is not supported');\n }\n\n // set each frame's components quantization table\n for (let i = 0; i < this.frames.length; i++) {\n const cp = this.frames[i].components;\n for (const j of Object.keys(cp)) {\n cp[j].quantizationTable = this.quantizationTables[cp[j].quantizationIdx];\n delete cp[j].quantizationIdx;\n }\n }\n\n const frame = frames[0];\n const { components, componentsOrder } = frame;\n const outComponents = [];\n const width = frame.samplesPerLine;\n const height = frame.scanLines;\n\n for (let i = 0; i < componentsOrder.length; i++) {\n const component = components[componentsOrder[i]];\n outComponents.push({\n lines: buildComponentData(frame, component),\n scaleX: component.h / frame.maxH,\n scaleY: component.v / frame.maxV,\n });\n }\n\n const out = new Uint8Array(width * height * outComponents.length);\n let oi = 0;\n for (let y = 0; y < height; ++y) {\n for (let x = 0; x < width; ++x) {\n for (let i = 0; i < outComponents.length; ++i) {\n const component = outComponents[i];\n out[oi] = component.lines[0 | y * component.scaleY][0 | x * component.scaleX];\n ++oi;\n }\n }\n }\n return out;\n }\n}\n\nclass JpegDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n constructor(fileDirectory) {\n super();\n this.reader = new JpegStreamReader();\n if (fileDirectory.JPEGTables) {\n this.reader.parse(fileDirectory.JPEGTables);\n }\n }\n\n decodeBlock(buffer) {\n this.reader.resetFrames();\n this.reader.parse(new Uint8Array(buffer));\n return this.reader.getResult().buffer;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/jpeg.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/lzw.js":
/*!*****************************************************!*\
  !*** ./node_modules/geotiff/src/compression/lzw.js ***!
  \*****************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
212eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return LZWDecoder; });\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./basedecoder */ \"./node_modules/geotiff/src/compression/basedecoder.js\");\n\n\n\nconst MIN_BITS = 9;\nconst CLEAR_CODE = 256; // clear code\nconst EOI_CODE = 257; // end of information\nconst MAX_BYTELENGTH = 12;\n\nfunction getByte(array, position, length) {\n const d = position % 8;\n const a = Math.floor(position / 8);\n const de = 8 - d;\n const ef = (position + length) - ((a + 1) * 8);\n let fg = (8 * (a + 2)) - (position + length);\n const dg = ((a + 2) * 8) - position;\n fg = Math.max(0, fg);\n if (a >= array.length) {\n console.warn('ran off the end of the buffer before finding EOI_CODE (end on input code)');\n return EOI_CODE;\n }\n let chunk1 = array[a] & ((2 ** (8 - d)) - 1);\n chunk1 <<= (length - de);\n let chunks = chunk1;\n if (a + 1 < array.length) {\n let chunk2 = array[a + 1] >>> fg;\n chunk2 <<= Math.max(0, (length - dg));\n chunks += chunk2;\n }\n if (ef > 8 && a + 2 < array.length) {\n const hi = ((a + 3) * 8) - (position + length);\n const chunk3 = array[a + 2] >>> hi;\n chunks += chunk3;\n }\n return chunks;\n}\n\nfunction appendReversed(dest, source) {\n for (let i = source.length - 1; i >= 0; i--) {\n dest.push(source[i]);\n }\n return dest;\n}\n\nfunction decompress(input) {\n const dictionaryIndex = new Uint16Array(4093);\n const dictionaryChar = new Uint8Array(4093);\n for (let i = 0; i <= 257; i++) {\n dictionaryIndex[i] = 4096;\n dictionaryChar[i] = i;\n }\n let dictionaryLength = 258;\n let byteLength = MIN_BITS;\n let position = 0;\n\n function initDictionary() {\n dictionaryLength = 258;\n byteLength = MIN_BITS;\n }\n function getNext(array) {\n const byte = getByte(array, position, byteLength);\n position += byteLength;\n return byte;\n }\n function addToDictionary(i, c) {\n dictionaryChar[dictionaryLength] = c;\n dictionaryIndex[dictionaryLength] = i;\n dictionaryLength++;\n return dictionaryLength - 1;\n }\n function getDictionaryReversed(n) {\n const rev = [];\n for (let i = n; i !== 4096; i = dictionaryIndex[i]) {\n rev.push(dictionaryChar[i]);\n }\n return rev;\n }\n\n const result = [];\n initDictionary();\n const array = new Uint8Array(input);\n let code = getNext(array);\n let oldCode;\n while (code !== EOI_CODE) {\n if (code === CLEAR_CODE) {\n initDictionary();\n code = getNext(array);\n while (code === CLEAR_CODE) {\n code = getNext(array);\n }\n\n if (code === EOI_CODE) {\n break;\n } else if (code > CLEAR_CODE) {\n throw new Error(`corrupted code at scanline ${code}`);\n } else {\n const val = getDictionaryReversed(code);\n appendReversed(result, val);\n oldCode = code;\n }\n } else if (code < dictionaryLength) {\n const val = getDictionaryReversed(code);\n appendReversed(result, val);\n addToDictionary(oldCode, val[val.length - 1]);\n oldCode = code;\n } else {\n const oldVal = getDictionaryReversed(oldCode);\n if (!oldVal) {\n throw new Error(`Bogus entry. 
Not in dictionary, ${oldCode} / ${dictionaryLength}, position: ${position}`);\n }\n appendReversed(result, oldVal);\n result.push(oldVal[oldVal.length - 1]);\n addToDictionary(oldCode, oldVal[oldVal.length - 1]);\n oldCode = code;\n }\n\n if (dictionaryLength + 1 >= (2 ** byteLength)) {\n if (byteLength === MAX_BYTELENGTH) {\n oldCode = undefined;\n } else {\n byteLength++;\n }\n }\n code = getNext(array);\n }\n return new Uint8Array(result);\n}\n\nclass LZWDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n decodeBlock(buffer) {\n return decompress(buffer, false).buffer;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/lzw.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/packbits.js":
/*!**********************************************************!*\
  !*** ./node_modules/geotiff/src/compression/packbits.js ***!
  \**********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return PackbitsDecoder; });\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./basedecoder */ \"./node_modules/geotiff/src/compression/basedecoder.js\");\n\n\n\nclass PackbitsDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n decodeBlock(buffer) {\n const dataView = new DataView(buffer);\n const out = [];\n\n for (let i = 0; i < buffer.byteLength; ++i) {\n let header = dataView.getInt8(i);\n if (header < 0) {\n const next = dataView.getUint8(i + 1);\n header = -header;\n for (let j = 0; j <= header; ++j) {\n out.push(next);\n }\n i += 1;\n } else {\n for (let j = 0; j <= header; ++j) {\n out.push(dataView.getUint8(i + j + 1));\n }\n i += header + 1;\n }\n }\n return new Uint8Array(out).buffer;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/packbits.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/compression/raw.js":
/*!*****************************************************!*\
  !*** ./node_modules/geotiff/src/compression/raw.js ***!
  \*****************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return RawDecoder; });\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./basedecoder */ \"./node_modules/geotiff/src/compression/basedecoder.js\");\n\n\n\nclass RawDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n decodeBlock(buffer) {\n return buffer;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/raw.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/dataslice.js":
/*!***********************************************!*\
  !*** ./node_modules/geotiff/src/dataslice.js ***!
  \***********************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
248eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return DataSlice; });\nclass DataSlice {\n constructor(arrayBuffer, sliceOffset, littleEndian, bigTiff) {\n this._dataView = new DataView(arrayBuffer);\n this._sliceOffset = sliceOffset;\n this._littleEndian = littleEndian;\n this._bigTiff = bigTiff;\n }\n\n get sliceOffset() {\n return this._sliceOffset;\n }\n\n get sliceTop() {\n return this._sliceOffset + this.buffer.byteLength;\n }\n\n get littleEndian() {\n return this._littleEndian;\n }\n\n get bigTiff() {\n return this._bigTiff;\n }\n\n get buffer() {\n return this._dataView.buffer;\n }\n\n covers(offset, length) {\n return this.sliceOffset <= offset && this.sliceTop >= offset + length;\n }\n\n readUint8(offset) {\n return this._dataView.getUint8(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readInt8(offset) {\n return this._dataView.getInt8(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readUint16(offset) {\n return this._dataView.getUint16(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readInt16(offset) {\n return this._dataView.getInt16(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readUint32(offset) {\n return this._dataView.getUint32(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readInt32(offset) {\n return this._dataView.getInt32(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readFloat32(offset) {\n return this._dataView.getFloat32(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readFloat64(offset) {\n return this._dataView.getFloat64(\n offset - this._sliceOffset, this._littleEndian,\n );\n }\n\n readUint64(offset) {\n const left = this.readUint32(offset);\n const right = this.readUint32(offset + 4);\n let combined;\n if (this._littleEndian) {\n combined = left + 2 ** 32 * right;\n if (!Number.isSafeInteger(combined)) {\n throw new Error(\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`,\n );\n }\n return combined;\n }\n combined = 2 ** 32 * left + right;\n if (!Number.isSafeInteger(combined)) {\n throw new Error(\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`,\n );\n }\n\n return combined;\n }\n\n // adapted from https://stackoverflow.com/a/55338384/8060591\n readInt64(offset) {\n let value = 0;\n const isNegative =\n (this._dataView.getUint8(offset + (this._littleEndian ? 7 : 0)) & 0x80) >\n 0;\n let carrying = true;\n for (let i = 0; i < 8; i++) {\n let byte = this._dataView.getUint8(\n offset + (this._littleEndian ? i : 7 - i)\n );\n if (isNegative) {\n if (carrying) {\n if (byte !== 0x00) {\n byte = ~(byte - 1) & 0xff;\n carrying = false;\n }\n } else {\n byte = ~byte & 0xff;\n }\n }\n value += byte * 256 ** i;\n }\n if (isNegative) {\n value = -value;\n }\n return value\n }\n\n readOffset(offset) {\n if (this._bigTiff) {\n return this.readUint64(offset);\n }\n return this.readUint32(offset);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/dataslice.js?");

/***/ }),

/***/ "./node_modules/geotiff/src/dataview64.js":
/*!************************************************!*\
  !*** ./node_modules/geotiff/src/dataview64.js ***!
  \************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
260eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return DataView64; });\nclass DataView64 {\n constructor(arrayBuffer) {\n this._dataView = new DataView(arrayBuffer);\n }\n\n get buffer() {\n return this._dataView.buffer;\n }\n\n getUint64(offset, littleEndian) {\n const left = this.getUint32(offset, littleEndian);\n const right = this.getUint32(offset + 4, littleEndian);\n let combined;\n if (littleEndian) {\n combined = left + 2 ** 32 * right;\n if (!Number.isSafeInteger(combined)) {\n throw new Error(\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`\n );\n }\n return combined;\n }\n combined = 2 ** 32 * left + right;\n if (!Number.isSafeInteger(combined)) {\n throw new Error(\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`\n );\n }\n\n return combined;\n }\n\n // adapted from https://stackoverflow.com/a/55338384/8060591\n getInt64(offset, littleEndian) {\n let value = 0;\n const isNegative =\n (this._dataView.getUint8(offset + (littleEndian ? 7 : 0)) & 0x80) > 0;\n let carrying = true;\n for (let i = 0; i < 8; i++) {\n let byte = this._dataView.getUint8(offset + (littleEndian ? i : 7 - i));\n if (isNegative) {\n if (carrying) {\n if (byte !== 0x00) {\n byte = ~(byte - 1) & 0xff;\n carrying = false;\n }\n } else {\n byte = ~byte & 0xff;\n }\n }\n value += byte * 256 ** i;\n }\n if (isNegative) {\n value = -value;\n }\n return value;\n }\n\n getUint8(offset, littleEndian) {\n return this._dataView.getUint8(offset, littleEndian);\n }\n\n getInt8(offset, littleEndian) {\n return this._dataView.getInt8(offset, littleEndian);\n }\n\n getUint16(offset, littleEndian) {\n return this._dataView.getUint16(offset, littleEndian);\n }\n\n getInt16(offset, littleEndian) {\n return this._dataView.getInt16(offset, littleEndian);\n }\n\n getUint32(offset, littleEndian) {\n return this._dataView.getUint32(offset, littleEndian);\n }\n\n getInt32(offset, littleEndian) {\n return this._dataView.getInt32(offset, littleEndian);\n }\n\n getFloat32(offset, littleEndian) {\n return this._dataView.getFloat32(offset, littleEndian);\n }\n\n getFloat64(offset, littleEndian) {\n return this._dataView.getFloat64(offset, littleEndian);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/dataview64.js?");
261
262/***/ }),
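/*
 * A short illustrative sketch of the 64-bit helpers in DataView64 above, which
 * assemble Uint64/Int64 values from two 32-bit reads and return plain JS
 * numbers. The buffer contents below are made up for the example.
 *
 *   const buf = new ArrayBuffer(8);
 *   const dv = new DataView(buf);
 *   dv.setUint32(0, 1, true);      // low 32 bits (little endian)
 *   dv.setUint32(4, 2, true);      // high 32 bits
 *   const view64 = new DataView64(buf);
 *   view64.getUint64(0, true);     // 1 + 2 * 2 ** 32 === 8589934593
 *   // getUint64 throws once the combined value would exceed
 *   // Number.MAX_SAFE_INTEGER, because the result is a double.
 */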
263
264/***/ "./node_modules/geotiff/src/geotiff.js":
265/*!*********************************************!*\
266 !*** ./node_modules/geotiff/src/geotiff.js ***!
267 \*********************************************/
268/*! exports provided: globals, rgb, getDecoder, setLogger, GeoTIFF, default, MultiGeoTIFF, fromUrl, fromArrayBuffer, fromFile, fromBlob, fromUrls, writeArrayBuffer, Pool */
269/***/ (function(module, __webpack_exports__, __webpack_require__) {
270
271"use strict";
272eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"GeoTIFF\", function() { return GeoTIFF; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"MultiGeoTIFF\", function() { return MultiGeoTIFF; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromUrl\", function() { return fromUrl; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromArrayBuffer\", function() { return fromArrayBuffer; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromFile\", function() { return fromFile; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromBlob\", function() { return fromBlob; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromUrls\", function() { return fromUrls; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"writeArrayBuffer\", function() { return writeArrayBuffer; });\n/* harmony import */ var _geotiffimage__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./geotiffimage */ \"./node_modules/geotiff/src/geotiffimage.js\");\n/* harmony import */ var _dataview64__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./dataview64 */ \"./node_modules/geotiff/src/dataview64.js\");\n/* harmony import */ var _dataslice__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dataslice */ \"./node_modules/geotiff/src/dataslice.js\");\n/* harmony import */ var _pool__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./pool */ \"./node_modules/geotiff/src/pool.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Pool\", function() { return _pool__WEBPACK_IMPORTED_MODULE_3__[\"default\"]; });\n\n/* harmony import */ var _source__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./source */ \"./node_modules/geotiff/src/source.js\");\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./globals */ \"./node_modules/geotiff/src/globals.js\");\n/* harmony import */ var _geotiffwriter__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./geotiffwriter */ \"./node_modules/geotiff/src/geotiffwriter.js\");\n/* harmony reexport (module object) */ __webpack_require__.d(__webpack_exports__, \"globals\", function() { return _globals__WEBPACK_IMPORTED_MODULE_5__; });\n/* harmony import */ var _rgb__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./rgb */ \"./node_modules/geotiff/src/rgb.js\");\n/* harmony reexport (module object) */ __webpack_require__.d(__webpack_exports__, \"rgb\", function() { return _rgb__WEBPACK_IMPORTED_MODULE_7__; });\n/* harmony import */ var _compression__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./compression */ \"./node_modules/geotiff/src/compression/index.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"getDecoder\", function() { return _compression__WEBPACK_IMPORTED_MODULE_8__[\"getDecoder\"]; });\n\n/* harmony import */ var _logging__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! 
./logging */ \"./node_modules/geotiff/src/logging.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"setLogger\", function() { return _logging__WEBPACK_IMPORTED_MODULE_9__[\"setLogger\"]; });\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfunction getFieldTypeLength(fieldType) {\n switch (fieldType) {\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].BYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SBYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].UNDEFINED:\n return 1;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SHORT: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SSHORT:\n return 2;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].LONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SLONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].FLOAT: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].IFD:\n return 4;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].RATIONAL: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SRATIONAL: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].DOUBLE:\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].LONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SLONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].IFD8:\n return 8;\n default:\n throw new RangeError(`Invalid field type: ${fieldType}`);\n }\n}\n\nfunction parseGeoKeyDirectory(fileDirectory) {\n const rawGeoKeyDirectory = fileDirectory.GeoKeyDirectory;\n if (!rawGeoKeyDirectory) {\n return null;\n }\n\n const geoKeyDirectory = {};\n for (let i = 4; i <= rawGeoKeyDirectory[3] * 4; i += 4) {\n const key = _globals__WEBPACK_IMPORTED_MODULE_5__[\"geoKeyNames\"][rawGeoKeyDirectory[i]];\n const location = (rawGeoKeyDirectory[i + 1])\n ? 
(_globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTagNames\"][rawGeoKeyDirectory[i + 1]]) : null;\n const count = rawGeoKeyDirectory[i + 2];\n const offset = rawGeoKeyDirectory[i + 3];\n\n let value = null;\n if (!location) {\n value = offset;\n } else {\n value = fileDirectory[location];\n if (typeof value === 'undefined' || value === null) {\n throw new Error(`Could not get value of geoKey '${key}'.`);\n } else if (typeof value === 'string') {\n value = value.substring(offset, offset + count - 1);\n } else if (value.subarray) {\n value = value.subarray(offset, offset + count);\n if (count === 1) {\n value = value[0];\n }\n }\n }\n geoKeyDirectory[key] = value;\n }\n return geoKeyDirectory;\n}\n\nfunction getValues(dataSlice, fieldType, count, offset) {\n let values = null;\n let readMethod = null;\n const fieldTypeLength = getFieldTypeLength(fieldType);\n\n switch (fieldType) {\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].BYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].UNDEFINED:\n values = new Uint8Array(count); readMethod = dataSlice.readUint8;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SBYTE:\n values = new Int8Array(count); readMethod = dataSlice.readInt8;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SHORT:\n values = new Uint16Array(count); readMethod = dataSlice.readUint16;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SSHORT:\n values = new Int16Array(count); readMethod = dataSlice.readInt16;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].LONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].IFD:\n values = new Uint32Array(count); readMethod = dataSlice.readUint32;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SLONG:\n values = new Int32Array(count); readMethod = dataSlice.readInt32;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].LONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].IFD8:\n values = new Array(count); readMethod = dataSlice.readUint64;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SLONG8:\n values = new Array(count); readMethod = dataSlice.readInt64;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].RATIONAL:\n values = new Uint32Array(count * 2); readMethod = dataSlice.readUint32;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SRATIONAL:\n values = new Int32Array(count * 2); readMethod = dataSlice.readInt32;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].FLOAT:\n values = new Float32Array(count); readMethod = dataSlice.readFloat32;\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].DOUBLE:\n values = new Float64Array(count); readMethod = dataSlice.readFloat64;\n break;\n default:\n throw new RangeError(`Invalid field type: ${fieldType}`);\n }\n\n // normal fields\n if (!(fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].RATIONAL || fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SRATIONAL)) {\n for (let i = 0; i < count; ++i) {\n values[i] = readMethod.call(\n dataSlice, offset + (i * fieldTypeLength),\n );\n }\n } else { // RATIONAL or SRATIONAL\n for (let i = 0; i < count; i += 2) {\n values[i] = readMethod.call(\n dataSlice, offset + (i * fieldTypeLength),\n );\n values[i + 1] = readMethod.call(\n dataSlice, offset + ((i * fieldTypeLength) + 4),\n );\n }\n 
}\n\n if (fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII) {\n return String.fromCharCode.apply(null, values);\n }\n return values;\n}\n\n/**\n * Data class to store the parsed file directory, geo key directory and\n * offset to the next IFD\n */\nclass ImageFileDirectory {\n constructor(fileDirectory, geoKeyDirectory, nextIFDByteOffset) {\n this.fileDirectory = fileDirectory;\n this.geoKeyDirectory = geoKeyDirectory;\n this.nextIFDByteOffset = nextIFDByteOffset;\n }\n}\n\n/**\n * Error class for cases when an IFD index was requested, that does not exist\n * in the file.\n */\nclass GeoTIFFImageIndexError extends Error {\n constructor(index) {\n super(`No image at index ${index}`);\n this.index = index;\n }\n}\n\n\nclass GeoTIFFBase {\n /**\n * (experimental) Reads raster data from the best fitting image. This function uses\n * the image with the lowest resolution that is still a higher resolution than the\n * requested resolution.\n * When specified, the `bbox` option is translated to the `window` option and the\n * `resX` and `resY` to `width` and `height` respectively.\n * Then, the [readRasters]{@link GeoTIFFImage#readRasters} method of the selected\n * image is called and the result returned.\n * @see GeoTIFFImage.readRasters\n * @param {Object} [options={}] optional parameters\n * @param {Array} [options.window=whole image] the subset to read data from.\n * @param {Array} [options.bbox=whole image] the subset to read data from in\n * geographical coordinates.\n * @param {Array} [options.samples=all samples] the selection of samples to read from.\n * @param {Boolean} [options.interleave=false] whether the data shall be read\n * in one single array or separate\n * arrays.\n * @param {Number} [options.pool=null] The optional decoder pool to use.\n * @param {Number} [options.width] The desired width of the output. When the width is not the\n * same as the images, resampling will be performed.\n * @param {Number} [options.height] The desired height of the output. When the width is not the\n * same as the images, resampling will be performed.\n * @param {String} [options.resampleMethod='nearest'] The desired resampling method.\n * @param {Number|Number[]} [options.fillValue] The value to use for parts of the image\n * outside of the images extent. 
When multiple\n * samples are requested, an array of fill values\n * can be passed.\n * @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise\n */\n async readRasters(options = {}) {\n const { window: imageWindow, width, height } = options;\n let { resX, resY, bbox } = options;\n\n const firstImage = await this.getImage();\n let usedImage = firstImage;\n const imageCount = await this.getImageCount();\n const imgBBox = firstImage.getBoundingBox();\n\n if (imageWindow && bbox) {\n throw new Error('Both \"bbox\" and \"window\" passed.');\n }\n\n // if width/height is passed, transform it to resolution\n if (width || height) {\n // if we have an image window (pixel coordinates), transform it to a BBox\n // using the origin/resolution of the first image.\n if (imageWindow) {\n const [oX, oY] = firstImage.getOrigin();\n const [rX, rY] = firstImage.getResolution();\n\n bbox = [\n oX + (imageWindow[0] * rX),\n oY + (imageWindow[1] * rY),\n oX + (imageWindow[2] * rX),\n oY + (imageWindow[3] * rY),\n ];\n }\n\n // if we have a bbox (or calculated one)\n\n const usedBBox = bbox || imgBBox;\n\n if (width) {\n if (resX) {\n throw new Error('Both width and resX passed');\n }\n resX = (usedBBox[2] - usedBBox[0]) / width;\n }\n if (height) {\n if (resY) {\n throw new Error('Both width and resY passed');\n }\n resY = (usedBBox[3] - usedBBox[1]) / height;\n }\n }\n\n // if resolution is set or calculated, try to get the image with the worst acceptable resolution\n if (resX || resY) {\n const allImages = [];\n for (let i = 0; i < imageCount; ++i) {\n const image = await this.getImage(i);\n const { SubfileType: subfileType, NewSubfileType: newSubfileType } = image.fileDirectory;\n if (i === 0 || subfileType === 2 || newSubfileType & 1) {\n allImages.push(image);\n }\n }\n\n allImages.sort((a, b) => a.getWidth() - b.getWidth());\n for (let i = 0; i < allImages.length; ++i) {\n const image = allImages[i];\n const imgResX = (imgBBox[2] - imgBBox[0]) / image.getWidth();\n const imgResY = (imgBBox[3] - imgBBox[1]) / image.getHeight();\n\n usedImage = image;\n if ((resX && resX > imgResX) || (resY && resY > imgResY)) {\n break;\n }\n }\n }\n\n let wnd = imageWindow;\n if (bbox) {\n const [oX, oY] = firstImage.getOrigin();\n const [imageResX, imageResY] = usedImage.getResolution(firstImage);\n\n wnd = [\n Math.round((bbox[0] - oX) / imageResX),\n Math.round((bbox[1] - oY) / imageResY),\n Math.round((bbox[2] - oX) / imageResX),\n Math.round((bbox[3] - oY) / imageResY),\n ];\n wnd = [\n Math.min(wnd[0], wnd[2]),\n Math.min(wnd[1], wnd[3]),\n Math.max(wnd[0], wnd[2]),\n Math.max(wnd[1], wnd[3]),\n ];\n }\n\n return usedImage.readRasters({ ...options, window: wnd });\n }\n}\n\n\n/**\n * The abstraction for a whole GeoTIFF file.\n * @augments GeoTIFFBase\n */\nclass GeoTIFF extends GeoTIFFBase {\n /**\n * @constructor\n * @param {Source} source The datasource to read from.\n * @param {Boolean} littleEndian Whether the image uses little endian.\n * @param {Boolean} bigTiff Whether the image uses bigTIFF conventions.\n * @param {Number} firstIFDOffset The numeric byte-offset from the start of the image\n * to the first IFD.\n * @param {Object} [options] further options.\n * @param {Boolean} [options.cache=false] whether or not decoded tiles shall be cached.\n */\n constructor(source, littleEndian, bigTiff, firstIFDOffset, options = {}) {\n super();\n this.source = source;\n this.littleEndian = littleEndian;\n this.bigTiff = bigTiff;\n this.firstIFDOffset = firstIFDOffset;\n this.cache = 
options.cache || false;\n this.ifdRequests = [];\n this.ghostValues = null;\n }\n\n async getSlice(offset, size) {\n const fallbackSize = this.bigTiff ? 4048 : 1024;\n return new _dataslice__WEBPACK_IMPORTED_MODULE_2__[\"default\"](\n await this.source.fetch(\n offset, typeof size !== 'undefined' ? size : fallbackSize,\n ), offset, this.littleEndian, this.bigTiff,\n );\n }\n\n /**\n * Instructs to parse an image file directory at the given file offset.\n * As there is no way to ensure that a location is indeed the start of an IFD,\n * this function must be called with caution (e.g only using the IFD offsets from\n * the headers or other IFDs).\n * @param {number} offset the offset to parse the IFD at\n * @returns {ImageFileDirectory} the parsed IFD\n */\n async parseFileDirectoryAt(offset) {\n const entrySize = this.bigTiff ? 20 : 12;\n const offsetSize = this.bigTiff ? 8 : 2;\n\n let dataSlice = await this.getSlice(offset);\n const numDirEntries = this.bigTiff ?\n dataSlice.readUint64(offset) :\n dataSlice.readUint16(offset);\n\n // if the slice does not cover the whole IFD, request a bigger slice, where the\n // whole IFD fits: num of entries + n x tag length + offset to next IFD\n const byteSize = (numDirEntries * entrySize) + (this.bigTiff ? 16 : 6);\n if (!dataSlice.covers(offset, byteSize)) {\n dataSlice = await this.getSlice(offset, byteSize);\n }\n\n const fileDirectory = {};\n\n // loop over the IFD and create a file directory object\n let i = offset + (this.bigTiff ? 8 : 2);\n for (let entryCount = 0; entryCount < numDirEntries; i += entrySize, ++entryCount) {\n const fieldTag = dataSlice.readUint16(i);\n const fieldType = dataSlice.readUint16(i + 2);\n const typeCount = this.bigTiff ?\n dataSlice.readUint64(i + 4) :\n dataSlice.readUint32(i + 4);\n\n let fieldValues;\n let value;\n const fieldTypeLength = getFieldTypeLength(fieldType);\n const valueOffset = i + (this.bigTiff ? 12 : 8);\n\n // check whether the value is directly encoded in the tag or refers to a\n // different external byte range\n if (fieldTypeLength * typeCount <= (this.bigTiff ? 
8 : 4)) {\n fieldValues = getValues(dataSlice, fieldType, typeCount, valueOffset);\n } else {\n // resolve the reference to the actual byte range\n const actualOffset = dataSlice.readOffset(valueOffset);\n const length = getFieldTypeLength(fieldType) * typeCount;\n\n // check, whether we actually cover the referenced byte range; if not,\n // request a new slice of bytes to read from it\n if (dataSlice.covers(actualOffset, length)) {\n fieldValues = getValues(dataSlice, fieldType, typeCount, actualOffset);\n } else {\n const fieldDataSlice = await this.getSlice(actualOffset, length);\n fieldValues = getValues(fieldDataSlice, fieldType, typeCount, actualOffset);\n }\n }\n\n // unpack single values from the array\n if (typeCount === 1 && _globals__WEBPACK_IMPORTED_MODULE_5__[\"arrayFields\"].indexOf(fieldTag) === -1 &&\n !(fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].RATIONAL || fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].SRATIONAL)) {\n value = fieldValues[0];\n } else {\n value = fieldValues;\n }\n\n // write the tags value to the file directly\n fileDirectory[_globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTagNames\"][fieldTag]] = value;\n }\n const geoKeyDirectory = parseGeoKeyDirectory(fileDirectory);\n const nextIFDByteOffset = dataSlice.readOffset(\n offset + offsetSize + (entrySize * numDirEntries),\n );\n\n return new ImageFileDirectory(\n fileDirectory,\n geoKeyDirectory,\n nextIFDByteOffset,\n );\n }\n\n async requestIFD(index) {\n // see if we already have that IFD index requested.\n if (this.ifdRequests[index]) {\n // attach to an already requested IFD\n return this.ifdRequests[index];\n } else if (index === 0) {\n // special case for index 0\n this.ifdRequests[index] = this.parseFileDirectoryAt(this.firstIFDOffset);\n return this.ifdRequests[index];\n } else if (!this.ifdRequests[index - 1]) {\n // if the previous IFD was not yet loaded, load that one first\n // this is the recursive call.\n try {\n this.ifdRequests[index - 1] = this.requestIFD(index - 1);\n } catch (e) {\n // if the previous one already was an index error, rethrow\n // with the current index\n if (e instanceof GeoTIFFImageIndexError) {\n throw new GeoTIFFImageIndexError(index);\n }\n // rethrow anything else\n throw e;\n }\n }\n // if the previous IFD was loaded, we can finally fetch the one we are interested in.\n // we need to wrap this in an IIFE, otherwise this.ifdRequests[index] would be delayed\n this.ifdRequests[index] = (async () => {\n const previousIfd = await this.ifdRequests[index - 1];\n if (previousIfd.nextIFDByteOffset === 0) {\n throw new GeoTIFFImageIndexError(index);\n }\n return this.parseFileDirectoryAt(previousIfd.nextIFDByteOffset);\n })();\n return this.ifdRequests[index];\n }\n\n /**\n * Get the n-th internal subfile of an image. 
By default, the first is returned.\n *\n * @param {Number} [index=0] the index of the image to return.\n * @returns {GeoTIFFImage} the image at the given index\n */\n async getImage(index = 0) {\n const ifd = await this.requestIFD(index);\n return new _geotiffimage__WEBPACK_IMPORTED_MODULE_0__[\"default\"](\n ifd.fileDirectory, ifd.geoKeyDirectory,\n this.dataView, this.littleEndian, this.cache, this.source,\n );\n }\n\n /**\n * Returns the count of the internal subfiles.\n *\n * @returns {Number} the number of internal subfile images\n */\n async getImageCount() {\n let index = 0;\n // loop until we run out of IFDs\n let hasNext = true;\n while (hasNext) {\n try {\n await this.requestIFD(index);\n ++index;\n } catch (e) {\n if (e instanceof GeoTIFFImageIndexError) {\n hasNext = false;\n } else {\n throw e;\n }\n }\n }\n return index;\n }\n\n /**\n * Get the values of the COG ghost area as a parsed map.\n * See https://gdal.org/drivers/raster/cog.html#header-ghost-area for reference\n * @returns {Object} the parsed ghost area or null, if no such area was found\n */\n async getGhostValues() {\n const offset = this.bigTiff ? 16 : 8;\n if (this.ghostValues) {\n return this.ghostValues;\n }\n const detectionString = 'GDAL_STRUCTURAL_METADATA_SIZE=';\n const heuristicAreaSize = detectionString.length + 100;\n let slice = await this.getSlice(offset, heuristicAreaSize);\n if (detectionString === getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII, detectionString.length, offset)) {\n const valuesString = getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII, heuristicAreaSize, offset);\n const firstLine = valuesString.split('\\n')[0];\n const metadataSize = Number(firstLine.split('=')[1].split(' ')[0]) + firstLine.length;\n if (metadataSize > heuristicAreaSize) {\n slice = await this.getSlice(offset, metadataSize);\n }\n const fullString = getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\"fieldTypes\"].ASCII, metadataSize, offset);\n this.ghostValues = {};\n fullString\n .split('\\n')\n .filter(line => line.length > 0)\n .map(line => line.split('='))\n .forEach(([key, value]) => {\n this.ghostValues[key] = value;\n });\n }\n return this.ghostValues;\n }\n\n /**\n * Parse a (Geo)TIFF file from the given source.\n *\n * @param {source~Source} source The source of data to parse from.\n * @param {object} options Additional options.\n */\n static async fromSource(source, options) {\n const headerData = await source.fetch(0, 1024);\n const dataView = new _dataview64__WEBPACK_IMPORTED_MODULE_1__[\"default\"](headerData);\n\n const BOM = dataView.getUint16(0, 0);\n let littleEndian;\n if (BOM === 0x4949) {\n littleEndian = true;\n } else if (BOM === 0x4D4D) {\n littleEndian = false;\n } else {\n throw new TypeError('Invalid byte order value.');\n }\n\n const magicNumber = dataView.getUint16(2, littleEndian);\n let bigTiff;\n if (magicNumber === 42) {\n bigTiff = false;\n } else if (magicNumber === 43) {\n bigTiff = true;\n const offsetByteSize = dataView.getUint16(4, littleEndian);\n if (offsetByteSize !== 8) {\n throw new Error('Unsupported offset byte-size.');\n }\n } else {\n throw new TypeError('Invalid magic number.');\n }\n\n const firstIFDOffset = bigTiff\n ? dataView.getUint64(8, littleEndian)\n : dataView.getUint32(4, littleEndian);\n return new GeoTIFF(source, littleEndian, bigTiff, firstIFDOffset, options);\n }\n\n /**\n * Closes the underlying file buffer\n * N.B. 
After the GeoTIFF has been completely processed it needs\n * to be closed but only if it has been constructed from a file.\n */\n close() {\n if (typeof this.source.close === 'function') {\n return this.source.close();\n }\n return false;\n }\n}\n\n\n/* harmony default export */ __webpack_exports__[\"default\"] = (GeoTIFF);\n\n/**\n * Wrapper for GeoTIFF files that have external overviews.\n * @augments GeoTIFFBase\n */\nclass MultiGeoTIFF extends GeoTIFFBase {\n /**\n * Construct a new MultiGeoTIFF from a main and several overview files.\n * @param {GeoTIFF} mainFile The main GeoTIFF file.\n * @param {GeoTIFF[]} overviewFiles An array of overview files.\n */\n constructor(mainFile, overviewFiles) {\n super();\n this.mainFile = mainFile;\n this.overviewFiles = overviewFiles;\n this.imageFiles = [mainFile].concat(overviewFiles);\n\n this.fileDirectoriesPerFile = null;\n this.fileDirectoriesPerFileParsing = null;\n this.imageCount = null;\n }\n\n async parseFileDirectoriesPerFile() {\n const requests = [this.mainFile.parseFileDirectoryAt(this.mainFile.firstIFDOffset)]\n .concat(this.overviewFiles.map((file) => file.parseFileDirectoryAt(file.firstIFDOffset)));\n\n this.fileDirectoriesPerFile = await Promise.all(requests);\n return this.fileDirectoriesPerFile;\n }\n\n /**\n * Get the n-th internal subfile of an image. By default, the first is returned.\n *\n * @param {Number} [index=0] the index of the image to return.\n * @returns {GeoTIFFImage} the image at the given index\n */\n async getImage(index = 0) {\n await this.getImageCount();\n await this.parseFileDirectoriesPerFile();\n let visited = 0;\n let relativeIndex = 0;\n for (let i = 0; i < this.imageFiles.length; i++) {\n const imageFile = this.imageFiles[i];\n for (let ii = 0; ii < this.imageCounts[i]; ii++) {\n if (index === visited) {\n const ifd = await imageFile.requestIFD(relativeIndex);\n return new _geotiffimage__WEBPACK_IMPORTED_MODULE_0__[\"default\"](\n ifd.fileDirectory, imageFile.geoKeyDirectory,\n imageFile.dataView, imageFile.littleEndian, imageFile.cache, imageFile.source,\n );\n }\n visited++;\n relativeIndex++;\n }\n relativeIndex = 0;\n }\n\n throw new RangeError('Invalid image index');\n }\n\n /**\n * Returns the count of the internal subfiles.\n *\n * @returns {Number} the number of internal subfile images\n */\n async getImageCount() {\n if (this.imageCount !== null) {\n return this.imageCount;\n }\n const requests = [this.mainFile.getImageCount()]\n .concat(this.overviewFiles.map((file) => file.getImageCount()));\n this.imageCounts = await Promise.all(requests);\n this.imageCount = this.imageCounts.reduce((count, ifds) => count + ifds, 0);\n return this.imageCount;\n }\n}\n\n\n\n/**\n * Creates a new GeoTIFF from a remote URL.\n * @param {string} url The URL to access the image from\n * @param {object} [options] Additional options to pass to the source.\n * See {@link makeRemoteSource} for details.\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\n */\nasync function fromUrl(url, options = {}) {\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeRemoteSource\"])(url, options));\n}\n\n/**\n * Construct a new GeoTIFF from an\n * [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.\n * @param {ArrayBuffer} arrayBuffer The data to read the file from.\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\n */\nasync function fromArrayBuffer(arrayBuffer) {\n return 
GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeBufferSource\"])(arrayBuffer));\n}\n\n/**\n * Construct a GeoTIFF from a local file path. This uses the node\n * [filesystem API]{@link https://nodejs.org/api/fs.html} and is\n * not available on browsers.\n *\n * N.B. After the GeoTIFF has been completely processed it needs\n * to be closed but only if it has been constructed from a file.\n * @param {string} path The file path to read from.\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\n */\nasync function fromFile(path) {\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeFileSource\"])(path));\n}\n\n/**\n * Construct a GeoTIFF from an HTML\n * [Blob]{@link https://developer.mozilla.org/en-US/docs/Web/API/Blob} or\n * [File]{@link https://developer.mozilla.org/en-US/docs/Web/API/File}\n * object.\n * @param {Blob|File} blob The Blob or File object to read from.\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\n */\nasync function fromBlob(blob) {\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeFileReaderSource\"])(blob));\n}\n\n/**\n * Construct a MultiGeoTIFF from the given URLs.\n * @param {string} mainUrl The URL for the main file.\n * @param {string[]} overviewUrls An array of URLs for the overview images.\n * @param {object} [options] Additional options to pass to the source.\n * See [makeRemoteSource]{@link module:source.makeRemoteSource}\n * for details.\n * @returns {Promise.<MultiGeoTIFF>} The resulting MultiGeoTIFF file.\n */\nasync function fromUrls(mainUrl, overviewUrls = [], options = {}) {\n const mainFile = await GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeRemoteSource\"])(mainUrl, options));\n const overviewFiles = await Promise.all(\n overviewUrls.map((url) => GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\"makeRemoteSource\"])(url, options))),\n );\n\n return new MultiGeoTIFF(mainFile, overviewFiles);\n}\n\n/**\n * Main creating function for GeoTIFF files.\n * @param {(Array)} array of pixel values\n * @returns {metadata} metadata\n */\nasync function writeArrayBuffer(values, metadata) {\n return Object(_geotiffwriter__WEBPACK_IMPORTED_MODULE_6__[\"writeGeotiff\"])(values, metadata);\n}\n\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiff.js?");
273
274/***/ }),
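/*
 * A minimal sketch of the public API exported above (fromUrl, fromArrayBuffer,
 * readRasters, ...), following the JSDoc in ./geotiff.js. The URL is a
 * placeholder and the calls are assumed to run inside an async function.
 *
 *   const tiff = await fromUrl('https://example.com/raster.tif');
 *   const image = await tiff.getImage();        // first image file directory
 *   const count = await tiff.getImageCount();   // walks IFDs until nextIFDByteOffset is 0
 *   const rasters = await tiff.readRasters({
 *     bbox: image.getBoundingBox(),             // translated to a pixel window internally
 *     width: 256,                               // converted to resX/resY, used to pick an overview
 *     height: 256,
 *   });
 */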
275
276/***/ "./node_modules/geotiff/src/geotiffimage.js":
277/*!**************************************************!*\
278 !*** ./node_modules/geotiff/src/geotiffimage.js ***!
279 \**************************************************/
280/*! exports provided: default */
281/***/ (function(module, __webpack_exports__, __webpack_require__) {
282
283"use strict";
284eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var txml__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! txml */ \"./node_modules/txml/tXml.js\");\n/* harmony import */ var txml__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(txml__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./globals */ \"./node_modules/geotiff/src/globals.js\");\n/* harmony import */ var _rgb__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./rgb */ \"./node_modules/geotiff/src/rgb.js\");\n/* harmony import */ var _compression__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./compression */ \"./node_modules/geotiff/src/compression/index.js\");\n/* harmony import */ var _resample__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./resample */ \"./node_modules/geotiff/src/resample.js\");\n/* eslint max-len: [\"error\", { \"code\": 120 }] */\n\n\n\n\n\n\n\nfunction sum(array, start, end) {\n let s = 0;\n for (let i = start; i < end; ++i) {\n s += array[i];\n }\n return s;\n}\n\nfunction arrayForType(format, bitsPerSample, size) {\n switch (format) {\n case 1: // unsigned integer data\n switch (bitsPerSample) {\n case 8:\n return new Uint8Array(size);\n case 16:\n return new Uint16Array(size);\n case 32:\n return new Uint32Array(size);\n default:\n break;\n }\n break;\n case 2: // twos complement signed integer data\n switch (bitsPerSample) {\n case 8:\n return new Int8Array(size);\n case 16:\n return new Int16Array(size);\n case 32:\n return new Int32Array(size);\n default:\n break;\n }\n break;\n case 3: // floating point data\n switch (bitsPerSample) {\n case 32:\n return new Float32Array(size);\n case 64:\n return new Float64Array(size);\n default:\n break;\n }\n break;\n default:\n break;\n }\n throw Error('Unsupported data format/bitsPerSample');\n}\n\n/**\n * GeoTIFF sub-file image.\n */\nclass GeoTIFFImage {\n /**\n * @constructor\n * @param {Object} fileDirectory The parsed file directory\n * @param {Object} geoKeys The parsed geo-keys\n * @param {DataView} dataView The DataView for the underlying file.\n * @param {Boolean} littleEndian Whether the file is encoded in little or big endian\n * @param {Boolean} cache Whether or not decoded tiles shall be cached\n * @param {Source} source The datasource to read from\n */\n constructor(fileDirectory, geoKeys, dataView, littleEndian, cache, source) {\n this.fileDirectory = fileDirectory;\n this.geoKeys = geoKeys;\n this.dataView = dataView;\n this.littleEndian = littleEndian;\n this.tiles = cache ? {} : null;\n this.isTiled = !fileDirectory.StripOffsets;\n const planarConfiguration = fileDirectory.PlanarConfiguration;\n this.planarConfiguration = (typeof planarConfiguration === 'undefined') ? 
1 : planarConfiguration;\n if (this.planarConfiguration !== 1 && this.planarConfiguration !== 2) {\n throw new Error('Invalid planar configuration.');\n }\n\n this.source = source;\n }\n\n /**\n * Returns the associated parsed file directory.\n * @returns {Object} the parsed file directory\n */\n getFileDirectory() {\n return this.fileDirectory;\n }\n\n /**\n * Returns the associated parsed geo keys.\n * @returns {Object} the parsed geo keys\n */\n getGeoKeys() {\n return this.geoKeys;\n }\n\n /**\n * Returns the width of the image.\n * @returns {Number} the width of the image\n */\n getWidth() {\n return this.fileDirectory.ImageWidth;\n }\n\n /**\n * Returns the height of the image.\n * @returns {Number} the height of the image\n */\n getHeight() {\n return this.fileDirectory.ImageLength;\n }\n\n /**\n * Returns the number of samples per pixel.\n * @returns {Number} the number of samples per pixel\n */\n getSamplesPerPixel() {\n return this.fileDirectory.SamplesPerPixel;\n }\n\n /**\n * Returns the width of each tile.\n * @returns {Number} the width of each tile\n */\n getTileWidth() {\n return this.isTiled ? this.fileDirectory.TileWidth : this.getWidth();\n }\n\n /**\n * Returns the height of each tile.\n * @returns {Number} the height of each tile\n */\n getTileHeight() {\n if (this.isTiled) {\n return this.fileDirectory.TileLength;\n }\n if (typeof this.fileDirectory.RowsPerStrip !== 'undefined') {\n return Math.min(this.fileDirectory.RowsPerStrip, this.getHeight());\n }\n return this.getHeight();\n }\n\n /**\n * Calculates the number of bytes for each pixel across all samples. Only full\n * bytes are supported, an exception is thrown when this is not the case.\n * @returns {Number} the bytes per pixel\n */\n getBytesPerPixel() {\n let bitsPerSample = 0;\n for (let i = 0; i < this.fileDirectory.BitsPerSample.length; ++i) {\n const bits = this.fileDirectory.BitsPerSample[i];\n if ((bits % 8) !== 0) {\n throw new Error(`Sample bit-width of ${bits} is not supported.`);\n } else if (bits !== this.fileDirectory.BitsPerSample[0]) {\n throw new Error('Differing size of samples in a pixel are not supported.');\n }\n bitsPerSample += bits;\n }\n return bitsPerSample / 8;\n }\n\n getSampleByteSize(i) {\n if (i >= this.fileDirectory.BitsPerSample.length) {\n throw new RangeError(`Sample index ${i} is out of range.`);\n }\n const bits = this.fileDirectory.BitsPerSample[i];\n if ((bits % 8) !== 0) {\n throw new Error(`Sample bit-width of ${bits} is not supported.`);\n }\n return (bits / 8);\n }\n\n getReaderForSample(sampleIndex) {\n const format = this.fileDirectory.SampleFormat\n ? 
this.fileDirectory.SampleFormat[sampleIndex] : 1;\n const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];\n switch (format) {\n case 1: // unsigned integer data\n switch (bitsPerSample) {\n case 8:\n return DataView.prototype.getUint8;\n case 16:\n return DataView.prototype.getUint16;\n case 32:\n return DataView.prototype.getUint32;\n default:\n break;\n }\n break;\n case 2: // twos complement signed integer data\n switch (bitsPerSample) {\n case 8:\n return DataView.prototype.getInt8;\n case 16:\n return DataView.prototype.getInt16;\n case 32:\n return DataView.prototype.getInt32;\n default:\n break;\n }\n break;\n case 3:\n switch (bitsPerSample) {\n case 32:\n return DataView.prototype.getFloat32;\n case 64:\n return DataView.prototype.getFloat64;\n default:\n break;\n }\n break;\n default:\n break;\n }\n throw Error('Unsupported data format/bitsPerSample');\n }\n\n getArrayForSample(sampleIndex, size) {\n const format = this.fileDirectory.SampleFormat\n ? this.fileDirectory.SampleFormat[sampleIndex] : 1;\n const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];\n return arrayForType(format, bitsPerSample, size);\n }\n\n /**\n * Returns the decoded strip or tile.\n * @param {Number} x the strip or tile x-offset\n * @param {Number} y the tile y-offset (0 for stripped images)\n * @param {Number} sample the sample to get for separated samples\n * @param {Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool\n * @returns {Promise.<ArrayBuffer>}\n */\n async getTileOrStrip(x, y, sample, poolOrDecoder) {\n const numTilesPerRow = Math.ceil(this.getWidth() / this.getTileWidth());\n const numTilesPerCol = Math.ceil(this.getHeight() / this.getTileHeight());\n let index;\n const { tiles } = this;\n if (this.planarConfiguration === 1) {\n index = (y * numTilesPerRow) + x;\n } else if (this.planarConfiguration === 2) {\n index = (sample * numTilesPerRow * numTilesPerCol) + (y * numTilesPerRow) + x;\n }\n\n let offset;\n let byteCount;\n if (this.isTiled) {\n offset = this.fileDirectory.TileOffsets[index];\n byteCount = this.fileDirectory.TileByteCounts[index];\n } else {\n offset = this.fileDirectory.StripOffsets[index];\n byteCount = this.fileDirectory.StripByteCounts[index];\n }\n const slice = await this.source.fetch(offset, byteCount);\n\n // either use the provided pool or decoder to decode the data\n let request;\n if (tiles === null) {\n request = poolOrDecoder.decode(this.fileDirectory, slice);\n } else if (!tiles[index]) {\n request = poolOrDecoder.decode(this.fileDirectory, slice);\n tiles[index] = request;\n }\n return { x, y, sample, data: await request };\n }\n\n /**\n * Internal read function.\n * @private\n * @param {Array} imageWindow The image window in pixel coordinates\n * @param {Array} samples The selected samples (0-based indices)\n * @param {TypedArray[]|TypedArray} valueArrays The array(s) to write into\n * @param {Boolean} interleave Whether or not to write in an interleaved manner\n * @param {Pool} pool The decoder pool\n * @returns {Promise<TypedArray[]>|Promise<TypedArray>}\n */\n async _readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod) {\n const tileWidth = this.getTileWidth();\n const tileHeight = this.getTileHeight();\n\n const minXTile = Math.max(Math.floor(imageWindow[0] / tileWidth), 0);\n const maxXTile = Math.min(\n Math.ceil(imageWindow[2] / tileWidth),\n Math.ceil(this.getWidth() / this.getTileWidth()),\n );\n const minYTile = Math.max(Math.floor(imageWindow[1] / 
tileHeight), 0);\n const maxYTile = Math.min(\n Math.ceil(imageWindow[3] / tileHeight),\n Math.ceil(this.getHeight() / this.getTileHeight()),\n );\n const windowWidth = imageWindow[2] - imageWindow[0];\n\n let bytesPerPixel = this.getBytesPerPixel();\n\n const srcSampleOffsets = [];\n const sampleReaders = [];\n for (let i = 0; i < samples.length; ++i) {\n if (this.planarConfiguration === 1) {\n srcSampleOffsets.push(sum(this.fileDirectory.BitsPerSample, 0, samples[i]) / 8);\n } else {\n srcSampleOffsets.push(0);\n }\n sampleReaders.push(this.getReaderForSample(samples[i]));\n }\n\n const promises = [];\n const { littleEndian } = this;\n\n for (let yTile = minYTile; yTile < maxYTile; ++yTile) {\n for (let xTile = minXTile; xTile < maxXTile; ++xTile) {\n for (let sampleIndex = 0; sampleIndex < samples.length; ++sampleIndex) {\n const si = sampleIndex;\n const sample = samples[sampleIndex];\n if (this.planarConfiguration === 2) {\n bytesPerPixel = this.getSampleByteSize(sample);\n }\n const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder);\n promises.push(promise);\n promise.then((tile) => {\n const buffer = tile.data;\n const dataView = new DataView(buffer);\n const firstLine = tile.y * tileHeight;\n const firstCol = tile.x * tileWidth;\n const lastLine = (tile.y + 1) * tileHeight;\n const lastCol = (tile.x + 1) * tileWidth;\n const reader = sampleReaders[si];\n\n const ymax = Math.min(tileHeight, tileHeight - (lastLine - imageWindow[3]));\n const xmax = Math.min(tileWidth, tileWidth - (lastCol - imageWindow[2]));\n\n for (let y = Math.max(0, imageWindow[1] - firstLine); y < ymax; ++y) {\n for (let x = Math.max(0, imageWindow[0] - firstCol); x < xmax; ++x) {\n const pixelOffset = ((y * tileWidth) + x) * bytesPerPixel;\n const value = reader.call(\n dataView, pixelOffset + srcSampleOffsets[si], littleEndian,\n );\n let windowCoordinate;\n if (interleave) {\n windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth * samples.length)\n + ((x + firstCol - imageWindow[0]) * samples.length)\n + si;\n valueArrays[windowCoordinate] = value;\n } else {\n windowCoordinate = (\n (y + firstLine - imageWindow[1]) * windowWidth\n ) + x + firstCol - imageWindow[0];\n valueArrays[si][windowCoordinate] = value;\n }\n }\n }\n });\n }\n }\n }\n await Promise.all(promises);\n\n if ((width && (imageWindow[2] - imageWindow[0]) !== width)\n || (height && (imageWindow[3] - imageWindow[1]) !== height)) {\n let resampled;\n if (interleave) {\n resampled = Object(_resample__WEBPACK_IMPORTED_MODULE_4__[\"resampleInterleaved\"])(\n valueArrays,\n imageWindow[2] - imageWindow[0],\n imageWindow[3] - imageWindow[1],\n width, height,\n samples.length,\n resampleMethod,\n );\n } else {\n resampled = Object(_resample__WEBPACK_IMPORTED_MODULE_4__[\"resample\"])(\n valueArrays,\n imageWindow[2] - imageWindow[0],\n imageWindow[3] - imageWindow[1],\n width, height,\n resampleMethod,\n );\n }\n resampled.width = width;\n resampled.height = height;\n return resampled;\n }\n\n valueArrays.width = width || imageWindow[2] - imageWindow[0];\n valueArrays.height = height || imageWindow[3] - imageWindow[1];\n\n return valueArrays;\n }\n\n /**\n * Reads raster data from the image. This function reads all selected samples\n * into separate arrays of the correct type for that sample or into a single\n * combined array when `interleave` is set. 
When provided, only a subset\n * of the raster is read for each sample.\n *\n * @param {Object} [options={}] optional parameters\n * @param {Array} [options.window=whole image] the subset to read data from.\n * @param {Array} [options.samples=all samples] the selection of samples to read from.\n * @param {Boolean} [options.interleave=false] whether the data shall be read\n * in one single array or separate\n * arrays.\n * @param {Number} [options.pool=null] The optional decoder pool to use.\n * @param {number} [options.width] The desired width of the output. When the width is\n * not the same as the images, resampling will be\n * performed.\n * @param {number} [options.height] The desired height of the output. When the width\n * is not the same as the images, resampling will\n * be performed.\n * @param {string} [options.resampleMethod='nearest'] The desired resampling method.\n * @param {number|number[]} [options.fillValue] The value to use for parts of the image\n * outside of the images extent. When\n * multiple samples are requested, an\n * array of fill values can be passed.\n * @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise\n */\n async readRasters({\n window: wnd, samples = [], interleave, pool = null,\n width, height, resampleMethod, fillValue,\n } = {}) {\n const imageWindow = wnd || [0, 0, this.getWidth(), this.getHeight()];\n\n // check parameters\n if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {\n throw new Error('Invalid subsets');\n }\n\n const imageWindowWidth = imageWindow[2] - imageWindow[0];\n const imageWindowHeight = imageWindow[3] - imageWindow[1];\n const numPixels = imageWindowWidth * imageWindowHeight;\n\n if (!samples || !samples.length) {\n for (let i = 0; i < this.fileDirectory.SamplesPerPixel; ++i) {\n samples.push(i);\n }\n } else {\n for (let i = 0; i < samples.length; ++i) {\n if (samples[i] >= this.fileDirectory.SamplesPerPixel) {\n return Promise.reject(new RangeError(`Invalid sample index '${samples[i]}'.`));\n }\n }\n }\n let valueArrays;\n if (interleave) {\n const format = this.fileDirectory.SampleFormat\n ? Math.max.apply(null, this.fileDirectory.SampleFormat) : 1;\n const bitsPerSample = Math.max.apply(null, this.fileDirectory.BitsPerSample);\n valueArrays = arrayForType(format, bitsPerSample, numPixels * samples.length);\n if (fillValue) {\n valueArrays.fill(fillValue);\n }\n } else {\n valueArrays = [];\n for (let i = 0; i < samples.length; ++i) {\n const valueArray = this.getArrayForSample(samples[i], numPixels);\n if (Array.isArray(fillValue) && i < fillValue.length) {\n valueArray.fill(fillValue[i]);\n } else if (fillValue && !Array.isArray(fillValue)) {\n valueArray.fill(fillValue);\n }\n valueArrays.push(valueArray);\n }\n }\n\n const poolOrDecoder = pool || Object(_compression__WEBPACK_IMPORTED_MODULE_3__[\"getDecoder\"])(this.fileDirectory);\n\n const result = await this._readRaster(\n imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod,\n );\n return result;\n }\n\n /**\n * Reads raster data from the image as RGB. 
The result is always an\n * interleaved typed array.\n * Colorspaces other than RGB will be transformed to RGB, color maps expanded.\n * When no other method is applicable, the first sample is used to produce a\n * greayscale image.\n * When provided, only a subset of the raster is read for each sample.\n *\n * @param {Object} [options] optional parameters\n * @param {Array} [options.window=whole image] the subset to read data from.\n * @param {Number} [pool=null] The optional decoder pool to use.\n * @param {number} [width] The desired width of the output. When the width is no the\n * same as the images, resampling will be performed.\n * @param {number} [height] The desired height of the output. When the width is no the\n * same as the images, resampling will be performed.\n * @param {string} [resampleMethod='nearest'] The desired resampling method.\n * @param {bool} [enableAlpha=false] Enable reading alpha channel if present.\n * @returns {Promise.<TypedArray|TypedArray[]>} the RGB array as a Promise\n */\n async readRGB({ window, pool = null, width, height, resampleMethod, enableAlpha = false } = {}) {\n const imageWindow = window || [0, 0, this.getWidth(), this.getHeight()];\n\n // check parameters\n if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {\n throw new Error('Invalid subsets');\n }\n\n const pi = this.fileDirectory.PhotometricInterpretation;\n\n if (pi === _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].RGB) {\n let s = [0, 1, 2];\n if ((!(this.fileDirectory.ExtraSamples === _globals__WEBPACK_IMPORTED_MODULE_1__[\"ExtraSamplesValues\"].Unspecified)) && enableAlpha) {\n s = [];\n for (let i = 0; i < this.fileDirectory.BitsPerSample.length; i += 1) {\n s.push(i);\n }\n }\n return this.readRasters({\n window,\n interleave: true,\n samples: s,\n pool,\n width,\n height,\n });\n }\n\n let samples;\n switch (pi) {\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].WhiteIsZero:\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].BlackIsZero:\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].Palette:\n samples = [0];\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].CMYK:\n samples = [0, 1, 2, 3];\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].YCbCr:\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].CIELab:\n samples = [0, 1, 2];\n break;\n default:\n throw new Error('Invalid or unsupported photometric interpretation.');\n }\n\n const subOptions = {\n window: imageWindow,\n interleave: true,\n samples,\n pool,\n width,\n height,\n resampleMethod,\n };\n const { fileDirectory } = this;\n const raster = await this.readRasters(subOptions);\n\n const max = 2 ** this.fileDirectory.BitsPerSample[0];\n let data;\n switch (pi) {\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].WhiteIsZero:\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromWhiteIsZero\"])(raster, max);\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].BlackIsZero:\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromBlackIsZero\"])(raster, max);\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].Palette:\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromPalette\"])(raster, fileDirectory.ColorMap);\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].CMYK:\n 
data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromCMYK\"])(raster);\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].YCbCr:\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromYCbCr\"])(raster);\n break;\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\"photometricInterpretations\"].CIELab:\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\"fromCIELab\"])(raster);\n break;\n default:\n throw new Error('Unsupported photometric interpretation.');\n }\n data.width = raster.width;\n data.height = raster.height;\n return data;\n }\n\n /**\n * Returns an array of tiepoints.\n * @returns {Object[]}\n */\n getTiePoints() {\n if (!this.fileDirectory.ModelTiepoint) {\n return [];\n }\n\n const tiePoints = [];\n for (let i = 0; i < this.fileDirectory.ModelTiepoint.length; i += 6) {\n tiePoints.push({\n i: this.fileDirectory.ModelTiepoint[i],\n j: this.fileDirectory.ModelTiepoint[i + 1],\n k: this.fileDirectory.ModelTiepoint[i + 2],\n x: this.fileDirectory.ModelTiepoint[i + 3],\n y: this.fileDirectory.ModelTiepoint[i + 4],\n z: this.fileDirectory.ModelTiepoint[i + 5],\n });\n }\n return tiePoints;\n }\n\n /**\n * Returns the parsed GDAL metadata items.\n *\n * If sample is passed to null, dataset-level metadata will be returned.\n * Otherwise only metadata specific to the provided sample will be returned.\n *\n * @param {Number} [sample=null] The sample index.\n * @returns {Object}\n */\n getGDALMetadata(sample = null) {\n const metadata = {};\n if (!this.fileDirectory.GDAL_METADATA) {\n return null;\n }\n const string = this.fileDirectory.GDAL_METADATA;\n const xmlDom = txml__WEBPACK_IMPORTED_MODULE_0___default()(string.substring(0, string.length - 1));\n\n if (!xmlDom[0].tagName) {\n throw new Error('Failed to parse GDAL metadata XML.');\n }\n\n const root = xmlDom[0];\n if (root.tagName !== 'GDALMetadata') {\n throw new Error('Unexpected GDAL metadata XML tag.');\n }\n\n let items = root.children\n .filter((child) => child.tagName === 'Item');\n\n if (sample) {\n items = items.filter((item) => Number(item.attributes.sample) === sample);\n }\n\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n metadata[item.attributes.name] = item.children[0];\n }\n return metadata;\n }\n\n /**\n * Returns the GDAL nodata value\n * @returns {Number} or null\n */\n getGDALNoData() {\n if (!this.fileDirectory.GDAL_NODATA) {\n return null;\n }\n const string = this.fileDirectory.GDAL_NODATA;\n return Number(string.substring(0, string.length - 1));\n }\n\n /**\n * Returns the image origin as a XYZ-vector. When the image has no affine\n * transformation, then an exception is thrown.\n * @returns {Array} The origin as a vector\n */\n getOrigin() {\n const tiePoints = this.fileDirectory.ModelTiepoint;\n const modelTransformation = this.fileDirectory.ModelTransformation;\n if (tiePoints && tiePoints.length === 6) {\n return [\n tiePoints[3],\n tiePoints[4],\n tiePoints[5],\n ];\n }\n if (modelTransformation) {\n return [\n modelTransformation[3],\n modelTransformation[7],\n modelTransformation[11],\n ];\n }\n throw new Error('The image does not have an affine transformation.');\n }\n\n /**\n * Returns the image resolution as a XYZ-vector. 
When the image has no affine\n * transformation, then an exception is thrown.\n * @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from\n * in cases when the current image does not have the\n * required tags on its own.\n * @returns {Array} The resolution as a vector\n */\n getResolution(referenceImage = null) {\n const modelPixelScale = this.fileDirectory.ModelPixelScale;\n const modelTransformation = this.fileDirectory.ModelTransformation;\n\n if (modelPixelScale) {\n return [\n modelPixelScale[0],\n -modelPixelScale[1],\n modelPixelScale[2],\n ];\n }\n if (modelTransformation) {\n return [\n modelTransformation[0],\n modelTransformation[5],\n modelTransformation[10],\n ];\n }\n\n if (referenceImage) {\n const [refResX, refResY, refResZ] = referenceImage.getResolution();\n return [\n refResX * referenceImage.getWidth() / this.getWidth(),\n refResY * referenceImage.getHeight() / this.getHeight(),\n refResZ * referenceImage.getWidth() / this.getWidth(),\n ];\n }\n\n throw new Error('The image does not have an affine transformation.');\n }\n\n /**\n * Returns whether or not the pixels of the image depict an area (or point).\n * @returns {Boolean} Whether the pixels are a point\n */\n pixelIsArea() {\n return this.geoKeys.GTRasterTypeGeoKey === 1;\n }\n\n /**\n * Returns the image bounding box as an array of 4 values: min-x, min-y,\n * max-x and max-y. When the image has no affine transformation, then an\n * exception is thrown.\n * @returns {Array} The bounding box\n */\n getBoundingBox() {\n const origin = this.getOrigin();\n const resolution = this.getResolution();\n\n const x1 = origin[0];\n const y1 = origin[1];\n\n const x2 = x1 + (resolution[0] * this.getWidth());\n const y2 = y1 + (resolution[1] * this.getHeight());\n\n return [\n Math.min(x1, x2),\n Math.min(y1, y2),\n Math.max(x1, x2),\n Math.max(y1, y2),\n ];\n }\n}\n\n/* harmony default export */ __webpack_exports__[\"default\"] = (GeoTIFFImage);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiffimage.js?");
285
286/***/ }),
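/*
 * A brief sketch of reading from a single GeoTIFFImage as implemented above;
 * `tiff` is assumed to be a GeoTIFF instance and the window values are
 * illustrative (pixel coordinates as [left, top, right, bottom]).
 *
 *   const image = await tiff.getImage(0);
 *   image.getWidth();                      // ImageWidth tag
 *   image.getHeight();                     // ImageLength tag
 *   image.getBoundingBox();                // [minX, minY, maxX, maxY] from tiepoint / pixel scale
 *   const bands = await image.readRasters({
 *     window: [0, 0, 64, 64],
 *     samples: [0],                        // read only the first sample (band)
 *   });
 *   const rgb = await image.readRGB({      // always interleaved; palette, YCbCr and
 *     window: [0, 0, 64, 64],              // CMYK data are converted to RGB
 *   });
 */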
287
288/***/ "./node_modules/geotiff/src/geotiffwriter.js":
289/*!***************************************************!*\
290 !*** ./node_modules/geotiff/src/geotiffwriter.js ***!
291 \***************************************************/
292/*! exports provided: writeGeotiff */
293/***/ (function(module, __webpack_exports__, __webpack_require__) {
294
295"use strict";
296eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"writeGeotiff\", function() { return writeGeotiff; });\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./globals */ \"./node_modules/geotiff/src/globals.js\");\n/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./utils */ \"./node_modules/geotiff/src/utils.js\");\n/*\n Some parts of this file are based on UTIF.js,\n which was released under the MIT License.\n You can view that here:\n https://github.com/photopea/UTIF.js/blob/master/LICENSE\n*/\n\n\n\nconst tagName2Code = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"invert\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\"fieldTagNames\"]);\nconst geoKeyName2Code = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"invert\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\"geoKeyNames\"]);\nconst name2code = {};\nObject(_utils__WEBPACK_IMPORTED_MODULE_1__[\"assign\"])(name2code, tagName2Code);\nObject(_utils__WEBPACK_IMPORTED_MODULE_1__[\"assign\"])(name2code, geoKeyName2Code);\nconst typeName2byte = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"invert\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\"fieldTypeNames\"]);\n\n// config variables\nconst numBytesInIfd = 1000;\n\nconst _binBE = {\n nextZero: (data, o) => {\n let oincr = o;\n while (data[oincr] !== 0) {\n oincr++;\n }\n return oincr;\n },\n readUshort: (buff, p) => {\n return (buff[p] << 8) | buff[p + 1];\n },\n readShort: (buff, p) => {\n const a = _binBE.ui8;\n a[0] = buff[p + 1];\n a[1] = buff[p + 0];\n return _binBE.i16[0];\n },\n readInt: (buff, p) => {\n const a = _binBE.ui8;\n a[0] = buff[p + 3];\n a[1] = buff[p + 2];\n a[2] = buff[p + 1];\n a[3] = buff[p + 0];\n return _binBE.i32[0];\n },\n readUint: (buff, p) => {\n const a = _binBE.ui8;\n a[0] = buff[p + 3];\n a[1] = buff[p + 2];\n a[2] = buff[p + 1];\n a[3] = buff[p + 0];\n return _binBE.ui32[0];\n },\n readASCII: (buff, p, l) => {\n return l.map((i) => String.fromCharCode(buff[p + i])).join('');\n },\n readFloat: (buff, p) => {\n const a = _binBE.ui8;\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(4, (i) => {\n a[i] = buff[p + 3 - i];\n });\n return _binBE.fl32[0];\n },\n readDouble: (buff, p) => {\n const a = _binBE.ui8;\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(8, (i) => {\n a[i] = buff[p + 7 - i];\n });\n return _binBE.fl64[0];\n },\n writeUshort: (buff, p, n) => {\n buff[p] = (n >> 8) & 255;\n buff[p + 1] = n & 255;\n },\n writeUint: (buff, p, n) => {\n buff[p] = (n >> 24) & 255;\n buff[p + 1] = (n >> 16) & 255;\n buff[p + 2] = (n >> 8) & 255;\n buff[p + 3] = (n >> 0) & 255;\n },\n writeASCII: (buff, p, s) => {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(s.length, (i) => {\n buff[p + i] = s.charCodeAt(i);\n });\n },\n ui8: new Uint8Array(8),\n};\n\n_binBE.fl64 = new Float64Array(_binBE.ui8.buffer);\n\n_binBE.writeDouble = (buff, p, n) => {\n _binBE.fl64[0] = n;\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(8, (i) => {\n buff[p + i] = _binBE.ui8[7 - i];\n });\n};\n\n\nconst _writeIFD = (bin, data, _offset, ifd) => {\n let offset = _offset;\n\n const keys = Object.keys(ifd).filter((key) => {\n return key !== undefined && key !== null && key !== 'undefined';\n });\n\n bin.writeUshort(data, offset, keys.length);\n offset += 2;\n\n let eoff = offset + (12 * keys.length) + 4;\n\n for (const key of keys) {\n let tag = null;\n if (typeof key === 'number') {\n tag = key;\n } else if (typeof key 
=== 'string') {\n tag = parseInt(key, 10);\n }\n\n const typeName = _globals__WEBPACK_IMPORTED_MODULE_0__[\"fieldTagTypes\"][tag];\n const typeNum = typeName2byte[typeName];\n\n if (typeName == null || typeName === undefined || typeof typeName === 'undefined') {\n throw new Error(`unknown type of tag: ${tag}`);\n }\n\n let val = ifd[key];\n\n if (typeof val === 'undefined') {\n throw new Error(`failed to get value for key ${key}`);\n }\n\n // ASCIIZ format with trailing 0 character\n // http://www.fileformat.info/format/tiff/corion.htm\n // https://stackoverflow.com/questions/7783044/whats-the-difference-between-asciiz-vs-ascii\n if (typeName === 'ASCII' && typeof val === 'string' && Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"endsWith\"])(val, '\\u0000') === false) {\n val += '\\u0000';\n }\n\n const num = val.length;\n\n bin.writeUshort(data, offset, tag);\n offset += 2;\n\n bin.writeUshort(data, offset, typeNum);\n offset += 2;\n\n bin.writeUint(data, offset, num);\n offset += 4;\n\n let dlen = [-1, 1, 1, 2, 4, 8, 0, 0, 0, 0, 0, 0, 8][typeNum] * num;\n let toff = offset;\n\n if (dlen > 4) {\n bin.writeUint(data, offset, eoff);\n toff = eoff;\n }\n\n if (typeName === 'ASCII') {\n bin.writeASCII(data, toff, val);\n } else if (typeName === 'SHORT') {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(num, (i) => {\n bin.writeUshort(data, toff + (2 * i), val[i]);\n });\n } else if (typeName === 'LONG') {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(num, (i) => {\n bin.writeUint(data, toff + (4 * i), val[i]);\n });\n } else if (typeName === 'RATIONAL') {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(num, (i) => {\n bin.writeUint(data, toff + (8 * i), Math.round(val[i] * 10000));\n bin.writeUint(data, toff + (8 * i) + 4, 10000);\n });\n } else if (typeName === 'DOUBLE') {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(num, (i) => {\n bin.writeDouble(data, toff + (8 * i), val[i]);\n });\n }\n\n if (dlen > 4) {\n dlen += (dlen & 1);\n eoff += dlen;\n }\n\n offset += 4;\n }\n\n return [offset, eoff];\n};\n\nconst encodeIfds = (ifds) => {\n const data = new Uint8Array(numBytesInIfd);\n let offset = 4;\n const bin = _binBE;\n\n // set big-endian byte-order\n // https://en.wikipedia.org/wiki/TIFF#Byte_order\n data[0] = 77;\n data[1] = 77;\n\n // set format-version number\n // https://en.wikipedia.org/wiki/TIFF#Byte_order\n data[3] = 42;\n\n let ifdo = 8;\n\n bin.writeUint(data, offset, ifdo);\n\n offset += 4;\n\n ifds.forEach((ifd, i) => {\n const noffs = _writeIFD(bin, data, ifdo, ifd);\n ifdo = noffs[1];\n if (i < ifds.length - 1) {\n bin.writeUint(data, noffs[0], ifdo);\n }\n });\n\n if (data.slice) {\n return data.slice(0, ifdo).buffer;\n }\n\n // node hasn't implemented slice on Uint8Array yet\n const result = new Uint8Array(ifdo);\n for (let i = 0; i < ifdo; i++) {\n result[i] = data[i];\n }\n return result.buffer;\n};\n\nconst encodeImage = (values, width, height, metadata) => {\n if (height === undefined || height === null) {\n throw new Error(`you passed into encodeImage a width of type ${height}`);\n }\n\n if (width === undefined || width === null) {\n throw new Error(`you passed into encodeImage a width of type ${width}`);\n }\n\n const ifd = {\n 256: [width], // ImageWidth\n 257: [height], // ImageLength\n 273: [numBytesInIfd], // strips offset\n 278: [height], // RowsPerStrip\n 305: 'geotiff.js', // no array for ASCII(Z)\n };\n\n if (metadata) {\n for (const i in metadata) {\n if (metadata.hasOwnProperty(i)) {\n ifd[i] = metadata[i];\n }\n }\n 
}\n\n const prfx = new Uint8Array(encodeIfds([ifd]));\n\n const img = new Uint8Array(values);\n\n const samplesPerPixel = ifd[277];\n\n const data = new Uint8Array(numBytesInIfd + (width * height * samplesPerPixel));\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(prfx.length, (i) => {\n data[i] = prfx[i];\n });\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"forEach\"])(img, (value, i) => {\n data[numBytesInIfd + i] = value;\n });\n\n return data.buffer;\n};\n\nconst convertToTids = (input) => {\n const result = {};\n for (const key in input) {\n if (key !== 'StripOffsets') {\n if (!name2code[key]) {\n console.error(key, 'not in name2code:', Object.keys(name2code));\n }\n result[name2code[key]] = input[key];\n }\n }\n return result;\n};\n\nconst toArray = (input) => {\n if (Array.isArray(input)) {\n return input;\n }\n return [input];\n};\n\nconst metadataDefaults = [\n ['Compression', 1], // no compression\n ['PlanarConfiguration', 1],\n ['XPosition', 0],\n ['YPosition', 0],\n ['ResolutionUnit', 1], // Code 1 for actual pixel count or 2 for pixels per inch.\n ['ExtraSamples', 0], // should this be an array??\n ['GeoAsciiParams', 'WGS 84\\u0000'],\n ['ModelTiepoint', [0, 0, 0, -180, 90, 0]], // raster fits whole globe\n ['GTModelTypeGeoKey', 2],\n ['GTRasterTypeGeoKey', 1],\n ['GeographicTypeGeoKey', 4326],\n ['GeogCitationGeoKey', 'WGS 84'],\n];\n\nfunction writeGeotiff(data, metadata) {\n const isFlattened = typeof data[0] === 'number';\n\n let height;\n let numBands;\n let width;\n let flattenedValues;\n\n if (isFlattened) {\n height = metadata.height || metadata.ImageLength;\n width = metadata.width || metadata.ImageWidth;\n numBands = data.length / (height * width);\n flattenedValues = data;\n } else {\n numBands = data.length;\n height = data[0].length;\n width = data[0][0].length;\n flattenedValues = [];\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(height, (rowIndex) => {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(width, (columnIndex) => {\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(numBands, (bandIndex) => {\n flattenedValues.push(data[bandIndex][rowIndex][columnIndex]);\n });\n });\n });\n }\n\n metadata.ImageLength = height;\n delete metadata.height;\n metadata.ImageWidth = width;\n delete metadata.width;\n\n // consult https://www.loc.gov/preservation/digital/formats/content/tiff_tags.shtml\n\n if (!metadata.BitsPerSample) {\n metadata.BitsPerSample = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(numBands, () => 8);\n }\n\n metadataDefaults.forEach((tag) => {\n const key = tag[0];\n if (!metadata[key]) {\n const value = tag[1];\n metadata[key] = value;\n }\n });\n\n // The color space of the image data.\n // 1=black is zero and 2=RGB.\n if (!metadata.PhotometricInterpretation) {\n metadata.PhotometricInterpretation = metadata.BitsPerSample.length === 3 ? 
2 : 1;\n }\n\n // The number of components per pixel.\n if (!metadata.SamplesPerPixel) {\n metadata.SamplesPerPixel = [numBands];\n }\n\n if (!metadata.StripByteCounts) {\n // we are only writing one strip\n metadata.StripByteCounts = [numBands * height * width];\n }\n\n if (!metadata.ModelPixelScale) {\n // assumes raster takes up exactly the whole globe\n metadata.ModelPixelScale = [360 / width, 180 / height, 0];\n }\n\n if (!metadata.SampleFormat) {\n metadata.SampleFormat = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"times\"])(numBands, () => 1);\n }\n\n\n const geoKeys = Object.keys(metadata)\n .filter((key) => Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\"endsWith\"])(key, 'GeoKey'))\n .sort((a, b) => name2code[a] - name2code[b]);\n\n if (!metadata.GeoKeyDirectory) {\n const NumberOfKeys = geoKeys.length;\n\n const GeoKeyDirectory = [1, 1, 0, NumberOfKeys];\n geoKeys.forEach((geoKey) => {\n const KeyID = Number(name2code[geoKey]);\n GeoKeyDirectory.push(KeyID);\n\n let Count;\n let TIFFTagLocation;\n let valueOffset;\n if (_globals__WEBPACK_IMPORTED_MODULE_0__[\"fieldTagTypes\"][KeyID] === 'SHORT') {\n Count = 1;\n TIFFTagLocation = 0;\n valueOffset = metadata[geoKey];\n } else if (geoKey === 'GeogCitationGeoKey') {\n Count = metadata.GeoAsciiParams.length;\n TIFFTagLocation = Number(name2code.GeoAsciiParams);\n valueOffset = 0;\n } else {\n console.log(`[geotiff.js] couldn't get TIFFTagLocation for ${geoKey}`);\n }\n GeoKeyDirectory.push(TIFFTagLocation);\n GeoKeyDirectory.push(Count);\n GeoKeyDirectory.push(valueOffset);\n });\n metadata.GeoKeyDirectory = GeoKeyDirectory;\n }\n\n // delete GeoKeys from metadata, because stored in GeoKeyDirectory tag\n for (const geoKey in geoKeys) {\n if (geoKeys.hasOwnProperty(geoKey)) {\n delete metadata[geoKey];\n }\n }\n\n [\n 'Compression',\n 'ExtraSamples',\n 'GeographicTypeGeoKey',\n 'GTModelTypeGeoKey',\n 'GTRasterTypeGeoKey',\n 'ImageLength', // synonym of ImageHeight\n 'ImageWidth',\n 'PhotometricInterpretation',\n 'PlanarConfiguration',\n 'ResolutionUnit',\n 'SamplesPerPixel',\n 'XPosition',\n 'YPosition',\n ].forEach((name) => {\n if (metadata[name]) {\n metadata[name] = toArray(metadata[name]);\n }\n });\n\n\n const encodedMetadata = convertToTids(metadata);\n\n const outputImage = encodeImage(flattenedValues, width, height, encodedMetadata);\n\n return outputImage;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiffwriter.js?");
297
298/***/ }),
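/* Usage sketch, assuming `writeGeotiff` from the module above is in scope; this UMD bundle
   itself only exposes `GeoRaster`, so the direct call below is illustrative only. The
   metadata defaults above fill in the remaining GeoTIFF tags (WGS 84, whole-globe tiepoint).

     // one 2 x 2 single-band raster given as [band][row][column]
     const values = [[[0, 1], [2, 3]]];
     const metadata = { height: 2, width: 2 };
     const arrayBuffer = writeGeotiff(values, metadata);   // -> ArrayBuffer holding a small GeoTIFF
     // in Node one could then persist it with:
     // require('fs').writeFileSync('out.tif', Buffer.from(arrayBuffer));
*/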
299
300/***/ "./node_modules/geotiff/src/globals.js":
301/*!*********************************************!*\
302 !*** ./node_modules/geotiff/src/globals.js ***!
303 \*********************************************/
304/*! exports provided: fieldTagNames, fieldTags, fieldTagTypes, arrayFields, fieldTypeNames, fieldTypes, photometricInterpretations, ExtraSamplesValues, geoKeyNames, geoKeys */
305/***/ (function(module, __webpack_exports__, __webpack_require__) {
306
307"use strict";
308eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fieldTagNames\", function() { return fieldTagNames; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fieldTags\", function() { return fieldTags; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fieldTagTypes\", function() { return fieldTagTypes; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"arrayFields\", function() { return arrayFields; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fieldTypeNames\", function() { return fieldTypeNames; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fieldTypes\", function() { return fieldTypes; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"photometricInterpretations\", function() { return photometricInterpretations; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"ExtraSamplesValues\", function() { return ExtraSamplesValues; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"geoKeyNames\", function() { return geoKeyNames; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"geoKeys\", function() { return geoKeys; });\nconst fieldTagNames = {\n // TIFF Baseline\n 0x013B: 'Artist',\n 0x0102: 'BitsPerSample',\n 0x0109: 'CellLength',\n 0x0108: 'CellWidth',\n 0x0140: 'ColorMap',\n 0x0103: 'Compression',\n 0x8298: 'Copyright',\n 0x0132: 'DateTime',\n 0x0152: 'ExtraSamples',\n 0x010A: 'FillOrder',\n 0x0121: 'FreeByteCounts',\n 0x0120: 'FreeOffsets',\n 0x0123: 'GrayResponseCurve',\n 0x0122: 'GrayResponseUnit',\n 0x013C: 'HostComputer',\n 0x010E: 'ImageDescription',\n 0x0101: 'ImageLength',\n 0x0100: 'ImageWidth',\n 0x010F: 'Make',\n 0x0119: 'MaxSampleValue',\n 0x0118: 'MinSampleValue',\n 0x0110: 'Model',\n 0x00FE: 'NewSubfileType',\n 0x0112: 'Orientation',\n 0x0106: 'PhotometricInterpretation',\n 0x011C: 'PlanarConfiguration',\n 0x0128: 'ResolutionUnit',\n 0x0116: 'RowsPerStrip',\n 0x0115: 'SamplesPerPixel',\n 0x0131: 'Software',\n 0x0117: 'StripByteCounts',\n 0x0111: 'StripOffsets',\n 0x00FF: 'SubfileType',\n 0x0107: 'Threshholding',\n 0x011A: 'XResolution',\n 0x011B: 'YResolution',\n\n // TIFF Extended\n 0x0146: 'BadFaxLines',\n 0x0147: 'CleanFaxData',\n 0x0157: 'ClipPath',\n 0x0148: 'ConsecutiveBadFaxLines',\n 0x01B1: 'Decode',\n 0x01B2: 'DefaultImageColor',\n 0x010D: 'DocumentName',\n 0x0150: 'DotRange',\n 0x0141: 'HalftoneHints',\n 0x015A: 'Indexed',\n 0x015B: 'JPEGTables',\n 0x011D: 'PageName',\n 0x0129: 'PageNumber',\n 0x013D: 'Predictor',\n 0x013F: 'PrimaryChromaticities',\n 0x0214: 'ReferenceBlackWhite',\n 0x0153: 'SampleFormat',\n 0x0154: 'SMinSampleValue',\n 0x0155: 'SMaxSampleValue',\n 0x022F: 'StripRowCounts',\n 0x014A: 'SubIFDs',\n 0x0124: 'T4Options',\n 0x0125: 'T6Options',\n 0x0145: 'TileByteCounts',\n 0x0143: 'TileLength',\n 0x0144: 'TileOffsets',\n 0x0142: 'TileWidth',\n 0x012D: 'TransferFunction',\n 0x013E: 'WhitePoint',\n 0x0158: 'XClipPathUnits',\n 0x011E: 'XPosition',\n 0x0211: 'YCbCrCoefficients',\n 0x0213: 'YCbCrPositioning',\n 0x0212: 'YCbCrSubSampling',\n 0x0159: 'YClipPathUnits',\n 0x011F: 'YPosition',\n\n // EXIF\n 0x9202: 'ApertureValue',\n 0xA001: 'ColorSpace',\n 0x9004: 'DateTimeDigitized',\n 0x9003: 'DateTimeOriginal',\n 0x8769: 'Exif IFD',\n 0x9000: 'ExifVersion',\n 0x829A: 'ExposureTime',\n 0xA300: 'FileSource',\n 
0x9209: 'Flash',\n 0xA000: 'FlashpixVersion',\n 0x829D: 'FNumber',\n 0xA420: 'ImageUniqueID',\n 0x9208: 'LightSource',\n 0x927C: 'MakerNote',\n 0x9201: 'ShutterSpeedValue',\n 0x9286: 'UserComment',\n\n // IPTC\n 0x83BB: 'IPTC',\n\n // ICC\n 0x8773: 'ICC Profile',\n\n // XMP\n 0x02BC: 'XMP',\n\n // GDAL\n 0xA480: 'GDAL_METADATA',\n 0xA481: 'GDAL_NODATA',\n\n // Photoshop\n 0x8649: 'Photoshop',\n\n // GeoTiff\n 0x830E: 'ModelPixelScale',\n 0x8482: 'ModelTiepoint',\n 0x85D8: 'ModelTransformation',\n 0x87AF: 'GeoKeyDirectory',\n 0x87B0: 'GeoDoubleParams',\n 0x87B1: 'GeoAsciiParams',\n};\n\nconst fieldTags = {};\nfor (const key in fieldTagNames) {\n if (fieldTagNames.hasOwnProperty(key)) {\n fieldTags[fieldTagNames[key]] = parseInt(key, 10);\n }\n}\n\nconst fieldTagTypes = {\n 256: 'SHORT',\n 257: 'SHORT',\n 258: 'SHORT',\n 259: 'SHORT',\n 262: 'SHORT',\n 273: 'LONG',\n 274: 'SHORT',\n 277: 'SHORT',\n 278: 'LONG',\n 279: 'LONG',\n 282: 'RATIONAL',\n 283: 'RATIONAL',\n 284: 'SHORT',\n 286: 'SHORT',\n 287: 'RATIONAL',\n 296: 'SHORT',\n 305: 'ASCII',\n 306: 'ASCII',\n 338: 'SHORT',\n 339: 'SHORT',\n 513: 'LONG',\n 514: 'LONG',\n 1024: 'SHORT',\n 1025: 'SHORT',\n 2048: 'SHORT',\n 2049: 'ASCII',\n 33550: 'DOUBLE',\n 33922: 'DOUBLE',\n 34665: 'LONG',\n 34735: 'SHORT',\n 34737: 'ASCII',\n 42113: 'ASCII',\n};\n\nconst arrayFields = [\n fieldTags.BitsPerSample,\n fieldTags.ExtraSamples,\n fieldTags.SampleFormat,\n fieldTags.StripByteCounts,\n fieldTags.StripOffsets,\n fieldTags.StripRowCounts,\n fieldTags.TileByteCounts,\n fieldTags.TileOffsets,\n];\n\nconst fieldTypeNames = {\n 0x0001: 'BYTE',\n 0x0002: 'ASCII',\n 0x0003: 'SHORT',\n 0x0004: 'LONG',\n 0x0005: 'RATIONAL',\n 0x0006: 'SBYTE',\n 0x0007: 'UNDEFINED',\n 0x0008: 'SSHORT',\n 0x0009: 'SLONG',\n 0x000A: 'SRATIONAL',\n 0x000B: 'FLOAT',\n 0x000C: 'DOUBLE',\n // IFD offset, suggested by https://owl.phy.queensu.ca/~phil/exiftool/standards.html\n 0x000D: 'IFD',\n // introduced by BigTIFF\n 0x0010: 'LONG8',\n 0x0011: 'SLONG8',\n 0x0012: 'IFD8',\n};\n\nconst fieldTypes = {};\nfor (const key in fieldTypeNames) {\n if (fieldTypeNames.hasOwnProperty(key)) {\n fieldTypes[fieldTypeNames[key]] = parseInt(key, 10);\n }\n}\n\nconst photometricInterpretations = {\n WhiteIsZero: 0,\n BlackIsZero: 1,\n RGB: 2,\n Palette: 3,\n TransparencyMask: 4,\n CMYK: 5,\n YCbCr: 6,\n\n CIELab: 8,\n ICCLab: 9,\n};\n\nconst ExtraSamplesValues = {\n Unspecified: 0,\n Assocalpha: 1,\n Unassalpha: 2,\n};\n\n\nconst geoKeyNames = {\n 1024: 'GTModelTypeGeoKey',\n 1025: 'GTRasterTypeGeoKey',\n 1026: 'GTCitationGeoKey',\n 2048: 'GeographicTypeGeoKey',\n 2049: 'GeogCitationGeoKey',\n 2050: 'GeogGeodeticDatumGeoKey',\n 2051: 'GeogPrimeMeridianGeoKey',\n 2052: 'GeogLinearUnitsGeoKey',\n 2053: 'GeogLinearUnitSizeGeoKey',\n 2054: 'GeogAngularUnitsGeoKey',\n 2055: 'GeogAngularUnitSizeGeoKey',\n 2056: 'GeogEllipsoidGeoKey',\n 2057: 'GeogSemiMajorAxisGeoKey',\n 2058: 'GeogSemiMinorAxisGeoKey',\n 2059: 'GeogInvFlatteningGeoKey',\n 2060: 'GeogAzimuthUnitsGeoKey',\n 2061: 'GeogPrimeMeridianLongGeoKey',\n 2062: 'GeogTOWGS84GeoKey',\n 3072: 'ProjectedCSTypeGeoKey',\n 3073: 'PCSCitationGeoKey',\n 3074: 'ProjectionGeoKey',\n 3075: 'ProjCoordTransGeoKey',\n 3076: 'ProjLinearUnitsGeoKey',\n 3077: 'ProjLinearUnitSizeGeoKey',\n 3078: 'ProjStdParallel1GeoKey',\n 3079: 'ProjStdParallel2GeoKey',\n 3080: 'ProjNatOriginLongGeoKey',\n 3081: 'ProjNatOriginLatGeoKey',\n 3082: 'ProjFalseEastingGeoKey',\n 3083: 'ProjFalseNorthingGeoKey',\n 3084: 'ProjFalseOriginLongGeoKey',\n 3085: 'ProjFalseOriginLatGeoKey',\n 
3086: 'ProjFalseOriginEastingGeoKey',\n 3087: 'ProjFalseOriginNorthingGeoKey',\n 3088: 'ProjCenterLongGeoKey',\n 3089: 'ProjCenterLatGeoKey',\n 3090: 'ProjCenterEastingGeoKey',\n 3091: 'ProjCenterNorthingGeoKey',\n 3092: 'ProjScaleAtNatOriginGeoKey',\n 3093: 'ProjScaleAtCenterGeoKey',\n 3094: 'ProjAzimuthAngleGeoKey',\n 3095: 'ProjStraightVertPoleLongGeoKey',\n 3096: 'ProjRectifiedGridAngleGeoKey',\n 4096: 'VerticalCSTypeGeoKey',\n 4097: 'VerticalCitationGeoKey',\n 4098: 'VerticalDatumGeoKey',\n 4099: 'VerticalUnitsGeoKey',\n};\n\nconst geoKeys = {};\nfor (const key in geoKeyNames) {\n if (geoKeyNames.hasOwnProperty(key)) {\n geoKeys[geoKeyNames[key]] = parseInt(key, 10);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/globals.js?");
309
310/***/ }),
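/* Quick reference for the lookup tables above, assuming the exports are in scope:
   `fieldTagNames` / `geoKeyNames` map numeric TIFF tag and GeoKey codes to names, and
   `fieldTags` / `geoKeys` are the inverted maps built at module load.

     fieldTagNames[0x0100];          // 'ImageWidth'
     fieldTags.ImageWidth;           // 256
     geoKeyNames[2048];              // 'GeographicTypeGeoKey'
     geoKeys.GeographicTypeGeoKey;   // 2048
*/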
311
312/***/ "./node_modules/geotiff/src/logging.js":
313/*!*********************************************!*\
314 !*** ./node_modules/geotiff/src/logging.js ***!
315 \*********************************************/
316/*! exports provided: setLogger, log, info, warn, error, time, timeEnd */
317/***/ (function(module, __webpack_exports__, __webpack_require__) {
318
319"use strict";
320eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"setLogger\", function() { return setLogger; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"log\", function() { return log; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"info\", function() { return info; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"warn\", function() { return warn; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"error\", function() { return error; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"time\", function() { return time; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"timeEnd\", function() { return timeEnd; });\n\n/**\n * A no-op logger\n */\nclass DummyLogger {\n log() {}\n\n info() {}\n\n warn() {}\n\n error() {}\n\n time() {}\n\n timeEnd() {}\n}\n\nlet LOGGER = new DummyLogger();\n\n/**\n *\n * @param {object} logger the new logger. e.g `console`\n */\nfunction setLogger(logger = new DummyLogger()) {\n LOGGER = logger;\n}\n\nfunction log(...args) {\n return LOGGER.log(...args);\n}\n\nfunction info(...args) {\n return LOGGER.info(...args);\n}\n\nfunction warn(...args) {\n return LOGGER.warn(...args);\n}\n\nfunction error(...args) {\n return LOGGER.error(...args);\n}\n\nfunction time(...args) {\n return LOGGER.time(...args);\n}\n\nfunction timeEnd(...args) {\n return LOGGER.timeEnd(...args);\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/logging.js?");
321
322/***/ }),
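/* Usage sketch, assuming `setLogger` and `warn` from the module above are in scope.
   The default DummyLogger swallows all output; passing `console` makes geotiff's internal
   logging visible, and calling `setLogger()` with no argument restores the silent default.

     setLogger(console);          // route log/info/warn/error/time/timeEnd to the console
     warn('missing overview');    // now forwards to console.warn
     setLogger();                 // back to the no-op DummyLogger
*/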
323
324/***/ "./node_modules/geotiff/src/pool.js":
325/*!******************************************!*\
326 !*** ./node_modules/geotiff/src/pool.js ***!
327 \******************************************/
328/*! exports provided: default */
329/***/ (function(module, __webpack_exports__, __webpack_require__) {
330
331"use strict";
332eval("__webpack_require__.r(__webpack_exports__);\n/* WEBPACK VAR INJECTION */(function(__webpack__worker__0) {/* harmony import */ var threads__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! threads */ \"./node_modules/threads/dist-esm/index.js\");\n\n\nconst defaultPoolSize = typeof navigator !== 'undefined' ? navigator.hardwareConcurrency : null;\n\n/**\n * @module pool\n */\n\n/**\n * Pool for workers to decode chunks of the images.\n */\nclass Pool {\n /**\n * @constructor\n * @param {Number} size The size of the pool. Defaults to the number of CPUs\n * available. When this parameter is `null` or 0, then the\n * decoding will be done in the main thread.\n */\n constructor(size = defaultPoolSize) {\n const worker = new threads__WEBPACK_IMPORTED_MODULE_0__[\"Worker\"](__webpack__worker__0);\n this.pool = Object(threads__WEBPACK_IMPORTED_MODULE_0__[\"Pool\"])(() => Object(threads__WEBPACK_IMPORTED_MODULE_0__[\"spawn\"])(worker), size);\n }\n\n /**\n * Decode the given block of bytes with the set compression method.\n * @param {ArrayBuffer} buffer the array buffer of bytes to decode.\n * @returns {Promise.<ArrayBuffer>} the decoded result as a `Promise`\n */\n async decode(fileDirectory, buffer) {\n return new Promise((resolve, reject) => {\n this.pool.queue(async (decode) => {\n try {\n const data = await decode(fileDirectory, buffer);\n resolve(data);\n } catch (err) {\n reject(err);\n }\n });\n });\n }\n\n destroy() {\n this.pool.terminate(true);\n }\n}\n\n/* harmony default export */ __webpack_exports__[\"default\"] = (Pool);\n\n/* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./node_modules/threads-plugin/dist/loader.js?{\"name\":\"0\"}!./decoder.worker.js */ \"./node_modules/threads-plugin/dist/loader.js?{\\\"name\\\":\\\"0\\\"}!./node_modules/geotiff/src/decoder.worker.js\")))\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/pool.js?");
333
334/***/ }),
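/* Usage sketch, assuming the default `Pool` export above is in scope. `fileDirectory` and
   `compressedBytes` are hypothetical placeholders for an IFD object and a raw tile/strip buffer.

     const pool = new Pool();    // size defaults to navigator.hardwareConcurrency
     // inside an async function:
     const decoded = await pool.decode(fileDirectory, compressedBytes);   // Promise<ArrayBuffer>
     pool.destroy();             // terminate the workers when finished
*/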
335
336/***/ "./node_modules/geotiff/src/predictor.js":
337/*!***********************************************!*\
338 !*** ./node_modules/geotiff/src/predictor.js ***!
339 \***********************************************/
340/*! exports provided: applyPredictor */
341/***/ (function(module, __webpack_exports__, __webpack_require__) {
342
343"use strict";
344eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"applyPredictor\", function() { return applyPredictor; });\n\nfunction decodeRowAcc(row, stride) {\n let length = row.length - stride;\n let offset = 0;\n do {\n for (let i = stride; i > 0; i--) {\n row[offset + stride] += row[offset];\n offset++;\n }\n\n length -= stride;\n } while (length > 0);\n}\n\nfunction decodeRowFloatingPoint(row, stride, bytesPerSample) {\n let index = 0;\n let count = row.length;\n const wc = count / bytesPerSample;\n\n while (count > stride) {\n for (let i = stride; i > 0; --i) {\n row[index + stride] += row[index];\n ++index;\n }\n count -= stride;\n }\n\n const copy = row.slice();\n for (let i = 0; i < wc; ++i) {\n for (let b = 0; b < bytesPerSample; ++b) {\n row[(bytesPerSample * i) + b] = copy[((bytesPerSample - b - 1) * wc) + i];\n }\n }\n}\n\nfunction applyPredictor(block, predictor, width, height, bitsPerSample,\n planarConfiguration) {\n if (!predictor || predictor === 1) {\n return block;\n }\n\n for (let i = 0; i < bitsPerSample.length; ++i) {\n if (bitsPerSample[i] % 8 !== 0) {\n throw new Error('When decoding with predictor, only multiple of 8 bits are supported.');\n }\n if (bitsPerSample[i] !== bitsPerSample[0]) {\n throw new Error('When decoding with predictor, all samples must have the same size.');\n }\n }\n\n const bytesPerSample = bitsPerSample[0] / 8;\n const stride = planarConfiguration === 2 ? 1 : bitsPerSample.length;\n\n for (let i = 0; i < height; ++i) {\n // Last strip will be truncated if height % stripHeight != 0\n if (i * stride * width * bytesPerSample >= block.byteLength) {\n break;\n }\n let row;\n if (predictor === 2) { // horizontal prediction\n switch (bitsPerSample[0]) {\n case 8:\n row = new Uint8Array(\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,\n );\n break;\n case 16:\n row = new Uint16Array(\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 2,\n );\n break;\n case 32:\n row = new Uint32Array(\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 4,\n );\n break;\n default:\n throw new Error(`Predictor 2 not allowed with ${bitsPerSample[0]} bits per sample.`);\n }\n decodeRowAcc(row, stride, bytesPerSample);\n } else if (predictor === 3) { // horizontal floating point\n row = new Uint8Array(\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,\n );\n decodeRowFloatingPoint(row, stride, bytesPerSample);\n }\n }\n return block;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/predictor.js?");
345
346/***/ }),
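/* Usage sketch, assuming `applyPredictor` from the module above is in scope. A 4 x 1 strip of
   8-bit samples stored with horizontal differencing (Predictor = 2) is decoded in place.

     const block = new Uint8Array([10, 1, 1, 1]).buffer;   // ArrayBuffer of differenced samples
     applyPredictor(block, 2, 4, 1, [8], 1);               // width 4, height 1, 8 bits, chunky
     new Uint8Array(block);                                // -> [10, 11, 12, 13]
*/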
347
348/***/ "./node_modules/geotiff/src/resample.js":
349/*!**********************************************!*\
350 !*** ./node_modules/geotiff/src/resample.js ***!
351 \**********************************************/
352/*! exports provided: resampleNearest, resampleBilinear, resample, resampleNearestInterleaved, resampleBilinearInterleaved, resampleInterleaved */
353/***/ (function(module, __webpack_exports__, __webpack_require__) {
354
355"use strict";
356eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resampleNearest\", function() { return resampleNearest; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resampleBilinear\", function() { return resampleBilinear; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resample\", function() { return resample; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resampleNearestInterleaved\", function() { return resampleNearestInterleaved; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resampleBilinearInterleaved\", function() { return resampleBilinearInterleaved; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"resampleInterleaved\", function() { return resampleInterleaved; });\n/**\n * @module resample\n */\n\nfunction copyNewSize(array, width, height, samplesPerPixel = 1) {\n return new (Object.getPrototypeOf(array).constructor)(width * height * samplesPerPixel);\n}\n\n/**\n * Resample the input arrays using nearest neighbor value selection.\n * @param {TypedArray[]} valueArrays The input arrays to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @returns {TypedArray[]} The resampled rasters\n */\nfunction resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight) {\n const relX = inWidth / outWidth;\n const relY = inHeight / outHeight;\n return valueArrays.map((array) => {\n const newArray = copyNewSize(array, outWidth, outHeight);\n for (let y = 0; y < outHeight; ++y) {\n const cy = Math.min(Math.round(relY * y), inHeight - 1);\n for (let x = 0; x < outWidth; ++x) {\n const cx = Math.min(Math.round(relX * x), inWidth - 1);\n const value = array[(cy * inWidth) + cx];\n newArray[(y * outWidth) + x] = value;\n }\n }\n return newArray;\n });\n}\n\n// simple linear interpolation, code from:\n// https://en.wikipedia.org/wiki/Linear_interpolation#Programming_language_support\nfunction lerp(v0, v1, t) {\n return ((1 - t) * v0) + (t * v1);\n}\n\n/**\n * Resample the input arrays using bilinear interpolation.\n * @param {TypedArray[]} valueArrays The input arrays to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @returns {TypedArray[]} The resampled rasters\n */\nfunction resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight) {\n const relX = inWidth / outWidth;\n const relY = inHeight / outHeight;\n\n return valueArrays.map((array) => {\n const newArray = copyNewSize(array, outWidth, outHeight);\n for (let y = 0; y < outHeight; ++y) {\n const rawY = relY * y;\n\n const yl = Math.floor(rawY);\n const yh = Math.min(Math.ceil(rawY), (inHeight - 1));\n\n for (let x = 0; x < outWidth; ++x) {\n const rawX = relX * x;\n const tx = rawX % 1;\n\n const xl = Math.floor(rawX);\n const xh = Math.min(Math.ceil(rawX), (inWidth - 1));\n\n const ll = array[(yl * inWidth) + xl];\n const hl = array[(yl * inWidth) + xh];\n const lh = array[(yh * inWidth) + xl];\n const hh = array[(yh * inWidth) + xh];\n\n const value = 
lerp(\n lerp(ll, hl, tx),\n lerp(lh, hh, tx),\n rawY % 1,\n );\n newArray[(y * outWidth) + x] = value;\n }\n }\n return newArray;\n });\n}\n\n/**\n * Resample the input arrays using the selected resampling method.\n * @param {TypedArray[]} valueArrays The input arrays to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @param {string} [method = 'nearest'] The desired resampling method\n * @returns {TypedArray[]} The resampled rasters\n */\nfunction resample(valueArrays, inWidth, inHeight, outWidth, outHeight, method = 'nearest') {\n switch (method.toLowerCase()) {\n case 'nearest':\n return resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight);\n case 'bilinear':\n case 'linear':\n return resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight);\n default:\n throw new Error(`Unsupported resampling method: '${method}'`);\n }\n}\n\n/**\n * Resample the pixel interleaved input array using nearest neighbor value selection.\n * @param {TypedArray} valueArrays The input arrays to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @param {number} samples The number of samples per pixel for pixel\n * interleaved data\n * @returns {TypedArray} The resampled raster\n */\nfunction resampleNearestInterleaved(\n valueArray, inWidth, inHeight, outWidth, outHeight, samples) {\n const relX = inWidth / outWidth;\n const relY = inHeight / outHeight;\n\n const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);\n for (let y = 0; y < outHeight; ++y) {\n const cy = Math.min(Math.round(relY * y), inHeight - 1);\n for (let x = 0; x < outWidth; ++x) {\n const cx = Math.min(Math.round(relX * x), inWidth - 1);\n for (let i = 0; i < samples; ++i) {\n const value = valueArray[(cy * inWidth * samples) + (cx * samples) + i];\n newArray[(y * outWidth * samples) + (x * samples) + i] = value;\n }\n }\n }\n return newArray;\n}\n\n/**\n * Resample the pixel interleaved input array using bilinear interpolation.\n * @param {TypedArray} valueArrays The input arrays to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @param {number} samples The number of samples per pixel for pixel\n * interleaved data\n * @returns {TypedArray} The resampled raster\n */\nfunction resampleBilinearInterleaved(\n valueArray, inWidth, inHeight, outWidth, outHeight, samples) {\n const relX = inWidth / outWidth;\n const relY = inHeight / outHeight;\n const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);\n for (let y = 0; y < outHeight; ++y) {\n const rawY = relY * y;\n\n const yl = Math.floor(rawY);\n const yh = Math.min(Math.ceil(rawY), (inHeight - 1));\n\n for (let x = 0; x < outWidth; ++x) {\n const rawX = relX * x;\n const tx = rawX % 1;\n\n const xl = Math.floor(rawX);\n const xh = Math.min(Math.ceil(rawX), (inWidth - 1));\n\n for (let i = 0; i < samples; ++i) {\n const ll = valueArray[(yl * inWidth * samples) + (xl * samples) + 
i];\n const hl = valueArray[(yl * inWidth * samples) + (xh * samples) + i];\n const lh = valueArray[(yh * inWidth * samples) + (xl * samples) + i];\n const hh = valueArray[(yh * inWidth * samples) + (xh * samples) + i];\n\n const value = lerp(\n lerp(ll, hl, tx),\n lerp(lh, hh, tx),\n rawY % 1,\n );\n newArray[(y * outWidth * samples) + (x * samples) + i] = value;\n }\n }\n }\n return newArray;\n}\n\n/**\n * Resample the pixel interleaved input array using the selected resampling method.\n * @param {TypedArray} valueArray The input array to resample\n * @param {number} inWidth The width of the input rasters\n * @param {number} inHeight The height of the input rasters\n * @param {number} outWidth The desired width of the output rasters\n * @param {number} outHeight The desired height of the output rasters\n * @param {number} samples The number of samples per pixel for pixel\n * interleaved data\n * @param {string} [method = 'nearest'] The desired resampling method\n * @returns {TypedArray} The resampled rasters\n */\nfunction resampleInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples, method = 'nearest') {\n switch (method.toLowerCase()) {\n case 'nearest':\n return resampleNearestInterleaved(\n valueArray, inWidth, inHeight, outWidth, outHeight, samples,\n );\n case 'bilinear':\n case 'linear':\n return resampleBilinearInterleaved(\n valueArray, inWidth, inHeight, outWidth, outHeight, samples,\n );\n default:\n throw new Error(`Unsupported resampling method: '${method}'`);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/resample.js?");
357
358/***/ }),
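/* Usage sketch, assuming `resample` from the module above is in scope. A 2 x 2 band is
   upsampled to 4 x 4; the output arrays keep the input's typed-array constructor.

     const band = Uint8Array.from([0, 10, 20, 30]);            // 2 x 2 raster
     const [out] = resample([band], 2, 2, 4, 4, 'bilinear');   // Uint8Array of length 16
*/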
359
360/***/ "./node_modules/geotiff/src/rgb.js":
361/*!*****************************************!*\
362 !*** ./node_modules/geotiff/src/rgb.js ***!
363 \*****************************************/
364/*! exports provided: fromWhiteIsZero, fromBlackIsZero, fromPalette, fromCMYK, fromYCbCr, fromCIELab */
365/***/ (function(module, __webpack_exports__, __webpack_require__) {
366
367"use strict";
368eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromWhiteIsZero\", function() { return fromWhiteIsZero; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromBlackIsZero\", function() { return fromBlackIsZero; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromPalette\", function() { return fromPalette; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromCMYK\", function() { return fromCMYK; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromYCbCr\", function() { return fromYCbCr; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"fromCIELab\", function() { return fromCIELab; });\nfunction fromWhiteIsZero(raster, max) {\n const { width, height } = raster;\n const rgbRaster = new Uint8Array(width * height * 3);\n let value;\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\n value = 256 - (raster[i] / max * 256);\n rgbRaster[j] = value;\n rgbRaster[j + 1] = value;\n rgbRaster[j + 2] = value;\n }\n return rgbRaster;\n}\n\nfunction fromBlackIsZero(raster, max) {\n const { width, height } = raster;\n const rgbRaster = new Uint8Array(width * height * 3);\n let value;\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\n value = raster[i] / max * 256;\n rgbRaster[j] = value;\n rgbRaster[j + 1] = value;\n rgbRaster[j + 2] = value;\n }\n return rgbRaster;\n}\n\nfunction fromPalette(raster, colorMap) {\n const { width, height } = raster;\n const rgbRaster = new Uint8Array(width * height * 3);\n const greenOffset = colorMap.length / 3;\n const blueOffset = colorMap.length / 3 * 2;\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\n const mapIndex = raster[i];\n rgbRaster[j] = colorMap[mapIndex] / 65536 * 256;\n rgbRaster[j + 1] = colorMap[mapIndex + greenOffset] / 65536 * 256;\n rgbRaster[j + 2] = colorMap[mapIndex + blueOffset] / 65536 * 256;\n }\n return rgbRaster;\n}\n\nfunction fromCMYK(cmykRaster) {\n const { width, height } = cmykRaster;\n const rgbRaster = new Uint8Array(width * height * 3);\n for (let i = 0, j = 0; i < cmykRaster.length; i += 4, j += 3) {\n const c = cmykRaster[i];\n const m = cmykRaster[i + 1];\n const y = cmykRaster[i + 2];\n const k = cmykRaster[i + 3];\n\n rgbRaster[j] = 255 * ((255 - c) / 256) * ((255 - k) / 256);\n rgbRaster[j + 1] = 255 * ((255 - m) / 256) * ((255 - k) / 256);\n rgbRaster[j + 2] = 255 * ((255 - y) / 256) * ((255 - k) / 256);\n }\n return rgbRaster;\n}\n\nfunction fromYCbCr(yCbCrRaster) {\n const { width, height } = yCbCrRaster;\n const rgbRaster = new Uint8ClampedArray(width * height * 3);\n for (let i = 0, j = 0; i < yCbCrRaster.length; i += 3, j += 3) {\n const y = yCbCrRaster[i];\n const cb = yCbCrRaster[i + 1];\n const cr = yCbCrRaster[i + 2];\n\n rgbRaster[j] = (y + (1.40200 * (cr - 0x80)));\n rgbRaster[j + 1] = (y - (0.34414 * (cb - 0x80)) - (0.71414 * (cr - 0x80)));\n rgbRaster[j + 2] = (y + (1.77200 * (cb - 0x80)));\n }\n return rgbRaster;\n}\n\nconst Xn = 0.95047;\nconst Yn = 1.00000;\nconst Zn = 1.08883;\n\n// from https://github.com/antimatter15/rgb-lab/blob/master/color.js\n\nfunction fromCIELab(cieLabRaster) {\n const { width, height } = cieLabRaster;\n const rgbRaster = new Uint8Array(width * height * 3);\n\n for (let i = 0, j = 0; i < cieLabRaster.length; i += 3, j += 3) {\n const L = cieLabRaster[i + 0];\n const a_ = cieLabRaster[i + 1] << 24 >> 24; // conversion from 
uint8 to int8\n const b_ = cieLabRaster[i + 2] << 24 >> 24; // same\n\n let y = (L + 16) / 116;\n let x = (a_ / 500) + y;\n let z = y - (b_ / 200);\n let r;\n let g;\n let b;\n\n x = Xn * ((x * x * x > 0.008856) ? x * x * x : (x - (16 / 116)) / 7.787);\n y = Yn * ((y * y * y > 0.008856) ? y * y * y : (y - (16 / 116)) / 7.787);\n z = Zn * ((z * z * z > 0.008856) ? z * z * z : (z - (16 / 116)) / 7.787);\n\n r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);\n g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);\n b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);\n\n r = (r > 0.0031308) ? ((1.055 * (r ** (1 / 2.4))) - 0.055) : 12.92 * r;\n g = (g > 0.0031308) ? ((1.055 * (g ** (1 / 2.4))) - 0.055) : 12.92 * g;\n b = (b > 0.0031308) ? ((1.055 * (b ** (1 / 2.4))) - 0.055) : 12.92 * b;\n\n rgbRaster[j] = Math.max(0, Math.min(1, r)) * 255;\n rgbRaster[j + 1] = Math.max(0, Math.min(1, g)) * 255;\n rgbRaster[j + 2] = Math.max(0, Math.min(1, b)) * 255;\n }\n return rgbRaster;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/rgb.js?");
369
370/***/ }),
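/* Usage sketch, assuming `fromBlackIsZero` from the module above is in scope. These converters
   read `width` and `height` off the raster itself, mirroring the tagged arrays geotiff's raster
   readers return, so the plain typed array below is tagged by hand for illustration.

     const raster = Uint8Array.from([0, 64, 128, 255]);
     raster.width = 2;
     raster.height = 2;
     const rgb = fromBlackIsZero(raster, 255);   // Uint8Array of length 2 * 2 * 3
*/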
371
372/***/ "./node_modules/geotiff/src/source.js":
373/*!********************************************!*\
374 !*** ./node_modules/geotiff/src/source.js ***!
375 \********************************************/
376/*! exports provided: makeFetchSource, makeXHRSource, makeHttpSource, makeRemoteSource, makeBufferSource, makeFileSource, makeFileReaderSource */
377/***/ (function(module, __webpack_exports__, __webpack_require__) {
378
379"use strict";
380eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeFetchSource\", function() { return makeFetchSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeXHRSource\", function() { return makeXHRSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeHttpSource\", function() { return makeHttpSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeRemoteSource\", function() { return makeRemoteSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeBufferSource\", function() { return makeBufferSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeFileSource\", function() { return makeFileSource; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"makeFileReaderSource\", function() { return makeFileReaderSource; });\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! buffer */ \"buffer\");\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(buffer__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! fs */ \"fs\");\n/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(fs__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var http__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! http */ \"http\");\n/* harmony import */ var http__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(http__WEBPACK_IMPORTED_MODULE_2__);\n/* harmony import */ var https__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! https */ \"https\");\n/* harmony import */ var https__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(https__WEBPACK_IMPORTED_MODULE_3__);\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
url */ \"url\");\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(url__WEBPACK_IMPORTED_MODULE_4__);\n\n\n\n\n\n\n\nfunction readRangeFromBlocks(blocks, rangeOffset, rangeLength) {\n const rangeTop = rangeOffset + rangeLength;\n const rangeData = new ArrayBuffer(rangeLength);\n const rangeView = new Uint8Array(rangeData);\n\n for (const block of blocks) {\n const delta = block.offset - rangeOffset;\n const topDelta = block.top - rangeTop;\n let blockInnerOffset = 0;\n let rangeInnerOffset = 0;\n let usedBlockLength;\n\n if (delta < 0) {\n blockInnerOffset = -delta;\n } else if (delta > 0) {\n rangeInnerOffset = delta;\n }\n\n if (topDelta < 0) {\n usedBlockLength = block.length - blockInnerOffset;\n } else {\n usedBlockLength = rangeTop - block.offset - blockInnerOffset;\n }\n\n const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);\n rangeView.set(blockView, rangeInnerOffset);\n }\n\n return rangeData;\n}\n\n/**\n * Interface for Source objects.\n * @interface Source\n */\n\n/**\n * @function Source#fetch\n * @summary The main method to retrieve the data from the source.\n * @param {number} offset The offset to read from in the source\n * @param {number} length The requested number of bytes\n */\n\n/**\n * @typedef {object} Block\n * @property {ArrayBuffer} data The actual data of the block.\n * @property {number} offset The actual offset of the block within the file.\n * @property {number} length The actual size of the block in bytes.\n */\n\n/**\n * Callback type for sources to request patches of data.\n * @callback requestCallback\n * @async\n * @param {number} offset The offset within the file.\n * @param {number} length The desired length of data to be read.\n * @returns {Promise<Block>} The block of data.\n */\n\n/**\n * @module source\n */\n\n/*\n * Split a list of identifiers to form groups of coherent ones\n */\nfunction getCoherentBlockGroups(blockIds) {\n if (blockIds.length === 0) {\n return [];\n }\n\n const groups = [];\n let current = [];\n groups.push(current);\n\n for (let i = 0; i < blockIds.length; ++i) {\n if (i === 0 || blockIds[i] === blockIds[i - 1] + 1) {\n current.push(blockIds[i]);\n } else {\n current = [blockIds[i]];\n groups.push(current);\n }\n }\n return groups;\n}\n\n\n/*\n * Promisified wrapper around 'setTimeout' to allow 'await'\n */\nasync function wait(milliseconds) {\n return new Promise((resolve) => setTimeout(resolve, milliseconds));\n}\n\n/**\n * BlockedSource - an abstraction of (remote) files.\n * @implements Source\n */\nclass BlockedSource {\n /**\n * @param {requestCallback} retrievalFunction Callback function to request data\n * @param {object} options Additional options\n * @param {object} options.blockSize Size of blocks to be fetched\n */\n constructor(retrievalFunction, { blockSize = 65536 } = {}) {\n this.retrievalFunction = retrievalFunction;\n this.blockSize = blockSize;\n\n // currently running block requests\n this.blockRequests = new Map();\n\n // already retrieved blocks\n this.blocks = new Map();\n\n // block ids waiting for a batched request. 
Either a Set or null\n this.blockIdsAwaitingRequest = null;\n }\n\n /**\n * Fetch a subset of the file.\n * @param {number} offset The offset within the file to read from.\n * @param {number} length The length in bytes to read from.\n * @returns {ArrayBuffer} The subset of the file.\n */\n async fetch(offset, length, immediate = false) {\n const top = offset + length;\n\n // calculate what blocks intersect the specified range (offset + length)\n // determine what blocks are already stored or beeing requested\n const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;\n const allBlockIds = [];\n const missingBlockIds = [];\n const blockRequests = [];\n\n for (let current = firstBlockOffset; current < top; current += this.blockSize) {\n const blockId = Math.floor(current / this.blockSize);\n if (!this.blocks.has(blockId) && !this.blockRequests.has(blockId)) {\n missingBlockIds.push(blockId);\n }\n if (this.blockRequests.has(blockId)) {\n blockRequests.push(this.blockRequests.get(blockId));\n }\n allBlockIds.push(blockId);\n }\n\n // determine whether there are already blocks in the queue to be requested\n // if so, add the missing blocks to this list\n if (!this.blockIdsAwaitingRequest) {\n this.blockIdsAwaitingRequest = new Set(missingBlockIds);\n } else {\n for (let i = 0; i < missingBlockIds.length; ++i) {\n const id = missingBlockIds[i];\n this.blockIdsAwaitingRequest.add(id);\n }\n }\n\n // in immediate mode, we don't want to wait for possible additional requests coming in\n if (!immediate) {\n await wait();\n }\n\n // determine if we are the thread to start the requests.\n if (this.blockIdsAwaitingRequest) {\n // get all coherent blocks as groups to be requested in a single request\n const groups = getCoherentBlockGroups(\n Array.from(this.blockIdsAwaitingRequest).sort(),\n );\n\n // iterate over all blocks\n for (const group of groups) {\n // fetch a group as in a single request\n const request = this.requestData(\n group[0] * this.blockSize, group.length * this.blockSize,\n );\n\n // for each block in the request, make a small 'splitter',\n // i.e: wait for the request to finish, then cut out the bytes for\n // that block and store it there.\n // we keep that as a promise in 'blockRequests' to allow waiting on\n // a single block.\n for (let i = 0; i < group.length; ++i) {\n const id = group[i];\n this.blockRequests.set(id, (async () => {\n const response = await request;\n const o = i * this.blockSize;\n const t = Math.min(o + this.blockSize, response.data.byteLength);\n const data = response.data.slice(o, t);\n this.blockRequests.delete(id);\n this.blocks.set(id, {\n data,\n offset: response.offset + o,\n length: data.byteLength,\n top: response.offset + t,\n });\n })());\n }\n }\n this.blockIdsAwaitingRequest = null;\n }\n\n // get a list of currently running requests for the blocks still missing\n const missingRequests = [];\n for (const blockId of missingBlockIds) {\n if (this.blockRequests.has(blockId)) {\n missingRequests.push(this.blockRequests.get(blockId));\n }\n }\n\n // wait for all missing requests to finish\n await Promise.all(missingRequests);\n await Promise.all(blockRequests);\n\n // now get all blocks for the request and return a summary buffer\n const blocks = allBlockIds.map((id) => this.blocks.get(id));\n return readRangeFromBlocks(blocks, offset, length);\n }\n\n async requestData(requestedOffset, requestedLength) {\n const response = await this.retrievalFunction(requestedOffset, requestedLength);\n if (!response.length) {\n response.length 
= response.data.byteLength;\n } else if (response.length !== response.data.byteLength) {\n response.data = response.data.slice(0, response.length);\n }\n response.top = response.offset + response.length;\n return response;\n }\n}\n\n/**\n * Create a new source to read from a remote file using the\n * [fetch]{@link https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API} API.\n * @param {string} url The URL to send requests to.\n * @param {Object} [options] Additional options.\n * @param {Number} [options.blockSize] The block size to use.\n * @param {object} [options.headers] Additional headers to be sent to the server.\n * @returns The constructed source\n */\nfunction makeFetchSource(url, { headers = {}, blockSize } = {}) {\n return new BlockedSource(async (offset, length) => {\n const response = await fetch(url, {\n headers: {\n ...headers, Range: `bytes=${offset}-${offset + length - 1}`,\n },\n });\n\n // check the response was okay and if the server actually understands range requests\n if (!response.ok) {\n throw new Error('Error fetching data.');\n } else if (response.status === 206) {\n const data = response.arrayBuffer\n ? await response.arrayBuffer() : (await response.buffer()).buffer;\n return {\n data,\n offset,\n length,\n };\n } else {\n const data = response.arrayBuffer\n ? await response.arrayBuffer() : (await response.buffer()).buffer;\n return {\n data,\n offset: 0,\n length: data.byteLength,\n };\n }\n }, { blockSize });\n}\n\n/**\n * Create a new source to read from a remote file using the\n * [XHR]{@link https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest} API.\n * @param {string} url The URL to send requests to.\n * @param {Object} [options] Additional options.\n * @param {Number} [options.blockSize] The block size to use.\n * @param {object} [options.headers] Additional headers to be sent to the server.\n * @returns The constructed source\n */\nfunction makeXHRSource(url, { headers = {}, blockSize } = {}) {\n return new BlockedSource(async (offset, length) => {\n return new Promise((resolve, reject) => {\n const request = new XMLHttpRequest();\n request.open('GET', url);\n request.responseType = 'arraybuffer';\n const requestHeaders = { ...headers, Range: `bytes=${offset}-${offset + length - 1}` };\n for (const [key, value] of Object.entries(requestHeaders)) {\n request.setRequestHeader(key, value);\n }\n\n request.onload = () => {\n const data = request.response;\n if (request.status === 206) {\n resolve({\n data,\n offset,\n length,\n });\n } else {\n resolve({\n data,\n offset: 0,\n length: data.byteLength,\n });\n }\n };\n request.onerror = reject;\n request.send();\n });\n }, { blockSize });\n}\n\n/**\n * Create a new source to read from a remote file using the node\n * [http]{@link https://nodejs.org/api/http.html} API.\n * @param {string} url The URL to send requests to.\n * @param {Object} [options] Additional options.\n * @param {Number} [options.blockSize] The block size to use.\n * @param {object} [options.headers] Additional headers to be sent to the server.\n */\nfunction makeHttpSource(url, { headers = {}, blockSize } = {}) {\n return new BlockedSource(async (offset, length) => new Promise((resolve, reject) => {\n const parsed = url__WEBPACK_IMPORTED_MODULE_4___default.a.parse(url);\n const request = (parsed.protocol === 'http:' ? 
http__WEBPACK_IMPORTED_MODULE_2___default.a : https__WEBPACK_IMPORTED_MODULE_3___default.a).get(\n { ...parsed,\n headers: {\n ...headers, Range: `bytes=${offset}-${offset + length - 1}`,\n } }, (result) => {\n const chunks = [];\n // collect chunks\n result.on('data', (chunk) => {\n chunks.push(chunk);\n });\n\n // concatenate all chunks and resolve the promise with the resulting buffer\n result.on('end', () => {\n const data = buffer__WEBPACK_IMPORTED_MODULE_0__[\"Buffer\"].concat(chunks).buffer;\n resolve({\n data,\n offset,\n length: data.byteLength,\n });\n });\n },\n );\n request.on('error', reject);\n }), { blockSize });\n}\n\n/**\n * Create a new source to read from a remote file. Uses either XHR, fetch or nodes http API.\n * @param {string} url The URL to send requests to.\n * @param {Object} [options] Additional options.\n * @param {Boolean} [options.forceXHR] Force the usage of XMLHttpRequest.\n * @param {Number} [options.blockSize] The block size to use.\n * @param {object} [options.headers] Additional headers to be sent to the server.\n * @returns The constructed source\n */\nfunction makeRemoteSource(url, options) {\n const { forceXHR } = options;\n if (typeof fetch === 'function' && !forceXHR) {\n return makeFetchSource(url, options);\n }\n if (typeof XMLHttpRequest !== 'undefined') {\n return makeXHRSource(url, options);\n }\n if (http__WEBPACK_IMPORTED_MODULE_2___default.a.get) {\n return makeHttpSource(url, options);\n }\n throw new Error('No remote source available');\n}\n\n/**\n * Create a new source to read from a local\n * [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.\n * @param {ArrayBuffer} arrayBuffer The ArrayBuffer to parse the GeoTIFF from.\n * @returns The constructed source\n */\nfunction makeBufferSource(arrayBuffer) {\n return {\n async fetch(offset, length) {\n return arrayBuffer.slice(offset, offset + length);\n },\n };\n}\n\nfunction closeAsync(fd) {\n return new Promise((resolve, reject) => {\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\"close\"])(fd, err => {\n if (err) {\n reject(err)\n } else {\n resolve()\n }\n });\n });\n}\n\nfunction openAsync(path, flags, mode = undefined) {\n return new Promise((resolve, reject) => {\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\"open\"])(path, flags, mode, (err, fd) => {\n if (err) {\n reject(err);\n } else {\n resolve(fd);\n }\n });\n });\n}\n\nfunction readAsync(...args) {\n return new Promise((resolve, reject) => {\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\"read\"])(...args, (err, bytesRead, buffer) => {\n if (err) {\n reject(err);\n } else {\n resolve({ bytesRead, buffer });\n }\n });\n });\n}\n\n/**\n * Creates a new source using the node filesystem API.\n * @param {string} path The path to the file in the local filesystem.\n * @returns The constructed source\n */\nfunction makeFileSource(path) {\n const fileOpen = openAsync(path, 'r');\n\n return {\n async fetch(offset, length) {\n const fd = await fileOpen;\n const { buffer } = await readAsync(fd, buffer__WEBPACK_IMPORTED_MODULE_0__[\"Buffer\"].alloc(length), 0, length, offset);\n return buffer.buffer;\n },\n async close() {\n const fd = await fileOpen;\n return await closeAsync(fd);\n },\n };\n}\n\n/**\n * Create a new source from a given file/blob.\n * @param {Blob} file The file or blob to read from.\n * @returns The constructed source\n */\nfunction makeFileReaderSource(file) {\n return {\n async fetch(offset, length) {\n return new Promise((resolve, reject) => {\n const blob = 
file.slice(offset, offset + length);\n const reader = new FileReader();\n reader.onload = (event) => resolve(event.target.result);\n reader.onerror = reject;\n reader.readAsArrayBuffer(blob);\n });\n },\n };\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/source.js?");
381
382/***/ }),
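/* Usage sketch, assuming `makeRemoteSource` from the module above is in scope; the URL is a
   placeholder. The returned source exposes the `fetch(offset, length)` interface documented
   above and batches HTTP range requests into fixed-size blocks.

     const source = makeRemoteSource('https://example.com/raster.tif', { blockSize: 65536 });
     // inside an async function:
     const header = await source.fetch(0, 1024);   // ArrayBuffer with the first 1 KiB
*/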
383
384/***/ "./node_modules/geotiff/src/utils.js":
385/*!*******************************************!*\
386 !*** ./node_modules/geotiff/src/utils.js ***!
387 \*******************************************/
388/*! exports provided: assign, chunk, endsWith, forEach, invert, range, times, toArray, toArrayRecursively */
389/***/ (function(module, __webpack_exports__, __webpack_require__) {
390
391"use strict";
392eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"assign\", function() { return assign; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"chunk\", function() { return chunk; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"endsWith\", function() { return endsWith; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"forEach\", function() { return forEach; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"invert\", function() { return invert; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"range\", function() { return range; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"times\", function() { return times; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"toArray\", function() { return toArray; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"toArrayRecursively\", function() { return toArrayRecursively; });\nfunction assign(target, source) {\n for (const key in source) {\n if (source.hasOwnProperty(key)) {\n target[key] = source[key];\n }\n }\n}\n\nfunction chunk(iterable, length) {\n const results = [];\n const lengthOfIterable = iterable.length;\n for (let i = 0; i < lengthOfIterable; i += length) {\n const chunked = [];\n for (let ci = i; ci < i + length; ci++) {\n chunked.push(iterable[ci]);\n }\n results.push(chunked);\n }\n return results;\n}\n\nfunction endsWith(string, expectedEnding) {\n if (string.length < expectedEnding.length) {\n return false;\n }\n const actualEnding = string.substr(string.length - expectedEnding.length);\n return actualEnding === expectedEnding;\n}\n\nfunction forEach(iterable, func) {\n const { length } = iterable;\n for (let i = 0; i < length; i++) {\n func(iterable[i], i);\n }\n}\n\nfunction invert(oldObj) {\n const newObj = {};\n for (const key in oldObj) {\n if (oldObj.hasOwnProperty(key)) {\n const value = oldObj[key];\n newObj[value] = key;\n }\n }\n return newObj;\n}\n\nfunction range(n) {\n const results = [];\n for (let i = 0; i < n; i++) {\n results.push(i);\n }\n return results;\n}\n\nfunction times(numTimes, func) {\n const results = [];\n for (let i = 0; i < numTimes; i++) {\n results.push(func(i));\n }\n return results;\n}\n\nfunction toArray(iterable) {\n const results = [];\n const { length } = iterable;\n for (let i = 0; i < length; i++) {\n results.push(iterable[i]);\n }\n return results;\n}\n\nfunction toArrayRecursively(input) {\n if (input.length) {\n return toArray(input).map(toArrayRecursively);\n }\n return input;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/utils.js?");
393
394/***/ }),
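/*
 * Quick reference for the helpers exported above (hedged; the results follow
 * directly from the implementations shown):
 *
 *   range(3)                       // => [0, 1, 2]
 *   times(3, (i) => i * i)         // => [0, 1, 4]
 *   chunk([1, 2, 3, 4], 2)         // => [[1, 2], [3, 4]]
 *   invert({ a: 'x', b: 'y' })     // => { x: 'a', y: 'b' }
 *   endsWith('raster.tif', '.tif') // => true
 */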
395
396/***/ "./node_modules/inherits/inherits.js":
397/*!*******************************************!*\
398 !*** ./node_modules/inherits/inherits.js ***!
399 \*******************************************/
400/*! no static exports found */
401/***/ (function(module, exports, __webpack_require__) {
402
403eval("try {\n var util = __webpack_require__(/*! util */ \"util\");\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = __webpack_require__(/*! ./inherits_browser.js */ \"./node_modules/inherits/inherits_browser.js\");\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/inherits/inherits.js?");
404
405/***/ }),
406
407/***/ "./node_modules/inherits/inherits_browser.js":
408/*!***************************************************!*\
409 !*** ./node_modules/inherits/inherits_browser.js ***!
410 \***************************************************/
411/*! no static exports found */
412/***/ (function(module, exports) {
413
414eval("if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/inherits/inherits_browser.js?");
415
416/***/ }),
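/*
 * Usage sketch for `inherits` above (hedged; the constructors are
 * illustrative only). Both the util.inherits re-export and the browser shim
 * wire `ctor.prototype` to `superCtor.prototype` and record the parent on
 * `ctor.super_`:
 *
 *   function Animal(name) { this.name = name; }
 *   Animal.prototype.describe = function () { return 'animal: ' + this.name; };
 *
 *   function Dog(name) { Animal.call(this, name); }
 *   inherits(Dog, Animal);
 *
 *   new Dog('Rex').describe(); // => 'animal: Rex'
 *   Dog.super_ === Animal;     // => true
 */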
417
418/***/ "./node_modules/is-observable/index.js":
419/*!*********************************************!*\
420 !*** ./node_modules/is-observable/index.js ***!
421 \*********************************************/
422/*! no static exports found */
423/***/ (function(module, exports, __webpack_require__) {
424
425"use strict";
426eval("\n\nmodule.exports = value => {\n\tif (!value) {\n\t\treturn false;\n\t}\n\n\t// eslint-disable-next-line no-use-extend-native/no-use-extend-native\n\tif (typeof Symbol.observable === 'symbol' && typeof value[Symbol.observable] === 'function') {\n\t\t// eslint-disable-next-line no-use-extend-native/no-use-extend-native\n\t\treturn value === value[Symbol.observable]();\n\t}\n\n\tif (typeof value['@@observable'] === 'function') {\n\t\treturn value === value['@@observable']();\n\t}\n\n\treturn false;\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/is-observable/index.js?");
427
428/***/ }),
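/*
 * Usage sketch for the is-observable predicate above (hedged; it assumes the
 * export is bound to `isObservable`, and `someObservable` is a placeholder).
 * The check passes only when the value's Symbol.observable (or '@@observable')
 * method returns the value itself, as spec-compliant Observables do:
 *
 *   isObservable(null)           // => false
 *   isObservable({})             // => false
 *   isObservable(someObservable) // => true for a spec-compliant Observable instance
 */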
429
430/***/ "./node_modules/isarray/index.js":
431/*!***************************************!*\
432 !*** ./node_modules/isarray/index.js ***!
433 \***************************************/
434/*! no static exports found */
435/***/ (function(module, exports) {
436
437eval("var toString = {}.toString;\n\nmodule.exports = Array.isArray || function (arr) {\n return toString.call(arr) == '[object Array]';\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/isarray/index.js?");
438
439/***/ }),
440
441/***/ "./node_modules/node-fetch/lib/index.mjs":
442/*!***********************************************!*\
443 !*** ./node_modules/node-fetch/lib/index.mjs ***!
444 \***********************************************/
445/*! exports provided: default, Headers, Request, Response, FetchError */
446/***/ (function(__webpack_module__, __webpack_exports__, __webpack_require__) {
447
448"use strict";
449eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Headers\", function() { return Headers; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Request\", function() { return Request; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Response\", function() { return Response; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"FetchError\", function() { return FetchError; });\n/* harmony import */ var stream__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! stream */ \"stream\");\n/* harmony import */ var http__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! http */ \"http\");\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! url */ \"url\");\n/* harmony import */ var https__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! https */ \"https\");\n/* harmony import */ var zlib__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! zlib */ \"zlib\");\n\n\n\n\n\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = stream__WEBPACK_IMPORTED_MODULE_0__.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = stream__WEBPACK_IMPORTED_MODULE_0__.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof stream__WEBPACK_IMPORTED_MODULE_0__) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof stream__WEBPACK_IMPORTED_MODULE_0__) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: 
future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof stream__WEBPACK_IMPORTED_MODULE_0__)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = 
/charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = /<meta.+?charset=(['\"])(.+?)\\1/i.exec(str);\n\t}\n\n\t// html4\n\tif (!res && str) {\n\t\tres = /<meta[\\s]+?http-equiv=(['\"])content-type\\1[\\s]+?content=(['\"])(.+?)\\2/i.exec(str);\n\t\tif (!res) {\n\t\t\tres = /<meta[\\s]+?content=(['\"])(.+?)\\1[\\s]+?http-equiv=(['\"])content-type\\3/i.exec(str);\n\t\t\tif (res) {\n\t\t\t\tres.pop(); // drop last quote\n\t\t\t}\n\t\t}\n\n\t\tif (res) {\n\t\t\tres = /charset=(.*)/i.exec(res.pop());\n\t\t}\n\t}\n\n\t// xml\n\tif (!res && str) {\n\t\tres = /<\\?xml.+?encoding=(['\"])(.+?)\\1/i.exec(str);\n\t}\n\n\t// found charset\n\tif (res) {\n\t\tcharset = res.pop();\n\n\t\t// prevent decode issues when sites use incorrect encoding\n\t\t// ref: https://hsivonen.fi/encoding-menu/\n\t\tif (charset === 'gb2312' || charset === 'gbk') {\n\t\t\tcharset = 'gb18030';\n\t\t}\n\t}\n\n\t// turn raw buffers into a single utf-8 buffer\n\treturn convert(buffer, 'UTF-8', charset).toString();\n}\n\n/**\n * Detect a URLSearchParams object\n * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143\n *\n * @param Object obj Object to detect by type or brand\n * @return String\n */\nfunction isURLSearchParams(obj) {\n\t// Duck-typing as a necessary condition.\n\tif (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {\n\t\treturn false;\n\t}\n\n\t// Brand-checking and more duck-typing as optional condition.\n\treturn obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';\n}\n\n/**\n * Check if `obj` is a W3C `Blob` object (which `File` inherits from)\n * @param {*} obj\n * @return {boolean}\n */\nfunction isBlob(obj) {\n\treturn typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);\n}\n\n/**\n * Clone body given Res/Req instance\n *\n * @param Mixed instance Response or Request instance\n * @return Mixed\n */\nfunction clone(instance) {\n\tlet p1, p2;\n\tlet body = instance.body;\n\n\t// don't allow cloning a used body\n\tif (instance.bodyUsed) {\n\t\tthrow new Error('cannot clone body after it is used');\n\t}\n\n\t// check that body is a stream and not form-data object\n\t// note: we can't clone the form-data object without having it as a dependency\n\tif (body instanceof stream__WEBPACK_IMPORTED_MODULE_0__ && typeof body.getBoundary !== 'function') {\n\t\t// tee instance body\n\t\tp1 = new PassThrough();\n\t\tp2 = new PassThrough();\n\t\tbody.pipe(p1);\n\t\tbody.pipe(p2);\n\t\t// set instance body to teed body and return the other teed body\n\t\tinstance[INTERNALS].body = p1;\n\t\tbody = p2;\n\t}\n\n\treturn body;\n}\n\n/**\n * Performs the operation \"extract a `Content-Type` value from |object|\" as\n * specified in the specification:\n * https://fetch.spec.whatwg.org/#concept-bodyinit-extract\n *\n * This function assumes that instance.body is present.\n *\n * @param Mixed instance Any options.body input\n 
*/\nfunction extractContentType(body) {\n\tif (body === null) {\n\t\t// body is null\n\t\treturn null;\n\t} else if (typeof body === 'string') {\n\t\t// body is string\n\t\treturn 'text/plain;charset=UTF-8';\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\treturn 'application/x-www-form-urlencoded;charset=UTF-8';\n\t} else if (isBlob(body)) {\n\t\t// body is blob\n\t\treturn body.type || null;\n\t} else if (Buffer.isBuffer(body)) {\n\t\t// body is buffer\n\t\treturn null;\n\t} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\treturn null;\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\treturn null;\n\t} else if (typeof body.getBoundary === 'function') {\n\t\t// detect form data input from form-data module\n\t\treturn `multipart/form-data;boundary=${body.getBoundary()}`;\n\t} else if (body instanceof stream__WEBPACK_IMPORTED_MODULE_0__) {\n\t\t// body is stream\n\t\t// can't really do much about this\n\t\treturn null;\n\t} else {\n\t\t// Body constructor defaults other things to string\n\t\treturn 'text/plain;charset=UTF-8';\n\t}\n}\n\n/**\n * The Fetch Standard treats this as if \"total bytes\" is a property on the body.\n * For us, we have to explicitly get it with a function.\n *\n * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes\n *\n * @param Body instance Instance of Body\n * @return Number? Number of bytes, or null if not possible\n */\nfunction getTotalBytes(instance) {\n\tconst body = instance.body;\n\n\n\tif (body === null) {\n\t\t// body is null\n\t\treturn 0;\n\t} else if (isBlob(body)) {\n\t\treturn body.size;\n\t} else if (Buffer.isBuffer(body)) {\n\t\t// body is buffer\n\t\treturn body.length;\n\t} else if (body && typeof body.getLengthSync === 'function') {\n\t\t// detect form data input from form-data module\n\t\tif (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x\n\t\tbody.hasKnownLength && body.hasKnownLength()) {\n\t\t\t// 2.x\n\t\t\treturn body.getLengthSync();\n\t\t}\n\t\treturn null;\n\t} else {\n\t\t// body is stream\n\t\treturn null;\n\t}\n}\n\n/**\n * Write a Body to a Node.js WritableStream (e.g. 
http.Request) object.\n *\n * @param Body instance Instance of Body\n * @return Void\n */\nfunction writeToStream(dest, instance) {\n\tconst body = instance.body;\n\n\n\tif (body === null) {\n\t\t// body is null\n\t\tdest.end();\n\t} else if (isBlob(body)) {\n\t\tbody.stream().pipe(dest);\n\t} else if (Buffer.isBuffer(body)) {\n\t\t// body is buffer\n\t\tdest.write(body);\n\t\tdest.end();\n\t} else {\n\t\t// body is stream\n\t\tbody.pipe(dest);\n\t}\n}\n\n// expose Promise\nBody.Promise = global.Promise;\n\n/**\n * headers.js\n *\n * Headers class offers convenient helpers\n */\n\nconst invalidTokenRegex = /[^\\^_`a-zA-Z\\-0-9!#$%&'*+.|~]/;\nconst invalidHeaderCharRegex = /[^\\t\\x20-\\x7e\\x80-\\xff]/;\n\nfunction validateName(name) {\n\tname = `${name}`;\n\tif (invalidTokenRegex.test(name) || name === '') {\n\t\tthrow new TypeError(`${name} is not a legal HTTP header name`);\n\t}\n}\n\nfunction validateValue(value) {\n\tvalue = `${value}`;\n\tif (invalidHeaderCharRegex.test(value)) {\n\t\tthrow new TypeError(`${value} is not a legal HTTP header value`);\n\t}\n}\n\n/**\n * Find the key in the map object given a header name.\n *\n * Returns undefined if not found.\n *\n * @param String name Header name\n * @return String|Undefined\n */\nfunction find(map, name) {\n\tname = name.toLowerCase();\n\tfor (const key in map) {\n\t\tif (key.toLowerCase() === name) {\n\t\t\treturn key;\n\t\t}\n\t}\n\treturn undefined;\n}\n\nconst MAP = Symbol('map');\nclass Headers {\n\t/**\n * Headers class\n *\n * @param Object headers Response headers\n * @return Void\n */\n\tconstructor() {\n\t\tlet init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence<sequence<ByteString>>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record<ByteString, ByteString>\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], 
name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http__WEBPACK_IMPORTED_MODULE_1__.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = url__WEBPACK_IMPORTED_MODULE_2__.parse;\nconst format_url = url__WEBPACK_IMPORTED_MODULE_2__.format;\n\nconst streamDestructionSupported = 'destroy' in stream__WEBPACK_IMPORTED_MODULE_0__.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof stream__WEBPACK_IMPORTED_MODULE_0__.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from 
end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = stream__WEBPACK_IMPORTED_MODULE_0__.PassThrough;\nconst resolve_url = url__WEBPACK_IMPORTED_MODULE_2__.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https__WEBPACK_IMPORTED_MODULE_3__ : http__WEBPACK_IMPORTED_MODULE_1__).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof stream__WEBPACK_IMPORTED_MODULE_0__.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib__WEBPACK_IMPORTED_MODULE_4__.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib__WEBPACK_IMPORTED_MODULE_4__.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib__WEBPACK_IMPORTED_MODULE_4__.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib__WEBPACK_IMPORTED_MODULE_4__.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib__WEBPACK_IMPORTED_MODULE_4__.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib__WEBPACK_IMPORTED_MODULE_4__.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib__WEBPACK_IMPORTED_MODULE_4__.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\n/* harmony default export */ __webpack_exports__[\"default\"] = (fetch);\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/node-fetch/lib/index.mjs?");
450
451/***/ }),
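/*
 * Usage sketch for the node-fetch implementation bundled above (hedged; the
 * URL is a placeholder and this assumes a Node.js environment where the
 * module's default export is bound to `fetch` and `Headers` is in scope):
 *
 *   // inside an async function:
 *   const res = await fetch('https://example.com/data.json', {
 *     headers: new Headers({ Accept: 'application/json' }),
 *     timeout: 10000, // node-fetch extension: reject with a FetchError after 10s
 *   });
 *   if (!res.ok) {
 *     throw new Error('request failed: ' + res.status + ' ' + res.statusText);
 *   }
 *   const data = await res.json(); // Body mixin method defined above
 */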
452
453/***/ "./node_modules/observable-fns/dist.esm/_scheduler.js":
454/*!************************************************************!*\
455 !*** ./node_modules/observable-fns/dist.esm/_scheduler.js ***!
456 \************************************************************/
457/*! exports provided: AsyncSerialScheduler */
458/***/ (function(module, __webpack_exports__, __webpack_require__) {
459
460"use strict";
461eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"AsyncSerialScheduler\", function() { return AsyncSerialScheduler; });\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nclass AsyncSerialScheduler {\n constructor(observer) {\n this._baseObserver = observer;\n this._pendingPromises = new Set();\n }\n complete() {\n Promise.all(this._pendingPromises)\n .then(() => this._baseObserver.complete())\n .catch(error => this._baseObserver.error(error));\n }\n error(error) {\n this._baseObserver.error(error);\n }\n schedule(task) {\n const prevPromisesCompletion = Promise.all(this._pendingPromises);\n const values = [];\n const next = (value) => values.push(value);\n const promise = Promise.resolve()\n .then(() => __awaiter(this, void 0, void 0, function* () {\n yield prevPromisesCompletion;\n yield task(next);\n this._pendingPromises.delete(promise);\n for (const value of values) {\n this._baseObserver.next(value);\n }\n }))\n .catch(error => {\n this._pendingPromises.delete(promise);\n this._baseObserver.error(error);\n });\n this._pendingPromises.add(promise);\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_scheduler.js?");
462
463/***/ }),
464
465/***/ "./node_modules/observable-fns/dist.esm/_symbols.js":
466/*!**********************************************************!*\
467 !*** ./node_modules/observable-fns/dist.esm/_symbols.js ***!
468 \**********************************************************/
469/*! exports provided: hasSymbols, hasSymbol, getSymbol, registerObservableSymbol */
470/***/ (function(module, __webpack_exports__, __webpack_require__) {
471
472"use strict";
473eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"hasSymbols\", function() { return hasSymbols; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"hasSymbol\", function() { return hasSymbol; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getSymbol\", function() { return getSymbol; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"registerObservableSymbol\", function() { return registerObservableSymbol; });\nconst hasSymbols = () => typeof Symbol === \"function\";\nconst hasSymbol = (name) => hasSymbols() && Boolean(Symbol[name]);\nconst getSymbol = (name) => hasSymbol(name) ? Symbol[name] : \"@@\" + name;\nfunction registerObservableSymbol() {\n if (hasSymbols() && !hasSymbol(\"observable\")) {\n Symbol.observable = Symbol(\"observable\");\n }\n}\nif (!hasSymbol(\"asyncIterator\")) {\n Symbol.asyncIterator = Symbol.asyncIterator || Symbol.for(\"Symbol.asyncIterator\");\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_symbols.js?");
474
475/***/ }),
476
477/***/ "./node_modules/observable-fns/dist.esm/_util.js":
478/*!*******************************************************!*\
479 !*** ./node_modules/observable-fns/dist.esm/_util.js ***!
480 \*******************************************************/
481/*! exports provided: isAsyncIterator, isIterator */
482/***/ (function(module, __webpack_exports__, __webpack_require__) {
483
484"use strict";
485eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isAsyncIterator\", function() { return isAsyncIterator; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isIterator\", function() { return isIterator; });\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_symbols */ \"./node_modules/observable-fns/dist.esm/_symbols.js\");\n/// <reference lib=\"es2018\" />\n\nfunction isAsyncIterator(thing) {\n return thing && Object(_symbols__WEBPACK_IMPORTED_MODULE_0__[\"hasSymbol\"])(\"asyncIterator\") && thing[Symbol.asyncIterator];\n}\nfunction isIterator(thing) {\n return thing && Object(_symbols__WEBPACK_IMPORTED_MODULE_0__[\"hasSymbol\"])(\"iterator\") && thing[Symbol.iterator];\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_util.js?");
486
487/***/ }),
488
489/***/ "./node_modules/observable-fns/dist.esm/filter.js":
490/*!********************************************************!*\
491 !*** ./node_modules/observable-fns/dist.esm/filter.js ***!
492 \********************************************************/
493/*! exports provided: default */
494/***/ (function(module, __webpack_exports__, __webpack_require__) {
495
496"use strict";
497eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \"./node_modules/observable-fns/dist.esm/_scheduler.js\");\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\n/**\n * Filters the values emitted by another observable.\n * To be applied to an input observable using `pipe()`.\n */\nfunction filter(test) {\n return (observable) => {\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\"default\"](observer => {\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\"AsyncSerialScheduler\"](observer);\n const subscription = observable.subscribe({\n complete() {\n scheduler.complete();\n },\n error(error) {\n scheduler.error(error);\n },\n next(input) {\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\n if (yield test(input)) {\n next(input);\n }\n }));\n }\n });\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\"default\"])(subscription);\n });\n };\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (filter);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/filter.js?");
498
499/***/ }),
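/*
 * Usage sketch for the filter() operator above (illustrative only; assumes the
 * public observable-fns entry point so Observable and filter import together):
 *
 *   const { Observable, filter } = require("observable-fns");
 *   const evens = Observable.from([1, 2, 3, 4]).pipe(filter(n => n % 2 === 0));
 *   evens.subscribe(n => console.log(n)); // 2, 4  (the test may also return a Promise)
 */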
500
501/***/ "./node_modules/observable-fns/dist.esm/flatMap.js":
502/*!*********************************************************!*\
503 !*** ./node_modules/observable-fns/dist.esm/flatMap.js ***!
504 \*********************************************************/
505/*! exports provided: default */
506/***/ (function(module, __webpack_exports__, __webpack_require__) {
507
508"use strict";
509eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \"./node_modules/observable-fns/dist.esm/_scheduler.js\");\n/* harmony import */ var _util__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./_util */ \"./node_modules/observable-fns/dist.esm/_util.js\");\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (undefined && undefined.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\n\n\n\n\n/**\n * Maps the values emitted by another observable. In contrast to `map()`\n * the `mapper` function returns an array of values that will be emitted\n * separately.\n * Use `flatMap()` to map input values to zero, one or multiple output\n * values. 
To be applied to an input observable using `pipe()`.\n */\nfunction flatMap(mapper) {\n return (observable) => {\n return new _observable__WEBPACK_IMPORTED_MODULE_2__[\"default\"](observer => {\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\"AsyncSerialScheduler\"](observer);\n const subscription = observable.subscribe({\n complete() {\n scheduler.complete();\n },\n error(error) {\n scheduler.error(error);\n },\n next(input) {\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\n var e_1, _a;\n const mapped = yield mapper(input);\n if (Object(_util__WEBPACK_IMPORTED_MODULE_1__[\"isIterator\"])(mapped) || Object(_util__WEBPACK_IMPORTED_MODULE_1__[\"isAsyncIterator\"])(mapped)) {\n try {\n for (var mapped_1 = __asyncValues(mapped), mapped_1_1; mapped_1_1 = yield mapped_1.next(), !mapped_1_1.done;) {\n const element = mapped_1_1.value;\n next(element);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (mapped_1_1 && !mapped_1_1.done && (_a = mapped_1.return)) yield _a.call(mapped_1);\n }\n finally { if (e_1) throw e_1.error; }\n }\n }\n else {\n mapped.map(output => next(output));\n }\n }));\n }\n });\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_3__[\"default\"])(subscription);\n });\n };\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (flatMap);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/flatMap.js?");
510
511/***/ }),
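/*
 * Usage sketch for flatMap() above (illustrative only): the mapper may return an
 * array, an iterator or an async iterator, and each element is emitted separately.
 *
 *   const { Observable, flatMap } = require("observable-fns");
 *   Observable.from(["a b", "c"])
 *     .pipe(flatMap(phrase => phrase.split(" ")))
 *     .subscribe(word => console.log(word)); // "a", "b", "c"
 */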
512
513/***/ "./node_modules/observable-fns/dist.esm/index.js":
514/*!*******************************************************!*\
515 !*** ./node_modules/observable-fns/dist.esm/index.js ***!
516 \*******************************************************/
517/*! exports provided: filter, flatMap, interval, map, merge, multicast, Observable, scan, Subject, unsubscribe */
518/***/ (function(module, __webpack_exports__, __webpack_require__) {
519
520"use strict";
521eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _filter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./filter */ \"./node_modules/observable-fns/dist.esm/filter.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"filter\", function() { return _filter__WEBPACK_IMPORTED_MODULE_0__[\"default\"]; });\n\n/* harmony import */ var _flatMap__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./flatMap */ \"./node_modules/observable-fns/dist.esm/flatMap.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"flatMap\", function() { return _flatMap__WEBPACK_IMPORTED_MODULE_1__[\"default\"]; });\n\n/* harmony import */ var _interval__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./interval */ \"./node_modules/observable-fns/dist.esm/interval.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"interval\", function() { return _interval__WEBPACK_IMPORTED_MODULE_2__[\"default\"]; });\n\n/* harmony import */ var _map__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./map */ \"./node_modules/observable-fns/dist.esm/map.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"map\", function() { return _map__WEBPACK_IMPORTED_MODULE_3__[\"default\"]; });\n\n/* harmony import */ var _merge__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./merge */ \"./node_modules/observable-fns/dist.esm/merge.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"merge\", function() { return _merge__WEBPACK_IMPORTED_MODULE_4__[\"default\"]; });\n\n/* harmony import */ var _multicast__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./multicast */ \"./node_modules/observable-fns/dist.esm/multicast.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"multicast\", function() { return _multicast__WEBPACK_IMPORTED_MODULE_5__[\"default\"]; });\n\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Observable\", function() { return _observable__WEBPACK_IMPORTED_MODULE_6__[\"default\"]; });\n\n/* harmony import */ var _scan__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./scan */ \"./node_modules/observable-fns/dist.esm/scan.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"scan\", function() { return _scan__WEBPACK_IMPORTED_MODULE_7__[\"default\"]; });\n\n/* harmony import */ var _subject__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./subject */ \"./node_modules/observable-fns/dist.esm/subject.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Subject\", function() { return _subject__WEBPACK_IMPORTED_MODULE_8__[\"default\"]; });\n\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"unsubscribe\", function() { return _unsubscribe__WEBPACK_IMPORTED_MODULE_9__[\"default\"]; });\n\n\n\n\n\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/index.js?");
522
523/***/ }),
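/*
 * The entry point above just re-exports the individual modules, so (assuming this
 * dist build is what the package's main/module fields resolve to) consumers can
 * pull everything from the package root:
 *
 *   const { Observable, Subject, filter, map, merge } = require("observable-fns");
 *   // or: import { Observable, map } from "observable-fns";
 */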
524
525/***/ "./node_modules/observable-fns/dist.esm/interval.js":
526/*!**********************************************************!*\
527 !*** ./node_modules/observable-fns/dist.esm/interval.js ***!
528 \**********************************************************/
529/*! exports provided: default */
530/***/ (function(module, __webpack_exports__, __webpack_require__) {
531
532"use strict";
533eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return interval; });\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n\n/**\n * Creates an observable that yields a new value every `period` milliseconds.\n * The first value emitted is 0, then 1, 2, etc. The first value is not emitted\n * immediately, but after the first interval.\n */\nfunction interval(period) {\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\"Observable\"](observer => {\n let counter = 0;\n const handle = setInterval(() => {\n observer.next(counter++);\n }, period);\n return () => clearInterval(handle);\n });\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/interval.js?");
534
535/***/ }),
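/*
 * Usage sketch for interval() above (illustrative only): the counter starts at 0
 * and the first value arrives only after the first period has elapsed.
 *
 *   const { interval } = require("observable-fns");
 *   const sub = interval(500).subscribe(n => console.log(n)); // 0, 1, 2, ... every 500 ms
 *   // later: sub.unsubscribe() runs the cleanup above, i.e. clearInterval()
 */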
536
537/***/ "./node_modules/observable-fns/dist.esm/map.js":
538/*!*****************************************************!*\
539 !*** ./node_modules/observable-fns/dist.esm/map.js ***!
540 \*****************************************************/
541/*! exports provided: default */
542/***/ (function(module, __webpack_exports__, __webpack_require__) {
543
544"use strict";
545eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \"./node_modules/observable-fns/dist.esm/_scheduler.js\");\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\n/**\n * Maps the values emitted by another observable to different values.\n * To be applied to an input observable using `pipe()`.\n */\nfunction map(mapper) {\n return (observable) => {\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\"default\"](observer => {\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\"AsyncSerialScheduler\"](observer);\n const subscription = observable.subscribe({\n complete() {\n scheduler.complete();\n },\n error(error) {\n scheduler.error(error);\n },\n next(input) {\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\n const mapped = yield mapper(input);\n next(mapped);\n }));\n }\n });\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\"default\"])(subscription);\n });\n };\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (map);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/map.js?");
546
547/***/ }),
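/*
 * Usage sketch for map() above (illustrative only); the mapper may be synchronous
 * or return a Promise, since its result is awaited by the scheduler:
 *
 *   const { Observable, map } = require("observable-fns");
 *   Observable.from([1, 2, 3])
 *     .pipe(map(n => n * 10))
 *     .subscribe(n => console.log(n)); // 10, 20, 30
 */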
548
549/***/ "./node_modules/observable-fns/dist.esm/merge.js":
550/*!*******************************************************!*\
551 !*** ./node_modules/observable-fns/dist.esm/merge.js ***!
552 \*******************************************************/
553/*! exports provided: default */
554/***/ (function(module, __webpack_exports__, __webpack_require__) {
555
556"use strict";
557eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\n\n\nfunction merge(...observables) {\n if (observables.length === 0) {\n return _observable__WEBPACK_IMPORTED_MODULE_0__[\"Observable\"].from([]);\n }\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\"Observable\"](observer => {\n let completed = 0;\n const subscriptions = observables.map(input => {\n return input.subscribe({\n error(error) {\n observer.error(error);\n unsubscribeAll();\n },\n next(value) {\n observer.next(value);\n },\n complete() {\n if (++completed === observables.length) {\n observer.complete();\n unsubscribeAll();\n }\n }\n });\n });\n const unsubscribeAll = () => {\n subscriptions.forEach(subscription => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_1__[\"default\"])(subscription));\n };\n return unsubscribeAll;\n });\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (merge);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/merge.js?");
558
559/***/ }),
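/*
 * Usage sketch for merge() above (illustrative only): values from all inputs are
 * forwarded as they arrive, and completion fires once every input has completed.
 *
 *   const { Observable, merge } = require("observable-fns");
 *   merge(Observable.of(1, 2), Observable.of(3)).subscribe({
 *     next: v => console.log(v),
 *     complete: () => console.log("all done")
 *   });
 */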
560
561/***/ "./node_modules/observable-fns/dist.esm/multicast.js":
562/*!***********************************************************!*\
563 !*** ./node_modules/observable-fns/dist.esm/multicast.js ***!
564 \***********************************************************/
565/*! exports provided: default */
566/***/ (function(module, __webpack_exports__, __webpack_require__) {
567
568"use strict";
569eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _subject__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./subject */ \"./node_modules/observable-fns/dist.esm/subject.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\n\n\n\n// TODO: Subject already creates additional observables \"under the hood\",\n// now we introduce even more. A true native MulticastObservable\n// would be preferable.\n/**\n * Takes a \"cold\" observable and returns a wrapping \"hot\" observable that\n * proxies the input observable's values and errors.\n *\n * An observable is called \"cold\" when its initialization function is run\n * for each new subscriber. This is how observable-fns's `Observable`\n * implementation works.\n *\n * A hot observable is an observable where new subscribers subscribe to\n * the upcoming values of an already-initialiazed observable.\n *\n * The multicast observable will lazily subscribe to the source observable\n * once it has its first own subscriber and will unsubscribe from the\n * source observable when its last own subscriber unsubscribed.\n */\nfunction multicast(coldObservable) {\n const subject = new _subject__WEBPACK_IMPORTED_MODULE_1__[\"default\"]();\n let sourceSubscription;\n let subscriberCount = 0;\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\"default\"](observer => {\n // Init source subscription lazily\n if (!sourceSubscription) {\n sourceSubscription = coldObservable.subscribe(subject);\n }\n // Pipe all events from `subject` into this observable\n const subscription = subject.subscribe(observer);\n subscriberCount++;\n return () => {\n subscriberCount--;\n subscription.unsubscribe();\n // Close source subscription once last subscriber has unsubscribed\n if (subscriberCount === 0) {\n Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\"default\"])(sourceSubscription);\n sourceSubscription = undefined;\n }\n };\n });\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (multicast);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/multicast.js?");
570
571/***/ }),
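/*
 * Usage sketch for multicast() above (illustrative only): both subscribers below
 * share one lazily created subscription to the cold source instead of each
 * starting their own timer.
 *
 *   const { interval, multicast } = require("observable-fns");
 *   const hot = multicast(interval(1000));
 *   const a = hot.subscribe(n => console.log("a", n));
 *   const b = hot.subscribe(n => console.log("b", n));
 *   // once both unsubscribe, the underlying interval subscription is closed as well
 */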
572
573/***/ "./node_modules/observable-fns/dist.esm/observable.js":
574/*!************************************************************!*\
575 !*** ./node_modules/observable-fns/dist.esm/observable.js ***!
576 \************************************************************/
577/*! exports provided: Subscription, SubscriptionObserver, Observable, default */
578/***/ (function(module, __webpack_exports__, __webpack_require__) {
579
580"use strict";
581eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Subscription\", function() { return Subscription; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"SubscriptionObserver\", function() { return SubscriptionObserver; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Observable\", function() { return Observable; });\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./symbols */ \"./node_modules/observable-fns/dist.esm/symbols.js\");\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./_symbols */ \"./node_modules/observable-fns/dist.esm/_symbols.js\");\n/**\n * Based on <https://raw.githubusercontent.com/zenparsing/zen-observable/master/src/Observable.js>\n * At commit: f63849a8c60af5d514efc8e9d6138d8273c49ad6\n */\n\n\nconst SymbolIterator = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\"getSymbol\"])(\"iterator\");\nconst SymbolObservable = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\"getSymbol\"])(\"observable\");\nconst SymbolSpecies = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\"getSymbol\"])(\"species\");\n// === Abstract Operations ===\nfunction getMethod(obj, key) {\n const value = obj[key];\n if (value == null) {\n return undefined;\n }\n if (typeof value !== \"function\") {\n throw new TypeError(value + \" is not a function\");\n }\n return value;\n}\nfunction getSpecies(obj) {\n let ctor = obj.constructor;\n if (ctor !== undefined) {\n ctor = ctor[SymbolSpecies];\n if (ctor === null) {\n ctor = undefined;\n }\n }\n return ctor !== undefined ? ctor : Observable;\n}\nfunction isObservable(x) {\n return x instanceof Observable; // SPEC: Brand check\n}\nfunction hostReportError(error) {\n if (hostReportError.log) {\n hostReportError.log(error);\n }\n else {\n setTimeout(() => { throw error; }, 0);\n }\n}\nfunction enqueue(fn) {\n Promise.resolve().then(() => {\n try {\n fn();\n }\n catch (e) {\n hostReportError(e);\n }\n });\n}\nfunction cleanupSubscription(subscription) {\n const cleanup = subscription._cleanup;\n if (cleanup === undefined) {\n return;\n }\n subscription._cleanup = undefined;\n if (!cleanup) {\n return;\n }\n try {\n if (typeof cleanup === \"function\") {\n cleanup();\n }\n else {\n const unsubscribe = getMethod(cleanup, \"unsubscribe\");\n if (unsubscribe) {\n unsubscribe.call(cleanup);\n }\n }\n }\n catch (e) {\n hostReportError(e);\n }\n}\nfunction closeSubscription(subscription) {\n subscription._observer = undefined;\n subscription._queue = undefined;\n subscription._state = \"closed\";\n}\nfunction flushSubscription(subscription) {\n const queue = subscription._queue;\n if (!queue) {\n return;\n }\n subscription._queue = undefined;\n subscription._state = \"ready\";\n for (const item of queue) {\n notifySubscription(subscription, item.type, item.value);\n if (subscription._state === \"closed\") {\n break;\n }\n }\n}\nfunction notifySubscription(subscription, type, value) {\n subscription._state = \"running\";\n const observer = subscription._observer;\n try {\n const m = observer ? 
getMethod(observer, type) : undefined;\n switch (type) {\n case \"next\":\n if (m)\n m.call(observer, value);\n break;\n case \"error\":\n closeSubscription(subscription);\n if (m)\n m.call(observer, value);\n else\n throw value;\n break;\n case \"complete\":\n closeSubscription(subscription);\n if (m)\n m.call(observer);\n break;\n }\n }\n catch (e) {\n hostReportError(e);\n }\n if (subscription._state === \"closed\") {\n cleanupSubscription(subscription);\n }\n else if (subscription._state === \"running\") {\n subscription._state = \"ready\";\n }\n}\nfunction onNotify(subscription, type, value) {\n if (subscription._state === \"closed\") {\n return;\n }\n if (subscription._state === \"buffering\") {\n subscription._queue = subscription._queue || [];\n subscription._queue.push({ type, value });\n return;\n }\n if (subscription._state !== \"ready\") {\n subscription._state = \"buffering\";\n subscription._queue = [{ type, value }];\n enqueue(() => flushSubscription(subscription));\n return;\n }\n notifySubscription(subscription, type, value);\n}\nclass Subscription {\n constructor(observer, subscriber) {\n // ASSERT: observer is an object\n // ASSERT: subscriber is callable\n this._cleanup = undefined;\n this._observer = observer;\n this._queue = undefined;\n this._state = \"initializing\";\n const subscriptionObserver = new SubscriptionObserver(this);\n try {\n this._cleanup = subscriber.call(undefined, subscriptionObserver);\n }\n catch (e) {\n subscriptionObserver.error(e);\n }\n if (this._state === \"initializing\") {\n this._state = \"ready\";\n }\n }\n get closed() {\n return this._state === \"closed\";\n }\n unsubscribe() {\n if (this._state !== \"closed\") {\n closeSubscription(this);\n cleanupSubscription(this);\n }\n }\n}\nclass SubscriptionObserver {\n constructor(subscription) { this._subscription = subscription; }\n get closed() { return this._subscription._state === \"closed\"; }\n next(value) { onNotify(this._subscription, \"next\", value); }\n error(value) { onNotify(this._subscription, \"error\", value); }\n complete() { onNotify(this._subscription, \"complete\"); }\n}\n/**\n * The basic Observable class. This primitive is used to wrap asynchronous\n * data streams in a common standardized data type that is interoperable\n * between libraries and can be composed to represent more complex processes.\n */\nclass Observable {\n constructor(subscriber) {\n if (!(this instanceof Observable)) {\n throw new TypeError(\"Observable cannot be called as a function\");\n }\n if (typeof subscriber !== \"function\") {\n throw new TypeError(\"Observable initializer must be a function\");\n }\n this._subscriber = subscriber;\n }\n subscribe(nextOrObserver, onError, onComplete) {\n if (typeof nextOrObserver !== \"object\" || nextOrObserver === null) {\n nextOrObserver = {\n next: nextOrObserver,\n error: onError,\n complete: onComplete\n };\n }\n return new Subscription(nextOrObserver, this._subscriber);\n }\n pipe(first, ...mappers) {\n // tslint:disable-next-line no-this-assignment\n let intermediate = this;\n for (const mapper of [first, ...mappers]) {\n intermediate = mapper(intermediate);\n }\n return intermediate;\n }\n tap(nextOrObserver, onError, onComplete) {\n const tapObserver = typeof nextOrObserver !== \"object\" || nextOrObserver === null\n ? 
{\n next: nextOrObserver,\n error: onError,\n complete: onComplete\n }\n : nextOrObserver;\n return new Observable(observer => {\n return this.subscribe({\n next(value) {\n tapObserver.next && tapObserver.next(value);\n observer.next(value);\n },\n error(error) {\n tapObserver.error && tapObserver.error(error);\n observer.error(error);\n },\n complete() {\n tapObserver.complete && tapObserver.complete();\n observer.complete();\n },\n start(subscription) {\n tapObserver.start && tapObserver.start(subscription);\n }\n });\n });\n }\n forEach(fn) {\n return new Promise((resolve, reject) => {\n if (typeof fn !== \"function\") {\n reject(new TypeError(fn + \" is not a function\"));\n return;\n }\n function done() {\n subscription.unsubscribe();\n resolve(undefined);\n }\n const subscription = this.subscribe({\n next(value) {\n try {\n fn(value, done);\n }\n catch (e) {\n reject(e);\n subscription.unsubscribe();\n }\n },\n error(error) {\n reject(error);\n },\n complete() {\n resolve(undefined);\n }\n });\n });\n }\n map(fn) {\n if (typeof fn !== \"function\") {\n throw new TypeError(fn + \" is not a function\");\n }\n const C = getSpecies(this);\n return new C(observer => this.subscribe({\n next(value) {\n let propagatedValue = value;\n try {\n propagatedValue = fn(value);\n }\n catch (e) {\n return observer.error(e);\n }\n observer.next(propagatedValue);\n },\n error(e) { observer.error(e); },\n complete() { observer.complete(); },\n }));\n }\n filter(fn) {\n if (typeof fn !== \"function\") {\n throw new TypeError(fn + \" is not a function\");\n }\n const C = getSpecies(this);\n return new C(observer => this.subscribe({\n next(value) {\n try {\n if (!fn(value))\n return;\n }\n catch (e) {\n return observer.error(e);\n }\n observer.next(value);\n },\n error(e) { observer.error(e); },\n complete() { observer.complete(); },\n }));\n }\n reduce(fn, seed) {\n if (typeof fn !== \"function\") {\n throw new TypeError(fn + \" is not a function\");\n }\n const C = getSpecies(this);\n const hasSeed = arguments.length > 1;\n let hasValue = false;\n let acc = seed;\n return new C(observer => this.subscribe({\n next(value) {\n const first = !hasValue;\n hasValue = true;\n if (!first || hasSeed) {\n try {\n acc = fn(acc, value);\n }\n catch (e) {\n return observer.error(e);\n }\n }\n else {\n acc = value;\n }\n },\n error(e) { observer.error(e); },\n complete() {\n if (!hasValue && !hasSeed) {\n return observer.error(new TypeError(\"Cannot reduce an empty sequence\"));\n }\n observer.next(acc);\n observer.complete();\n },\n }));\n }\n concat(...sources) {\n const C = getSpecies(this);\n return new C(observer => {\n let subscription;\n let index = 0;\n function startNext(next) {\n subscription = next.subscribe({\n next(v) { observer.next(v); },\n error(e) { observer.error(e); },\n complete() {\n if (index === sources.length) {\n subscription = undefined;\n observer.complete();\n }\n else {\n startNext(C.from(sources[index++]));\n }\n },\n });\n }\n startNext(this);\n return () => {\n if (subscription) {\n subscription.unsubscribe();\n subscription = undefined;\n }\n };\n });\n }\n flatMap(fn) {\n if (typeof fn !== \"function\") {\n throw new TypeError(fn + \" is not a function\");\n }\n const C = getSpecies(this);\n return new C(observer => {\n const subscriptions = [];\n const outer = this.subscribe({\n next(value) {\n let normalizedValue;\n if (fn) {\n try {\n normalizedValue = fn(value);\n }\n catch (e) {\n return observer.error(e);\n }\n }\n else {\n normalizedValue = value;\n }\n const inner = 
C.from(normalizedValue).subscribe({\n next(innerValue) { observer.next(innerValue); },\n error(e) { observer.error(e); },\n complete() {\n const i = subscriptions.indexOf(inner);\n if (i >= 0)\n subscriptions.splice(i, 1);\n completeIfDone();\n },\n });\n subscriptions.push(inner);\n },\n error(e) { observer.error(e); },\n complete() { completeIfDone(); },\n });\n function completeIfDone() {\n if (outer.closed && subscriptions.length === 0) {\n observer.complete();\n }\n }\n return () => {\n subscriptions.forEach(s => s.unsubscribe());\n outer.unsubscribe();\n };\n });\n }\n [(Symbol.observable, SymbolObservable)]() { return this; }\n static from(x) {\n const C = (typeof this === \"function\" ? this : Observable);\n if (x == null) {\n throw new TypeError(x + \" is not an object\");\n }\n const observableMethod = getMethod(x, SymbolObservable);\n if (observableMethod) {\n const observable = observableMethod.call(x);\n if (Object(observable) !== observable) {\n throw new TypeError(observable + \" is not an object\");\n }\n if (isObservable(observable) && observable.constructor === C) {\n return observable;\n }\n return new C(observer => observable.subscribe(observer));\n }\n if (Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\"hasSymbol\"])(\"iterator\")) {\n const iteratorMethod = getMethod(x, SymbolIterator);\n if (iteratorMethod) {\n return new C(observer => {\n enqueue(() => {\n if (observer.closed)\n return;\n for (const item of iteratorMethod.call(x)) {\n observer.next(item);\n if (observer.closed)\n return;\n }\n observer.complete();\n });\n });\n }\n }\n if (Array.isArray(x)) {\n return new C(observer => {\n enqueue(() => {\n if (observer.closed)\n return;\n for (const item of x) {\n observer.next(item);\n if (observer.closed)\n return;\n }\n observer.complete();\n });\n });\n }\n throw new TypeError(x + \" is not observable\");\n }\n static of(...items) {\n const C = (typeof this === \"function\" ? this : Observable);\n return new C(observer => {\n enqueue(() => {\n if (observer.closed)\n return;\n for (const item of items) {\n observer.next(item);\n if (observer.closed)\n return;\n }\n observer.complete();\n });\n });\n }\n static get [SymbolSpecies]() { return this; }\n}\nif (Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\"hasSymbols\"])()) {\n Object.defineProperty(Observable, Symbol(\"extensions\"), {\n value: {\n symbol: SymbolObservable,\n hostReportError,\n },\n configurable: true,\n });\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (Observable);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/observable.js?");
582
583/***/ }),
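/*
 * Usage sketch for the Observable class above (illustrative only): the initializer
 * runs once per subscriber and may return a cleanup function.
 *
 *   const { Observable } = require("observable-fns");
 *   const ticker = new Observable(observer => {
 *     observer.next("ready");
 *     const handle = setTimeout(() => observer.complete(), 100);
 *     return () => clearTimeout(handle); // cleanup on unsubscribe or completion
 *   });
 *   ticker.subscribe({ next: v => console.log(v), complete: () => console.log("done") });
 *   // Observable.from(iterableOrObservable) and Observable.of(...values) also exist (see above)
 */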
584
585/***/ "./node_modules/observable-fns/dist.esm/scan.js":
586/*!******************************************************!*\
587 !*** ./node_modules/observable-fns/dist.esm/scan.js ***!
588 \******************************************************/
589/*! exports provided: default */
590/***/ (function(module, __webpack_exports__, __webpack_require__) {
591
592"use strict";
593eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \"./node_modules/observable-fns/dist.esm/_scheduler.js\");\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \"./node_modules/observable-fns/dist.esm/unsubscribe.js\");\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\nfunction scan(accumulator, seed) {\n return (observable) => {\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\"default\"](observer => {\n let accumulated;\n let index = 0;\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\"AsyncSerialScheduler\"](observer);\n const subscription = observable.subscribe({\n complete() {\n scheduler.complete();\n },\n error(error) {\n scheduler.error(error);\n },\n next(value) {\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\n const prevAcc = index === 0\n ? (typeof seed === \"undefined\" ? value : seed)\n : accumulated;\n accumulated = yield accumulator(prevAcc, value, index++);\n next(accumulated);\n }));\n }\n });\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\"default\"])(subscription);\n });\n };\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (scan);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/scan.js?");
594
595/***/ }),
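/*
 * Usage sketch for scan() above (illustrative only): like reduce(), but every
 * intermediate accumulator value is emitted. With no seed, the first input value
 * is used as the initial accumulator.
 *
 *   const { Observable, scan } = require("observable-fns");
 *   Observable.from([1, 2, 3])
 *     .pipe(scan((sum, value) => sum + value, 0))
 *     .subscribe(total => console.log(total)); // 1, 3, 6
 */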
596
597/***/ "./node_modules/observable-fns/dist.esm/subject.js":
598/*!*********************************************************!*\
599 !*** ./node_modules/observable-fns/dist.esm/subject.js ***!
600 \*********************************************************/
601/*! exports provided: default */
602/***/ (function(module, __webpack_exports__, __webpack_require__) {
603
604"use strict";
605eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \"./node_modules/observable-fns/dist.esm/observable.js\");\n\n// TODO: This observer iteration approach looks inelegant and expensive\n// Idea: Come up with super class for Subscription that contains the\n// notify*, ... methods and use it here\n/**\n * A subject is a \"hot\" observable (see `multicast`) that has its observer\n * methods (`.next(value)`, `.error(error)`, `.complete()`) exposed.\n *\n * Be careful, though! With great power comes great responsibility. Only use\n * the `Subject` when you really need to trigger updates \"from the outside\" and\n * try to keep the code that can access it to a minimum. Return\n * `Observable.from(mySubject)` to not allow other code to mutate.\n */\nclass MulticastSubject extends _observable__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n constructor() {\n super(observer => {\n this._observers.add(observer);\n return () => this._observers.delete(observer);\n });\n this._observers = new Set();\n }\n next(value) {\n for (const observer of this._observers) {\n observer.next(value);\n }\n }\n error(error) {\n for (const observer of this._observers) {\n observer.error(error);\n }\n }\n complete() {\n for (const observer of this._observers) {\n observer.complete();\n }\n }\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (MulticastSubject);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/subject.js?");
606
607/***/ }),
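/*
 * Usage sketch for the Subject above (illustrative only): a hot observable whose
 * observer methods are exposed, so values can be pushed in "from the outside".
 *
 *   const { Observable, Subject } = require("observable-fns");
 *   const subject = new Subject();
 *   subject.subscribe(v => console.log("got", v));
 *   subject.next("hello");
 *   subject.complete();
 *   // hand out Observable.from(subject) if consumers must not be able to call next()
 */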
608
609/***/ "./node_modules/observable-fns/dist.esm/symbols.js":
610/*!*********************************************************!*\
611 !*** ./node_modules/observable-fns/dist.esm/symbols.js ***!
612 \*********************************************************/
613/*! no exports provided */
614/***/ (function(module, __webpack_exports__, __webpack_require__) {
615
616"use strict";
617eval("__webpack_require__.r(__webpack_exports__);\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/symbols.js?");
618
619/***/ }),
620
621/***/ "./node_modules/observable-fns/dist.esm/unsubscribe.js":
622/*!*************************************************************!*\
623 !*** ./node_modules/observable-fns/dist.esm/unsubscribe.js ***!
624 \*************************************************************/
625/*! exports provided: default */
626/***/ (function(module, __webpack_exports__, __webpack_require__) {
627
628"use strict";
629eval("__webpack_require__.r(__webpack_exports__);\n/**\n * Unsubscribe from a subscription returned by something that looks like an observable,\n * but is not necessarily our observable implementation.\n */\nfunction unsubscribe(subscription) {\n if (typeof subscription === \"function\") {\n subscription();\n }\n else if (subscription && typeof subscription.unsubscribe === \"function\") {\n subscription.unsubscribe();\n }\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (unsubscribe);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/unsubscribe.js?");
630
631/***/ }),
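/*
 * Usage sketch for unsubscribe() above (illustrative only): it accepts either a
 * plain cleanup function or an RxJS-style subscription object, and ignores anything else.
 *
 *   const { unsubscribe } = require("observable-fns");
 *   unsubscribe(() => console.log("cleanup"));                 // called directly
 *   unsubscribe({ unsubscribe() { console.log("closed"); } }); // .unsubscribe() is called
 *   unsubscribe(undefined);                                    // no-op
 */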
632
633/***/ "./node_modules/pako/lib/inflate.js":
634/*!******************************************!*\
635 !*** ./node_modules/pako/lib/inflate.js ***!
636 \******************************************/
637/*! no static exports found */
638/***/ (function(module, exports, __webpack_require__) {
639
640"use strict";
641eval("\n\n\nvar zlib_inflate = __webpack_require__(/*! ./zlib/inflate */ \"./node_modules/pako/lib/zlib/inflate.js\");\nvar utils = __webpack_require__(/*! ./utils/common */ \"./node_modules/pako/lib/utils/common.js\");\nvar strings = __webpack_require__(/*! ./utils/strings */ \"./node_modules/pako/lib/utils/strings.js\");\nvar c = __webpack_require__(/*! ./zlib/constants */ \"./node_modules/pako/lib/zlib/constants.js\");\nvar msg = __webpack_require__(/*! ./zlib/messages */ \"./node_modules/pako/lib/zlib/messages.js\");\nvar ZStream = __webpack_require__(/*! ./zlib/zstream */ \"./node_modules/pako/lib/zlib/zstream.js\");\nvar GZheader = __webpack_require__(/*! ./zlib/gzheader */ \"./node_modules/pako/lib/zlib/gzheader.js\");\n\nvar toString = Object.prototype.toString;\n\n/**\n * class Inflate\n *\n * Generic JS-style wrapper for zlib calls. If you don't need\n * streaming behaviour - use more simple functions: [[inflate]]\n * and [[inflateRaw]].\n **/\n\n/* internal\n * inflate.chunks -> Array\n *\n * Chunks of output data, if [[Inflate#onData]] not overridden.\n **/\n\n/**\n * Inflate.result -> Uint8Array|Array|String\n *\n * Uncompressed result, generated by default [[Inflate#onData]]\n * and [[Inflate#onEnd]] handlers. Filled after you push last chunk\n * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you\n * push a chunk with explicit flush (call [[Inflate#push]] with\n * `Z_SYNC_FLUSH` param).\n **/\n\n/**\n * Inflate.err -> Number\n *\n * Error code after inflate finished. 0 (Z_OK) on success.\n * Should be checked if broken data possible.\n **/\n\n/**\n * Inflate.msg -> String\n *\n * Error message, if [[Inflate.err]] != 0\n **/\n\n\n/**\n * new Inflate(options)\n * - options (Object): zlib inflate options.\n *\n * Creates new inflator instance with specified params. Throws exception\n * on bad params. Supported options:\n *\n * - `windowBits`\n * - `dictionary`\n *\n * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)\n * for more information on these.\n *\n * Additional options, for internal needs:\n *\n * - `chunkSize` - size of generated data chunks (16K by default)\n * - `raw` (Boolean) - do raw inflate\n * - `to` (String) - if equal to 'string', then result will be converted\n * from utf8 to utf16 (javascript) string. 
When string output requested,\n * chunk length can differ from `chunkSize`, depending on content.\n *\n * By default, when no options set, autodetect deflate/gzip data format via\n * wrapper header.\n *\n * ##### Example:\n *\n * ```javascript\n * var pako = require('pako')\n * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9])\n * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]);\n *\n * var inflate = new pako.Inflate({ level: 3});\n *\n * inflate.push(chunk1, false);\n * inflate.push(chunk2, true); // true -> last chunk\n *\n * if (inflate.err) { throw new Error(inflate.err); }\n *\n * console.log(inflate.result);\n * ```\n **/\nfunction Inflate(options) {\n if (!(this instanceof Inflate)) return new Inflate(options);\n\n this.options = utils.assign({\n chunkSize: 16384,\n windowBits: 0,\n to: ''\n }, options || {});\n\n var opt = this.options;\n\n // Force window size for `raw` data, if not set directly,\n // because we have no header for autodetect.\n if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) {\n opt.windowBits = -opt.windowBits;\n if (opt.windowBits === 0) { opt.windowBits = -15; }\n }\n\n // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate\n if ((opt.windowBits >= 0) && (opt.windowBits < 16) &&\n !(options && options.windowBits)) {\n opt.windowBits += 32;\n }\n\n // Gzip header has no info about windows size, we can do autodetect only\n // for deflate. So, if window size not set, force it to max when gzip possible\n if ((opt.windowBits > 15) && (opt.windowBits < 48)) {\n // bit 3 (16) -> gzipped data\n // bit 4 (32) -> autodetect gzip/deflate\n if ((opt.windowBits & 15) === 0) {\n opt.windowBits |= 15;\n }\n }\n\n this.err = 0; // error code, if happens (0 = Z_OK)\n this.msg = ''; // error message\n this.ended = false; // used to avoid multiple onEnd() calls\n this.chunks = []; // chunks of compressed data\n\n this.strm = new ZStream();\n this.strm.avail_out = 0;\n\n var status = zlib_inflate.inflateInit2(\n this.strm,\n opt.windowBits\n );\n\n if (status !== c.Z_OK) {\n throw new Error(msg[status]);\n }\n\n this.header = new GZheader();\n\n zlib_inflate.inflateGetHeader(this.strm, this.header);\n\n // Setup dictionary\n if (opt.dictionary) {\n // Convert data if needed\n if (typeof opt.dictionary === 'string') {\n opt.dictionary = strings.string2buf(opt.dictionary);\n } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') {\n opt.dictionary = new Uint8Array(opt.dictionary);\n }\n if (opt.raw) { //In raw mode we need to set the dictionary early\n status = zlib_inflate.inflateSetDictionary(this.strm, opt.dictionary);\n if (status !== c.Z_OK) {\n throw new Error(msg[status]);\n }\n }\n }\n}\n\n/**\n * Inflate#push(data[, mode]) -> Boolean\n * - data (Uint8Array|Array|ArrayBuffer|String): input data\n * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes.\n * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH.\n *\n * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with\n * new output chunks. Returns `true` on success. The last data block must have\n * mode Z_FINISH (or `true`). That will flush internal pending buffers and call\n * [[Inflate#onEnd]]. 
For interim explicit flushes (without ending the stream) you\n * can use mode Z_SYNC_FLUSH, keeping the decompression context.\n *\n * On fail call [[Inflate#onEnd]] with error code and return false.\n *\n * We strongly recommend to use `Uint8Array` on input for best speed (output\n * format is detected automatically). Also, don't skip last param and always\n * use the same type in your code (boolean or number). That will improve JS speed.\n *\n * For regular `Array`-s make sure all elements are [0..255].\n *\n * ##### Example\n *\n * ```javascript\n * push(chunk, false); // push one of data chunks\n * ...\n * push(chunk, true); // push last chunk\n * ```\n **/\nInflate.prototype.push = function (data, mode) {\n var strm = this.strm;\n var chunkSize = this.options.chunkSize;\n var dictionary = this.options.dictionary;\n var status, _mode;\n var next_out_utf8, tail, utf8str;\n\n // Flag to properly process Z_BUF_ERROR on testing inflate call\n // when we check that all output data was flushed.\n var allowBufError = false;\n\n if (this.ended) { return false; }\n _mode = (mode === ~~mode) ? mode : ((mode === true) ? c.Z_FINISH : c.Z_NO_FLUSH);\n\n // Convert data if needed\n if (typeof data === 'string') {\n // Only binary strings can be decompressed on practice\n strm.input = strings.binstring2buf(data);\n } else if (toString.call(data) === '[object ArrayBuffer]') {\n strm.input = new Uint8Array(data);\n } else {\n strm.input = data;\n }\n\n strm.next_in = 0;\n strm.avail_in = strm.input.length;\n\n do {\n if (strm.avail_out === 0) {\n strm.output = new utils.Buf8(chunkSize);\n strm.next_out = 0;\n strm.avail_out = chunkSize;\n }\n\n status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */\n\n if (status === c.Z_NEED_DICT && dictionary) {\n status = zlib_inflate.inflateSetDictionary(this.strm, dictionary);\n }\n\n if (status === c.Z_BUF_ERROR && allowBufError === true) {\n status = c.Z_OK;\n allowBufError = false;\n }\n\n if (status !== c.Z_STREAM_END && status !== c.Z_OK) {\n this.onEnd(status);\n this.ended = true;\n return false;\n }\n\n if (strm.next_out) {\n if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) {\n\n if (this.options.to === 'string') {\n\n next_out_utf8 = strings.utf8border(strm.output, strm.next_out);\n\n tail = strm.next_out - next_out_utf8;\n utf8str = strings.buf2string(strm.output, next_out_utf8);\n\n // move tail\n strm.next_out = tail;\n strm.avail_out = chunkSize - tail;\n if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); }\n\n this.onData(utf8str);\n\n } else {\n this.onData(utils.shrinkBuf(strm.output, strm.next_out));\n }\n }\n }\n\n // When no more input data, we should check that internal inflate buffers\n // are flushed. The only way to do it when avail_out = 0 - run one more\n // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR.\n // Here we set flag to process this error properly.\n //\n // NOTE. 
Deflate does not return error in this case and does not needs such\n // logic.\n if (strm.avail_in === 0 && strm.avail_out === 0) {\n allowBufError = true;\n }\n\n } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END);\n\n if (status === c.Z_STREAM_END) {\n _mode = c.Z_FINISH;\n }\n\n // Finalize on the last chunk.\n if (_mode === c.Z_FINISH) {\n status = zlib_inflate.inflateEnd(this.strm);\n this.onEnd(status);\n this.ended = true;\n return status === c.Z_OK;\n }\n\n // callback interim results if Z_SYNC_FLUSH.\n if (_mode === c.Z_SYNC_FLUSH) {\n this.onEnd(c.Z_OK);\n strm.avail_out = 0;\n return true;\n }\n\n return true;\n};\n\n\n/**\n * Inflate#onData(chunk) -> Void\n * - chunk (Uint8Array|Array|String): output data. Type of array depends\n * on js engine support. When string output requested, each chunk\n * will be string.\n *\n * By default, stores data blocks in `chunks[]` property and glue\n * those in `onEnd`. Override this handler, if you need another behaviour.\n **/\nInflate.prototype.onData = function (chunk) {\n this.chunks.push(chunk);\n};\n\n\n/**\n * Inflate#onEnd(status) -> Void\n * - status (Number): inflate status. 0 (Z_OK) on success,\n * other if not.\n *\n * Called either after you tell inflate that the input stream is\n * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH)\n * or if an error happened. By default - join collected chunks,\n * free memory and fill `results` / `err` properties.\n **/\nInflate.prototype.onEnd = function (status) {\n // On success - join\n if (status === c.Z_OK) {\n if (this.options.to === 'string') {\n // Glue & convert here, until we teach pako to send\n // utf8 aligned strings to onData\n this.result = this.chunks.join('');\n } else {\n this.result = utils.flattenChunks(this.chunks);\n }\n }\n this.chunks = [];\n this.err = status;\n this.msg = this.strm.msg;\n};\n\n\n/**\n * inflate(data[, options]) -> Uint8Array|Array|String\n * - data (Uint8Array|Array|String): input data to decompress.\n * - options (Object): zlib inflate options.\n *\n * Decompress `data` with inflate/ungzip and `options`. Autodetect\n * format via wrapper header by default. That's why we don't provide\n * separate `ungzip` method.\n *\n * Supported options are:\n *\n * - windowBits\n *\n * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)\n * for more information.\n *\n * Sugar (options):\n *\n * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify\n * negative windowBits implicitly.\n * - `to` (String) - if equal to 'string', then result will be converted\n * from utf8 to utf16 (javascript) string. 
When string output requested,\n * chunk length can differ from `chunkSize`, depending on content.\n *\n *\n * ##### Example:\n *\n * ```javascript\n * var pako = require('pako')\n * , input = pako.deflate([1,2,3,4,5,6,7,8,9])\n * , output;\n *\n * try {\n * output = pako.inflate(input);\n * } catch (err)\n * console.log(err);\n * }\n * ```\n **/\nfunction inflate(input, options) {\n var inflator = new Inflate(options);\n\n inflator.push(input, true);\n\n // That will never happens, if you don't cheat with options :)\n if (inflator.err) { throw inflator.msg || msg[inflator.err]; }\n\n return inflator.result;\n}\n\n\n/**\n * inflateRaw(data[, options]) -> Uint8Array|Array|String\n * - data (Uint8Array|Array|String): input data to decompress.\n * - options (Object): zlib inflate options.\n *\n * The same as [[inflate]], but creates raw data, without wrapper\n * (header and adler32 crc).\n **/\nfunction inflateRaw(input, options) {\n options = options || {};\n options.raw = true;\n return inflate(input, options);\n}\n\n\n/**\n * ungzip(data[, options]) -> Uint8Array|Array|String\n * - data (Uint8Array|Array|String): input data to decompress.\n * - options (Object): zlib inflate options.\n *\n * Just shortcut to [[inflate]], because it autodetects format\n * by header.content. Done for convenience.\n **/\n\n\nexports.Inflate = Inflate;\nexports.inflate = inflate;\nexports.inflateRaw = inflateRaw;\nexports.ungzip = inflate;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/inflate.js?");
642
643/***/ }),
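/*
 * Usage sketch for the one-shot helpers above (illustrative only; `deflatedBytes`
 * is assumed to be deflate- or gzip-compressed input, e.g. a Uint8Array):
 *
 *   var pako = require("pako");
 *   var bytes = pako.inflate(deflatedBytes);                   // Uint8Array of raw output
 *   var text  = pako.inflate(deflatedBytes, { to: "string" }); // decoded as a UTF-8 string
 *   var raw   = pako.inflateRaw(deflatedBytes);                // input without zlib/gzip wrapper
 *   // pako.ungzip is an alias of pako.inflate, since the format is autodetected
 */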
644
645/***/ "./node_modules/pako/lib/utils/common.js":
646/*!***********************************************!*\
647 !*** ./node_modules/pako/lib/utils/common.js ***!
648 \***********************************************/
649/*! no static exports found */
650/***/ (function(module, exports, __webpack_require__) {
651
652"use strict";
653eval("\n\n\nvar TYPED_OK = (typeof Uint8Array !== 'undefined') &&\n (typeof Uint16Array !== 'undefined') &&\n (typeof Int32Array !== 'undefined');\n\nfunction _has(obj, key) {\n return Object.prototype.hasOwnProperty.call(obj, key);\n}\n\nexports.assign = function (obj /*from1, from2, from3, ...*/) {\n var sources = Array.prototype.slice.call(arguments, 1);\n while (sources.length) {\n var source = sources.shift();\n if (!source) { continue; }\n\n if (typeof source !== 'object') {\n throw new TypeError(source + 'must be non-object');\n }\n\n for (var p in source) {\n if (_has(source, p)) {\n obj[p] = source[p];\n }\n }\n }\n\n return obj;\n};\n\n\n// reduce buffer size, avoiding mem copy\nexports.shrinkBuf = function (buf, size) {\n if (buf.length === size) { return buf; }\n if (buf.subarray) { return buf.subarray(0, size); }\n buf.length = size;\n return buf;\n};\n\n\nvar fnTyped = {\n arraySet: function (dest, src, src_offs, len, dest_offs) {\n if (src.subarray && dest.subarray) {\n dest.set(src.subarray(src_offs, src_offs + len), dest_offs);\n return;\n }\n // Fallback to ordinary array\n for (var i = 0; i < len; i++) {\n dest[dest_offs + i] = src[src_offs + i];\n }\n },\n // Join array of chunks to single array.\n flattenChunks: function (chunks) {\n var i, l, len, pos, chunk, result;\n\n // calculate data length\n len = 0;\n for (i = 0, l = chunks.length; i < l; i++) {\n len += chunks[i].length;\n }\n\n // join chunks\n result = new Uint8Array(len);\n pos = 0;\n for (i = 0, l = chunks.length; i < l; i++) {\n chunk = chunks[i];\n result.set(chunk, pos);\n pos += chunk.length;\n }\n\n return result;\n }\n};\n\nvar fnUntyped = {\n arraySet: function (dest, src, src_offs, len, dest_offs) {\n for (var i = 0; i < len; i++) {\n dest[dest_offs + i] = src[src_offs + i];\n }\n },\n // Join array of chunks to single array.\n flattenChunks: function (chunks) {\n return [].concat.apply([], chunks);\n }\n};\n\n\n// Enable/Disable typed arrays use, for testing\n//\nexports.setTyped = function (on) {\n if (on) {\n exports.Buf8 = Uint8Array;\n exports.Buf16 = Uint16Array;\n exports.Buf32 = Int32Array;\n exports.assign(exports, fnTyped);\n } else {\n exports.Buf8 = Array;\n exports.Buf16 = Array;\n exports.Buf32 = Array;\n exports.assign(exports, fnUntyped);\n }\n};\n\nexports.setTyped(TYPED_OK);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/utils/common.js?");
654
655/***/ }),
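/*
 * Illustrative sketch of the internal helpers above (requiring this file directly
 * is an assumption; within pako they are used internally):
 *
 *   var utils = require("pako/lib/utils/common");
 *   utils.assign({}, { a: 1 }, { b: 2 });              // { a: 1, b: 2 }
 *   utils.shrinkBuf(new Uint8Array(16), 4);            // subarray view of the first 4 bytes
 *   utils.flattenChunks([new Uint8Array([1]), new Uint8Array([2, 3])]); // Uint8Array [1, 2, 3]
 */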
656
657/***/ "./node_modules/pako/lib/utils/strings.js":
658/*!************************************************!*\
659 !*** ./node_modules/pako/lib/utils/strings.js ***!
660 \************************************************/
661/*! no static exports found */
662/***/ (function(module, exports, __webpack_require__) {
663
664"use strict";
665eval("// String encode/decode helpers\n\n\n\nvar utils = __webpack_require__(/*! ./common */ \"./node_modules/pako/lib/utils/common.js\");\n\n\n// Quick check if we can use fast array to bin string conversion\n//\n// - apply(Array) can fail on Android 2.2\n// - apply(Uint8Array) can fail on iOS 5.1 Safari\n//\nvar STR_APPLY_OK = true;\nvar STR_APPLY_UIA_OK = true;\n\ntry { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; }\ntry { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; }\n\n\n// Table with utf8 lengths (calculated by first byte of sequence)\n// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,\n// because max possible codepoint is 0x10ffff\nvar _utf8len = new utils.Buf8(256);\nfor (var q = 0; q < 256; q++) {\n _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1);\n}\n_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start\n\n\n// convert string to array (typed, when possible)\nexports.string2buf = function (str) {\n var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;\n\n // count binary size\n for (m_pos = 0; m_pos < str_len; m_pos++) {\n c = str.charCodeAt(m_pos);\n if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {\n c2 = str.charCodeAt(m_pos + 1);\n if ((c2 & 0xfc00) === 0xdc00) {\n c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);\n m_pos++;\n }\n }\n buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;\n }\n\n // allocate buffer\n buf = new utils.Buf8(buf_len);\n\n // convert\n for (i = 0, m_pos = 0; i < buf_len; m_pos++) {\n c = str.charCodeAt(m_pos);\n if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {\n c2 = str.charCodeAt(m_pos + 1);\n if ((c2 & 0xfc00) === 0xdc00) {\n c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);\n m_pos++;\n }\n }\n if (c < 0x80) {\n /* one byte */\n buf[i++] = c;\n } else if (c < 0x800) {\n /* two bytes */\n buf[i++] = 0xC0 | (c >>> 6);\n buf[i++] = 0x80 | (c & 0x3f);\n } else if (c < 0x10000) {\n /* three bytes */\n buf[i++] = 0xE0 | (c >>> 12);\n buf[i++] = 0x80 | (c >>> 6 & 0x3f);\n buf[i++] = 0x80 | (c & 0x3f);\n } else {\n /* four bytes */\n buf[i++] = 0xf0 | (c >>> 18);\n buf[i++] = 0x80 | (c >>> 12 & 0x3f);\n buf[i++] = 0x80 | (c >>> 6 & 0x3f);\n buf[i++] = 0x80 | (c & 0x3f);\n }\n }\n\n return buf;\n};\n\n// Helper (used in 2 places)\nfunction buf2binstring(buf, len) {\n // On Chrome, the arguments in a function call that are allowed is `65534`.\n // If the length of the buffer is smaller than that, we can use this optimization,\n // otherwise we will take a slower path.\n if (len < 65534) {\n if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) {\n return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len));\n }\n }\n\n var result = '';\n for (var i = 0; i < len; i++) {\n result += String.fromCharCode(buf[i]);\n }\n return result;\n}\n\n\n// Convert byte array to binary string\nexports.buf2binstring = function (buf) {\n return buf2binstring(buf, buf.length);\n};\n\n\n// Convert binary string (typed, when possible)\nexports.binstring2buf = function (str) {\n var buf = new utils.Buf8(str.length);\n for (var i = 0, len = buf.length; i < len; i++) {\n buf[i] = str.charCodeAt(i);\n }\n return buf;\n};\n\n\n// convert array to string\nexports.buf2string = function (buf, max) {\n var i, out, c, c_len;\n var len = max || buf.length;\n\n // Reserve max possible length (2 words per char)\n // NB: by unknown reasons, Array is significantly 
faster for\n // String.fromCharCode.apply than Uint16Array.\n var utf16buf = new Array(len * 2);\n\n for (out = 0, i = 0; i < len;) {\n c = buf[i++];\n // quick process ascii\n if (c < 0x80) { utf16buf[out++] = c; continue; }\n\n c_len = _utf8len[c];\n // skip 5 & 6 byte codes\n if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; }\n\n // apply mask on first byte\n c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;\n // join the rest\n while (c_len > 1 && i < len) {\n c = (c << 6) | (buf[i++] & 0x3f);\n c_len--;\n }\n\n // terminated by end of string?\n if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }\n\n if (c < 0x10000) {\n utf16buf[out++] = c;\n } else {\n c -= 0x10000;\n utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);\n utf16buf[out++] = 0xdc00 | (c & 0x3ff);\n }\n }\n\n return buf2binstring(utf16buf, out);\n};\n\n\n// Calculate max possible position in utf8 buffer,\n// that will not break sequence. If that's not possible\n// - (very small limits) return max size as is.\n//\n// buf[] - utf8 bytes array\n// max - length limit (mandatory);\nexports.utf8border = function (buf, max) {\n var pos;\n\n max = max || buf.length;\n if (max > buf.length) { max = buf.length; }\n\n // go back from last position, until start of sequence found\n pos = max - 1;\n while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }\n\n // Very small and broken sequence,\n // return max, because we should return something anyway.\n if (pos < 0) { return max; }\n\n // If we came to start of buffer - that means buffer is too small,\n // return max too.\n if (pos === 0) { return max; }\n\n return (pos + _utf8len[buf[pos]] > max) ? pos : max;\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/utils/strings.js?");
666
667/***/ }),
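/*
  The string helpers above round-trip between JS (UTF-16) strings and UTF-8 byte
  arrays, with utf8border() finding a safe split point for chunked output. A
  minimal sketch of what they produce, assuming pako is required directly from
  npm rather than through this bundle:

  var strings = require('pako/lib/utils/strings');

  var bytes = strings.string2buf('A€');   // 'A' -> 0x41 (one byte),
                                          // '€' (U+20AC) -> 0xE2 0x82 0xAC (three bytes)
  // bytes: Uint8Array [0x41, 0xE2, 0x82, 0xAC]

  strings.buf2string(bytes);              // back to 'A€'; malformed sequences decode to U+FFFD

  strings.utf8border(bytes, 3);           // -> 1: cutting at position 2 or 3 would split
                                          //    the three-byte '€' sequence
*/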
668
669/***/ "./node_modules/pako/lib/zlib/adler32.js":
670/*!***********************************************!*\
671 !*** ./node_modules/pako/lib/zlib/adler32.js ***!
672 \***********************************************/
673/*! no static exports found */
674/***/ (function(module, exports, __webpack_require__) {
675
676"use strict";
677eval("\n\n// Note: adler32 takes 12% for level 0 and 2% for level 6.\n// It isn't worth it to make additional optimizations as in original.\n// Small size is preferable.\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nfunction adler32(adler, buf, len, pos) {\n var s1 = (adler & 0xffff) |0,\n s2 = ((adler >>> 16) & 0xffff) |0,\n n = 0;\n\n while (len !== 0) {\n // Set limit ~ twice less than 5552, to keep\n // s2 in 31-bits, because we force signed ints.\n // in other case %= will fail.\n n = len > 2000 ? 2000 : len;\n len -= n;\n\n do {\n s1 = (s1 + buf[pos++]) |0;\n s2 = (s2 + s1) |0;\n } while (--n);\n\n s1 %= 65521;\n s2 %= 65521;\n }\n\n return (s1 | (s2 << 16)) |0;\n}\n\n\nmodule.exports = adler32;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/adler32.js?");
678
679/***/ }),
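/*
  Adler-32 keeps two running sums modulo 65521: s1 = 1 + the sum of the bytes,
  s2 = the sum of the successive s1 values, and the checksum is (s2 << 16) | s1.
  The 2000-byte inner chunk simply keeps s2 inside signed 31-bit range between
  the %= reductions. A quick sanity check, assuming the module is required
  directly from pako:

  var adler32 = require('pako/lib/zlib/adler32');

  var data = new Uint8Array([87, 105, 107, 105, 112, 101, 100, 105, 97]); // "Wikipedia"
  var sum  = adler32(1, data, data.length, 0);  // seed 1, i.e. adler32(0L, Z_NULL, 0) in zlib
  // sum.toString(16) === '11e60398'
  //   s1 = 1 + 87 + 105 + ... + 97     = 0x0398
  //   s2 = 88 + 193 + 300 + ... + 920  = 0x11e6
*/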
680
681/***/ "./node_modules/pako/lib/zlib/constants.js":
682/*!*************************************************!*\
683 !*** ./node_modules/pako/lib/zlib/constants.js ***!
684 \*************************************************/
685/*! no static exports found */
686/***/ (function(module, exports, __webpack_require__) {
687
688"use strict";
689eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nmodule.exports = {\n\n /* Allowed flush values; see deflate() and inflate() below for details */\n Z_NO_FLUSH: 0,\n Z_PARTIAL_FLUSH: 1,\n Z_SYNC_FLUSH: 2,\n Z_FULL_FLUSH: 3,\n Z_FINISH: 4,\n Z_BLOCK: 5,\n Z_TREES: 6,\n\n /* Return codes for the compression/decompression functions. Negative values\n * are errors, positive values are used for special but normal events.\n */\n Z_OK: 0,\n Z_STREAM_END: 1,\n Z_NEED_DICT: 2,\n Z_ERRNO: -1,\n Z_STREAM_ERROR: -2,\n Z_DATA_ERROR: -3,\n //Z_MEM_ERROR: -4,\n Z_BUF_ERROR: -5,\n //Z_VERSION_ERROR: -6,\n\n /* compression levels */\n Z_NO_COMPRESSION: 0,\n Z_BEST_SPEED: 1,\n Z_BEST_COMPRESSION: 9,\n Z_DEFAULT_COMPRESSION: -1,\n\n\n Z_FILTERED: 1,\n Z_HUFFMAN_ONLY: 2,\n Z_RLE: 3,\n Z_FIXED: 4,\n Z_DEFAULT_STRATEGY: 0,\n\n /* Possible values of the data_type field (though see inflate()) */\n Z_BINARY: 0,\n Z_TEXT: 1,\n //Z_ASCII: 1, // = Z_TEXT (deprecated)\n Z_UNKNOWN: 2,\n\n /* The deflate compression method */\n Z_DEFLATED: 8\n //Z_NULL: null // Use -1 or null inline, depending on var type\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/constants.js?");
690
691/***/ }),
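/*
  These mirror zlib's public constants; the inflate code later in this bundle
  returns the Z_* codes directly, so callers compare against them rather than
  raw numbers. For example (module path as used above):

  var c = require('pako/lib/zlib/constants');
  // ret === c.Z_STREAM_END (1)  -> whole stream decoded
  // ret === c.Z_OK (0)          -> progress made, call inflate() again
  // ret < 0                     -> error (Z_STREAM_ERROR, Z_DATA_ERROR, Z_BUF_ERROR, ...)
*/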
692
693/***/ "./node_modules/pako/lib/zlib/crc32.js":
694/*!*********************************************!*\
695 !*** ./node_modules/pako/lib/zlib/crc32.js ***!
696 \*********************************************/
697/*! no static exports found */
698/***/ (function(module, exports, __webpack_require__) {
699
700"use strict";
701eval("\n\n// Note: we can't get significant speed boost here.\n// So write code to minimize size - no pregenerated tables\n// and array tools dependencies.\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\n// Use ordinary array, since untyped makes no boost here\nfunction makeTable() {\n var c, table = [];\n\n for (var n = 0; n < 256; n++) {\n c = n;\n for (var k = 0; k < 8; k++) {\n c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\n }\n table[n] = c;\n }\n\n return table;\n}\n\n// Create table on load. Just 255 signed longs. Not a problem.\nvar crcTable = makeTable();\n\n\nfunction crc32(crc, buf, len, pos) {\n var t = crcTable,\n end = pos + len;\n\n crc ^= -1;\n\n for (var i = pos; i < end; i++) {\n crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];\n }\n\n return (crc ^ (-1)); // >>> 0;\n}\n\n\nmodule.exports = crc32;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/crc32.js?");
702
703/***/ }),
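/*
  crc32() above is the standard reflected CRC-32 (polynomial 0xEDB88320),
  computed byte-at-a-time from a 256-entry table built at load time. The result
  is left as a signed 32-bit integer (the trailing `>>> 0` is commented out), so
  mask it before printing. A sketch, assuming the module is required directly
  from pako:

  var crc32 = require('pako/lib/zlib/crc32');

  var data = new Uint8Array([0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39]); // "123456789"
  var crc  = crc32(0, data, data.length, 0);     // seed 0, i.e. crc32(0L, Z_NULL, 0)
  // (crc >>> 0).toString(16) === 'cbf43926'     // the well-known CRC-32 check value
*/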
704
705/***/ "./node_modules/pako/lib/zlib/gzheader.js":
706/*!************************************************!*\
707 !*** ./node_modules/pako/lib/zlib/gzheader.js ***!
708 \************************************************/
709/*! no static exports found */
710/***/ (function(module, exports, __webpack_require__) {
711
712"use strict";
713eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nfunction GZheader() {\n /* true if compressed data believed to be text */\n this.text = 0;\n /* modification time */\n this.time = 0;\n /* extra flags (not used when writing a gzip file) */\n this.xflags = 0;\n /* operating system */\n this.os = 0;\n /* pointer to extra field or Z_NULL if none */\n this.extra = null;\n /* extra field length (valid if extra != Z_NULL) */\n this.extra_len = 0; // Actually, we don't need it in JS,\n // but leave for few code modifications\n\n //\n // Setup limits is not necessary because in js we should not preallocate memory\n // for inflate use constant limit in 65536 bytes\n //\n\n /* space at extra (only when reading header) */\n // this.extra_max = 0;\n /* pointer to zero-terminated file name or Z_NULL */\n this.name = '';\n /* space at name (only when reading header) */\n // this.name_max = 0;\n /* pointer to zero-terminated comment or Z_NULL */\n this.comment = '';\n /* space at comment (only when reading header) */\n // this.comm_max = 0;\n /* true if there was or will be a header crc */\n this.hcrc = 0;\n /* true when done reading gzip header (not used when writing a gzip file) */\n this.done = false;\n}\n\nmodule.exports = GZheader;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/gzheader.js?");
714
715/***/ }),
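/*
  GZheader is the container that inflate() below fills in while it parses a gzip
  header (name, comment, mtime, OS, flags). A hedged sketch of wiring one up via
  the low-level API exported by ./zlib/inflate.js; the ZStream helper lives in
  pako/lib/zlib/zstream.js, which is outside this excerpt:

  var inflateMod = require('pako/lib/zlib/inflate');
  var GZheader   = require('pako/lib/zlib/gzheader');
  var ZStream    = require('pako/lib/zlib/zstream');

  var strm = new ZStream();
  inflateMod.inflateInit2(strm, 31);         // 31 = gzip wrapper only, 32K window
  var head = new GZheader();
  inflateMod.inflateGetHeader(strm, head);   // must be attached before data is fed in

  // ...drive inflateMod.inflate(strm, flush) with gzip input...
  // head.done flips to true once the header has been read; head.name, head.comment,
  // head.time and head.os then describe the original file.
*/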
716
717/***/ "./node_modules/pako/lib/zlib/inffast.js":
718/*!***********************************************!*\
719 !*** ./node_modules/pako/lib/zlib/inffast.js ***!
720 \***********************************************/
721/*! no static exports found */
722/***/ (function(module, exports, __webpack_require__) {
723
724"use strict";
725eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\n// See state defs from inflate.js\nvar BAD = 30; /* got a data error -- remain here until reset */\nvar TYPE = 12; /* i: waiting for type bits, including last-flag bit */\n\n/*\n Decode literal, length, and distance codes and write out the resulting\n literal and match bytes until either not enough input or output is\n available, an end-of-block is encountered, or a data error is encountered.\n When large enough input and output buffers are supplied to inflate(), for\n example, a 16K input buffer and a 64K output buffer, more than 95% of the\n inflate execution time is spent in this routine.\n\n Entry assumptions:\n\n state.mode === LEN\n strm.avail_in >= 6\n strm.avail_out >= 258\n start >= strm.avail_out\n state.bits < 8\n\n On return, state.mode is one of:\n\n LEN -- ran out of enough output space or enough available input\n TYPE -- reached end of block code, inflate() to interpret next block\n BAD -- error in block data\n\n Notes:\n\n - The maximum input bits used by a length/distance pair is 15 bits for the\n length code, 5 bits for the length extra, 15 bits for the distance code,\n and 13 bits for the distance extra. This totals 48 bits, or six bytes.\n Therefore if strm.avail_in >= 6, then there is enough input to avoid\n checking for available input while decoding.\n\n - The maximum bytes that a single length/distance pair can output is 258\n bytes, which is the maximum length that can be coded. 
inflate_fast()\n requires strm.avail_out >= 258 for each loop to avoid checking for\n output space.\n */\nmodule.exports = function inflate_fast(strm, start) {\n var state;\n var _in; /* local strm.input */\n var last; /* have enough input while in < last */\n var _out; /* local strm.output */\n var beg; /* inflate()'s initial strm.output */\n var end; /* while out < end, enough space available */\n//#ifdef INFLATE_STRICT\n var dmax; /* maximum distance from zlib header */\n//#endif\n var wsize; /* window size or zero if not using window */\n var whave; /* valid bytes in the window */\n var wnext; /* window write index */\n // Use `s_window` instead `window`, avoid conflict with instrumentation tools\n var s_window; /* allocated sliding window, if wsize != 0 */\n var hold; /* local strm.hold */\n var bits; /* local strm.bits */\n var lcode; /* local strm.lencode */\n var dcode; /* local strm.distcode */\n var lmask; /* mask for first level of length codes */\n var dmask; /* mask for first level of distance codes */\n var here; /* retrieved table entry */\n var op; /* code bits, operation, extra bits, or */\n /* window position, window bytes to copy */\n var len; /* match length, unused bytes */\n var dist; /* match distance */\n var from; /* where to copy match from */\n var from_source;\n\n\n var input, output; // JS specific, because we have no pointers\n\n /* copy state to local variables */\n state = strm.state;\n //here = state.here;\n _in = strm.next_in;\n input = strm.input;\n last = _in + (strm.avail_in - 5);\n _out = strm.next_out;\n output = strm.output;\n beg = _out - (start - strm.avail_out);\n end = _out + (strm.avail_out - 257);\n//#ifdef INFLATE_STRICT\n dmax = state.dmax;\n//#endif\n wsize = state.wsize;\n whave = state.whave;\n wnext = state.wnext;\n s_window = state.window;\n hold = state.hold;\n bits = state.bits;\n lcode = state.lencode;\n dcode = state.distcode;\n lmask = (1 << state.lenbits) - 1;\n dmask = (1 << state.distbits) - 1;\n\n\n /* decode literals and length/distances until end-of-block or not enough\n input data or output space */\n\n top:\n do {\n if (bits < 15) {\n hold += input[_in++] << bits;\n bits += 8;\n hold += input[_in++] << bits;\n bits += 8;\n }\n\n here = lcode[hold & lmask];\n\n dolen:\n for (;;) { // Goto emulation\n op = here >>> 24/*here.bits*/;\n hold >>>= op;\n bits -= op;\n op = (here >>> 16) & 0xff/*here.op*/;\n if (op === 0) { /* literal */\n //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?\n // \"inflate: literal '%c'\\n\" :\n // \"inflate: literal 0x%02x\\n\", here.val));\n output[_out++] = here & 0xffff/*here.val*/;\n }\n else if (op & 16) { /* length base */\n len = here & 0xffff/*here.val*/;\n op &= 15; /* number of extra bits */\n if (op) {\n if (bits < op) {\n hold += input[_in++] << bits;\n bits += 8;\n }\n len += hold & ((1 << op) - 1);\n hold >>>= op;\n bits -= op;\n }\n //Tracevv((stderr, \"inflate: length %u\\n\", len));\n if (bits < 15) {\n hold += input[_in++] << bits;\n bits += 8;\n hold += input[_in++] << bits;\n bits += 8;\n }\n here = dcode[hold & dmask];\n\n dodist:\n for (;;) { // goto emulation\n op = here >>> 24/*here.bits*/;\n hold >>>= op;\n bits -= op;\n op = (here >>> 16) & 0xff/*here.op*/;\n\n if (op & 16) { /* distance base */\n dist = here & 0xffff/*here.val*/;\n op &= 15; /* number of extra bits */\n if (bits < op) {\n hold += input[_in++] << bits;\n bits += 8;\n if (bits < op) {\n hold += input[_in++] << bits;\n bits += 8;\n }\n }\n dist += hold & ((1 << op) - 1);\n//#ifdef INFLATE_STRICT\n if 
(dist > dmax) {\n strm.msg = 'invalid distance too far back';\n state.mode = BAD;\n break top;\n }\n//#endif\n hold >>>= op;\n bits -= op;\n //Tracevv((stderr, \"inflate: distance %u\\n\", dist));\n op = _out - beg; /* max distance in output */\n if (dist > op) { /* see if copy from window */\n op = dist - op; /* distance back in window */\n if (op > whave) {\n if (state.sane) {\n strm.msg = 'invalid distance too far back';\n state.mode = BAD;\n break top;\n }\n\n// (!) This block is disabled in zlib defaults,\n// don't enable it for binary compatibility\n//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR\n// if (len <= op - whave) {\n// do {\n// output[_out++] = 0;\n// } while (--len);\n// continue top;\n// }\n// len -= op - whave;\n// do {\n// output[_out++] = 0;\n// } while (--op > whave);\n// if (op === 0) {\n// from = _out - dist;\n// do {\n// output[_out++] = output[from++];\n// } while (--len);\n// continue top;\n// }\n//#endif\n }\n from = 0; // window index\n from_source = s_window;\n if (wnext === 0) { /* very common case */\n from += wsize - op;\n if (op < len) { /* some from window */\n len -= op;\n do {\n output[_out++] = s_window[from++];\n } while (--op);\n from = _out - dist; /* rest from output */\n from_source = output;\n }\n }\n else if (wnext < op) { /* wrap around window */\n from += wsize + wnext - op;\n op -= wnext;\n if (op < len) { /* some from end of window */\n len -= op;\n do {\n output[_out++] = s_window[from++];\n } while (--op);\n from = 0;\n if (wnext < len) { /* some from start of window */\n op = wnext;\n len -= op;\n do {\n output[_out++] = s_window[from++];\n } while (--op);\n from = _out - dist; /* rest from output */\n from_source = output;\n }\n }\n }\n else { /* contiguous in window */\n from += wnext - op;\n if (op < len) { /* some from window */\n len -= op;\n do {\n output[_out++] = s_window[from++];\n } while (--op);\n from = _out - dist; /* rest from output */\n from_source = output;\n }\n }\n while (len > 2) {\n output[_out++] = from_source[from++];\n output[_out++] = from_source[from++];\n output[_out++] = from_source[from++];\n len -= 3;\n }\n if (len) {\n output[_out++] = from_source[from++];\n if (len > 1) {\n output[_out++] = from_source[from++];\n }\n }\n }\n else {\n from = _out - dist; /* copy direct from output */\n do { /* minimum length is three */\n output[_out++] = output[from++];\n output[_out++] = output[from++];\n output[_out++] = output[from++];\n len -= 3;\n } while (len > 2);\n if (len) {\n output[_out++] = output[from++];\n if (len > 1) {\n output[_out++] = output[from++];\n }\n }\n }\n }\n else if ((op & 64) === 0) { /* 2nd level distance code */\n here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];\n continue dodist;\n }\n else {\n strm.msg = 'invalid distance code';\n state.mode = BAD;\n break top;\n }\n\n break; // need to emulate goto via \"continue\"\n }\n }\n else if ((op & 64) === 0) { /* 2nd level length code */\n here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];\n continue dolen;\n }\n else if (op & 32) { /* end-of-block */\n //Tracevv((stderr, \"inflate: end of block\\n\"));\n state.mode = TYPE;\n break top;\n }\n else {\n strm.msg = 'invalid literal/length code';\n state.mode = BAD;\n break top;\n }\n\n break; // need to emulate goto via \"continue\"\n }\n } while (_in < last && _out < end);\n\n /* return unused bytes (on entry, bits < 8, so in won't go too far back) */\n len = bits >> 3;\n _in -= len;\n bits -= len << 3;\n hold &= (1 << bits) - 1;\n\n /* update state and 
return */\n strm.next_in = _in;\n strm.next_out = _out;\n strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last));\n strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end));\n state.hold = hold;\n state.bits = bits;\n return;\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inffast.js?");
726
727/***/ }),
728
729/***/ "./node_modules/pako/lib/zlib/inflate.js":
730/*!***********************************************!*\
731 !*** ./node_modules/pako/lib/zlib/inflate.js ***!
732 \***********************************************/
733/*! no static exports found */
734/***/ (function(module, exports, __webpack_require__) {
735
736"use strict";
737eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nvar utils = __webpack_require__(/*! ../utils/common */ \"./node_modules/pako/lib/utils/common.js\");\nvar adler32 = __webpack_require__(/*! ./adler32 */ \"./node_modules/pako/lib/zlib/adler32.js\");\nvar crc32 = __webpack_require__(/*! ./crc32 */ \"./node_modules/pako/lib/zlib/crc32.js\");\nvar inflate_fast = __webpack_require__(/*! ./inffast */ \"./node_modules/pako/lib/zlib/inffast.js\");\nvar inflate_table = __webpack_require__(/*! ./inftrees */ \"./node_modules/pako/lib/zlib/inftrees.js\");\n\nvar CODES = 0;\nvar LENS = 1;\nvar DISTS = 2;\n\n/* Public constants ==========================================================*/\n/* ===========================================================================*/\n\n\n/* Allowed flush values; see deflate() and inflate() below for details */\n//var Z_NO_FLUSH = 0;\n//var Z_PARTIAL_FLUSH = 1;\n//var Z_SYNC_FLUSH = 2;\n//var Z_FULL_FLUSH = 3;\nvar Z_FINISH = 4;\nvar Z_BLOCK = 5;\nvar Z_TREES = 6;\n\n\n/* Return codes for the compression/decompression functions. 
Negative values\n * are errors, positive values are used for special but normal events.\n */\nvar Z_OK = 0;\nvar Z_STREAM_END = 1;\nvar Z_NEED_DICT = 2;\n//var Z_ERRNO = -1;\nvar Z_STREAM_ERROR = -2;\nvar Z_DATA_ERROR = -3;\nvar Z_MEM_ERROR = -4;\nvar Z_BUF_ERROR = -5;\n//var Z_VERSION_ERROR = -6;\n\n/* The deflate compression method */\nvar Z_DEFLATED = 8;\n\n\n/* STATES ====================================================================*/\n/* ===========================================================================*/\n\n\nvar HEAD = 1; /* i: waiting for magic header */\nvar FLAGS = 2; /* i: waiting for method and flags (gzip) */\nvar TIME = 3; /* i: waiting for modification time (gzip) */\nvar OS = 4; /* i: waiting for extra flags and operating system (gzip) */\nvar EXLEN = 5; /* i: waiting for extra length (gzip) */\nvar EXTRA = 6; /* i: waiting for extra bytes (gzip) */\nvar NAME = 7; /* i: waiting for end of file name (gzip) */\nvar COMMENT = 8; /* i: waiting for end of comment (gzip) */\nvar HCRC = 9; /* i: waiting for header crc (gzip) */\nvar DICTID = 10; /* i: waiting for dictionary check value */\nvar DICT = 11; /* waiting for inflateSetDictionary() call */\nvar TYPE = 12; /* i: waiting for type bits, including last-flag bit */\nvar TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */\nvar STORED = 14; /* i: waiting for stored size (length and complement) */\nvar COPY_ = 15; /* i/o: same as COPY below, but only first time in */\nvar COPY = 16; /* i/o: waiting for input or output to copy stored block */\nvar TABLE = 17; /* i: waiting for dynamic block table lengths */\nvar LENLENS = 18; /* i: waiting for code length code lengths */\nvar CODELENS = 19; /* i: waiting for length/lit and distance code lengths */\nvar LEN_ = 20; /* i: same as LEN below, but only first time in */\nvar LEN = 21; /* i: waiting for length/lit/eob code */\nvar LENEXT = 22; /* i: waiting for length extra bits */\nvar DIST = 23; /* i: waiting for distance code */\nvar DISTEXT = 24; /* i: waiting for distance extra bits */\nvar MATCH = 25; /* o: waiting for output space to copy string */\nvar LIT = 26; /* o: waiting for output space to write literal */\nvar CHECK = 27; /* i: waiting for 32-bit check value */\nvar LENGTH = 28; /* i: waiting for 32-bit length (gzip) */\nvar DONE = 29; /* finished check, done -- remain here until reset */\nvar BAD = 30; /* got a data error -- remain here until reset */\nvar MEM = 31; /* got an inflate() memory error -- remain here until reset */\nvar SYNC = 32; /* looking for synchronization bytes to restart inflate() */\n\n/* ===========================================================================*/\n\n\n\nvar ENOUGH_LENS = 852;\nvar ENOUGH_DISTS = 592;\n//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);\n\nvar MAX_WBITS = 15;\n/* 32K LZ77 window */\nvar DEF_WBITS = MAX_WBITS;\n\n\nfunction zswap32(q) {\n return (((q >>> 24) & 0xff) +\n ((q >>> 8) & 0xff00) +\n ((q & 0xff00) << 8) +\n ((q & 0xff) << 24));\n}\n\n\nfunction InflateState() {\n this.mode = 0; /* current inflate mode */\n this.last = false; /* true if processing last block */\n this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */\n this.havedict = false; /* true if dictionary provided */\n this.flags = 0; /* gzip header method and flags (0 if zlib) */\n this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */\n this.check = 0; /* protected copy of check value */\n this.total = 0; /* protected copy of output count */\n // TODO: may be {}\n this.head = null; /* where to save gzip header 
information */\n\n /* sliding window */\n this.wbits = 0; /* log base 2 of requested window size */\n this.wsize = 0; /* window size or zero if not using window */\n this.whave = 0; /* valid bytes in the window */\n this.wnext = 0; /* window write index */\n this.window = null; /* allocated sliding window, if needed */\n\n /* bit accumulator */\n this.hold = 0; /* input bit accumulator */\n this.bits = 0; /* number of bits in \"in\" */\n\n /* for string and stored block copying */\n this.length = 0; /* literal or length of data to copy */\n this.offset = 0; /* distance back to copy string from */\n\n /* for table and code decoding */\n this.extra = 0; /* extra bits needed */\n\n /* fixed and dynamic code tables */\n this.lencode = null; /* starting table for length/literal codes */\n this.distcode = null; /* starting table for distance codes */\n this.lenbits = 0; /* index bits for lencode */\n this.distbits = 0; /* index bits for distcode */\n\n /* dynamic table building */\n this.ncode = 0; /* number of code length code lengths */\n this.nlen = 0; /* number of length code lengths */\n this.ndist = 0; /* number of distance code lengths */\n this.have = 0; /* number of code lengths in lens[] */\n this.next = null; /* next available space in codes[] */\n\n this.lens = new utils.Buf16(320); /* temporary storage for code lengths */\n this.work = new utils.Buf16(288); /* work area for code table building */\n\n /*\n because we don't have pointers in js, we use lencode and distcode directly\n as buffers so we don't need codes\n */\n //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */\n this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */\n this.distdyn = null; /* dynamic table for distance codes (JS specific) */\n this.sane = 0; /* if false, allow invalid distance too far */\n this.back = 0; /* bits back of last unprocessed length/lit */\n this.was = 0; /* initial length of match */\n}\n\nfunction inflateResetKeep(strm) {\n var state;\n\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\n state = strm.state;\n strm.total_in = strm.total_out = state.total = 0;\n strm.msg = ''; /*Z_NULL*/\n if (state.wrap) { /* to support ill-conceived Java test suite */\n strm.adler = state.wrap & 1;\n }\n state.mode = HEAD;\n state.last = 0;\n state.havedict = 0;\n state.dmax = 32768;\n state.head = null/*Z_NULL*/;\n state.hold = 0;\n state.bits = 0;\n //state.lencode = state.distcode = state.next = state.codes;\n state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS);\n state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS);\n\n state.sane = 1;\n state.back = -1;\n //Tracev((stderr, \"inflate: reset\\n\"));\n return Z_OK;\n}\n\nfunction inflateReset(strm) {\n var state;\n\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\n state = strm.state;\n state.wsize = 0;\n state.whave = 0;\n state.wnext = 0;\n return inflateResetKeep(strm);\n\n}\n\nfunction inflateReset2(strm, windowBits) {\n var wrap;\n var state;\n\n /* get the state */\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\n state = strm.state;\n\n /* extract wrap request from windowBits parameter */\n if (windowBits < 0) {\n wrap = 0;\n windowBits = -windowBits;\n }\n else {\n wrap = (windowBits >> 4) + 1;\n if (windowBits < 48) {\n windowBits &= 15;\n }\n }\n\n /* set number of window bits, free window if different */\n if (windowBits && (windowBits < 8 || windowBits > 15)) {\n return Z_STREAM_ERROR;\n }\n if (state.window !== null && state.wbits !== windowBits) {\n state.window = 
null;\n }\n\n /* update state and reset the rest of it */\n state.wrap = wrap;\n state.wbits = windowBits;\n return inflateReset(strm);\n}\n\nfunction inflateInit2(strm, windowBits) {\n var ret;\n var state;\n\n if (!strm) { return Z_STREAM_ERROR; }\n //strm.msg = Z_NULL; /* in case we return an error */\n\n state = new InflateState();\n\n //if (state === Z_NULL) return Z_MEM_ERROR;\n //Tracev((stderr, \"inflate: allocated\\n\"));\n strm.state = state;\n state.window = null/*Z_NULL*/;\n ret = inflateReset2(strm, windowBits);\n if (ret !== Z_OK) {\n strm.state = null/*Z_NULL*/;\n }\n return ret;\n}\n\nfunction inflateInit(strm) {\n return inflateInit2(strm, DEF_WBITS);\n}\n\n\n/*\n Return state with length and distance decoding tables and index sizes set to\n fixed code decoding. Normally this returns fixed tables from inffixed.h.\n If BUILDFIXED is defined, then instead this routine builds the tables the\n first time it's called, and returns those tables the first time and\n thereafter. This reduces the size of the code by about 2K bytes, in\n exchange for a little execution time. However, BUILDFIXED should not be\n used for threaded applications, since the rewriting of the tables and virgin\n may not be thread-safe.\n */\nvar virgin = true;\n\nvar lenfix, distfix; // We have no pointers in JS, so keep tables separate\n\nfunction fixedtables(state) {\n /* build fixed huffman tables if first call (may not be thread safe) */\n if (virgin) {\n var sym;\n\n lenfix = new utils.Buf32(512);\n distfix = new utils.Buf32(32);\n\n /* literal/length table */\n sym = 0;\n while (sym < 144) { state.lens[sym++] = 8; }\n while (sym < 256) { state.lens[sym++] = 9; }\n while (sym < 280) { state.lens[sym++] = 7; }\n while (sym < 288) { state.lens[sym++] = 8; }\n\n inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 });\n\n /* distance table */\n sym = 0;\n while (sym < 32) { state.lens[sym++] = 5; }\n\n inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 });\n\n /* do this just once */\n virgin = false;\n }\n\n state.lencode = lenfix;\n state.lenbits = 9;\n state.distcode = distfix;\n state.distbits = 5;\n}\n\n\n/*\n Update the window with the last wsize (normally 32K) bytes written before\n returning. If window does not exist yet, create it. 
This is only called\n when a window is already in use, or when output has been written during this\n inflate call, but the end of the deflate stream has not been reached yet.\n It is also called to create a window for dictionary data when a dictionary\n is loaded.\n\n Providing output buffers larger than 32K to inflate() should provide a speed\n advantage, since only the last 32K of output is copied to the sliding window\n upon return from inflate(), and since all distances after the first 32K of\n output will fall in the output data, making match copies simpler and faster.\n The advantage may be dependent on the size of the processor's data caches.\n */\nfunction updatewindow(strm, src, end, copy) {\n var dist;\n var state = strm.state;\n\n /* if it hasn't been done already, allocate space for the window */\n if (state.window === null) {\n state.wsize = 1 << state.wbits;\n state.wnext = 0;\n state.whave = 0;\n\n state.window = new utils.Buf8(state.wsize);\n }\n\n /* copy state->wsize or less output bytes into the circular window */\n if (copy >= state.wsize) {\n utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0);\n state.wnext = 0;\n state.whave = state.wsize;\n }\n else {\n dist = state.wsize - state.wnext;\n if (dist > copy) {\n dist = copy;\n }\n //zmemcpy(state->window + state->wnext, end - copy, dist);\n utils.arraySet(state.window, src, end - copy, dist, state.wnext);\n copy -= dist;\n if (copy) {\n //zmemcpy(state->window, end - copy, copy);\n utils.arraySet(state.window, src, end - copy, copy, 0);\n state.wnext = copy;\n state.whave = state.wsize;\n }\n else {\n state.wnext += dist;\n if (state.wnext === state.wsize) { state.wnext = 0; }\n if (state.whave < state.wsize) { state.whave += dist; }\n }\n }\n return 0;\n}\n\nfunction inflate(strm, flush) {\n var state;\n var input, output; // input/output buffers\n var next; /* next input INDEX */\n var put; /* next output INDEX */\n var have, left; /* available input and output */\n var hold; /* bit buffer */\n var bits; /* bits in bit buffer */\n var _in, _out; /* save starting available input and output */\n var copy; /* number of stored or match bytes to copy */\n var from; /* where to copy match bytes from */\n var from_source;\n var here = 0; /* current decoding table entry */\n var here_bits, here_op, here_val; // paked \"here\" denormalized (JS specific)\n //var last; /* parent table entry */\n var last_bits, last_op, last_val; // paked \"last\" denormalized (JS specific)\n var len; /* length to copy for repeats, bits to drop */\n var ret; /* return code */\n var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */\n var opts;\n\n var n; // temporary var for NEED_BITS\n\n var order = /* permutation of code lengths */\n [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ];\n\n\n if (!strm || !strm.state || !strm.output ||\n (!strm.input && strm.avail_in !== 0)) {\n return Z_STREAM_ERROR;\n }\n\n state = strm.state;\n if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */\n\n\n //--- LOAD() ---\n put = strm.next_out;\n output = strm.output;\n left = strm.avail_out;\n next = strm.next_in;\n input = strm.input;\n have = strm.avail_in;\n hold = state.hold;\n bits = state.bits;\n //---\n\n _in = have;\n _out = left;\n ret = Z_OK;\n\n inf_leave: // goto emulation\n for (;;) {\n switch (state.mode) {\n case HEAD:\n if (state.wrap === 0) {\n state.mode = TYPEDO;\n break;\n }\n //=== NEEDBITS(16);\n while (bits < 16) {\n if (have === 0) { break inf_leave; }\n have--;\n hold 
+= input[next++] << bits;\n bits += 8;\n }\n //===//\n if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */\n state.check = 0/*crc32(0L, Z_NULL, 0)*/;\n //=== CRC2(state.check, hold);\n hbuf[0] = hold & 0xff;\n hbuf[1] = (hold >>> 8) & 0xff;\n state.check = crc32(state.check, hbuf, 2, 0);\n //===//\n\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = FLAGS;\n break;\n }\n state.flags = 0; /* expect zlib header */\n if (state.head) {\n state.head.done = false;\n }\n if (!(state.wrap & 1) || /* check if zlib header allowed */\n (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) {\n strm.msg = 'incorrect header check';\n state.mode = BAD;\n break;\n }\n if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) {\n strm.msg = 'unknown compression method';\n state.mode = BAD;\n break;\n }\n //--- DROPBITS(4) ---//\n hold >>>= 4;\n bits -= 4;\n //---//\n len = (hold & 0x0f)/*BITS(4)*/ + 8;\n if (state.wbits === 0) {\n state.wbits = len;\n }\n else if (len > state.wbits) {\n strm.msg = 'invalid window size';\n state.mode = BAD;\n break;\n }\n state.dmax = 1 << len;\n //Tracev((stderr, \"inflate: zlib header ok\\n\"));\n strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;\n state.mode = hold & 0x200 ? DICTID : TYPE;\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n break;\n case FLAGS:\n //=== NEEDBITS(16); */\n while (bits < 16) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.flags = hold;\n if ((state.flags & 0xff) !== Z_DEFLATED) {\n strm.msg = 'unknown compression method';\n state.mode = BAD;\n break;\n }\n if (state.flags & 0xe000) {\n strm.msg = 'unknown header flags set';\n state.mode = BAD;\n break;\n }\n if (state.head) {\n state.head.text = ((hold >> 8) & 1);\n }\n if (state.flags & 0x0200) {\n //=== CRC2(state.check, hold);\n hbuf[0] = hold & 0xff;\n hbuf[1] = (hold >>> 8) & 0xff;\n state.check = crc32(state.check, hbuf, 2, 0);\n //===//\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = TIME;\n /* falls through */\n case TIME:\n //=== NEEDBITS(32); */\n while (bits < 32) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n if (state.head) {\n state.head.time = hold;\n }\n if (state.flags & 0x0200) {\n //=== CRC4(state.check, hold)\n hbuf[0] = hold & 0xff;\n hbuf[1] = (hold >>> 8) & 0xff;\n hbuf[2] = (hold >>> 16) & 0xff;\n hbuf[3] = (hold >>> 24) & 0xff;\n state.check = crc32(state.check, hbuf, 4, 0);\n //===\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = OS;\n /* falls through */\n case OS:\n //=== NEEDBITS(16); */\n while (bits < 16) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n if (state.head) {\n state.head.xflags = (hold & 0xff);\n state.head.os = (hold >> 8);\n }\n if (state.flags & 0x0200) {\n //=== CRC2(state.check, hold);\n hbuf[0] = hold & 0xff;\n hbuf[1] = (hold >>> 8) & 0xff;\n state.check = crc32(state.check, hbuf, 2, 0);\n //===//\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = EXLEN;\n /* falls through */\n case EXLEN:\n if (state.flags & 0x0400) {\n //=== NEEDBITS(16); */\n while (bits < 16) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.length = hold;\n if (state.head) {\n state.head.extra_len = hold;\n }\n if (state.flags & 0x0200) {\n //=== CRC2(state.check, hold);\n hbuf[0] = hold & 0xff;\n hbuf[1] = 
(hold >>> 8) & 0xff;\n state.check = crc32(state.check, hbuf, 2, 0);\n //===//\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n }\n else if (state.head) {\n state.head.extra = null/*Z_NULL*/;\n }\n state.mode = EXTRA;\n /* falls through */\n case EXTRA:\n if (state.flags & 0x0400) {\n copy = state.length;\n if (copy > have) { copy = have; }\n if (copy) {\n if (state.head) {\n len = state.head.extra_len - state.length;\n if (!state.head.extra) {\n // Use untyped array for more convenient processing later\n state.head.extra = new Array(state.head.extra_len);\n }\n utils.arraySet(\n state.head.extra,\n input,\n next,\n // extra field is limited to 65536 bytes\n // - no need for additional size check\n copy,\n /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/\n len\n );\n //zmemcpy(state.head.extra + len, next,\n // len + copy > state.head.extra_max ?\n // state.head.extra_max - len : copy);\n }\n if (state.flags & 0x0200) {\n state.check = crc32(state.check, input, copy, next);\n }\n have -= copy;\n next += copy;\n state.length -= copy;\n }\n if (state.length) { break inf_leave; }\n }\n state.length = 0;\n state.mode = NAME;\n /* falls through */\n case NAME:\n if (state.flags & 0x0800) {\n if (have === 0) { break inf_leave; }\n copy = 0;\n do {\n // TODO: 2 or 1 bytes?\n len = input[next + copy++];\n /* use constant limit because in js we should not preallocate memory */\n if (state.head && len &&\n (state.length < 65536 /*state.head.name_max*/)) {\n state.head.name += String.fromCharCode(len);\n }\n } while (len && copy < have);\n\n if (state.flags & 0x0200) {\n state.check = crc32(state.check, input, copy, next);\n }\n have -= copy;\n next += copy;\n if (len) { break inf_leave; }\n }\n else if (state.head) {\n state.head.name = null;\n }\n state.length = 0;\n state.mode = COMMENT;\n /* falls through */\n case COMMENT:\n if (state.flags & 0x1000) {\n if (have === 0) { break inf_leave; }\n copy = 0;\n do {\n len = input[next + copy++];\n /* use constant limit because in js we should not preallocate memory */\n if (state.head && len &&\n (state.length < 65536 /*state.head.comm_max*/)) {\n state.head.comment += String.fromCharCode(len);\n }\n } while (len && copy < have);\n if (state.flags & 0x0200) {\n state.check = crc32(state.check, input, copy, next);\n }\n have -= copy;\n next += copy;\n if (len) { break inf_leave; }\n }\n else if (state.head) {\n state.head.comment = null;\n }\n state.mode = HCRC;\n /* falls through */\n case HCRC:\n if (state.flags & 0x0200) {\n //=== NEEDBITS(16); */\n while (bits < 16) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n if (hold !== (state.check & 0xffff)) {\n strm.msg = 'header crc mismatch';\n state.mode = BAD;\n break;\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n }\n if (state.head) {\n state.head.hcrc = ((state.flags >> 9) & 1);\n state.head.done = true;\n }\n strm.adler = state.check = 0;\n state.mode = TYPE;\n break;\n case DICTID:\n //=== NEEDBITS(32); */\n while (bits < 32) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n strm.adler = state.check = zswap32(hold);\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = DICT;\n /* falls through */\n case DICT:\n if (state.havedict === 0) {\n //--- RESTORE() ---\n strm.next_out = put;\n strm.avail_out = left;\n strm.next_in = next;\n strm.avail_in = have;\n state.hold = hold;\n state.bits = bits;\n //---\n 
return Z_NEED_DICT;\n }\n strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;\n state.mode = TYPE;\n /* falls through */\n case TYPE:\n if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; }\n /* falls through */\n case TYPEDO:\n if (state.last) {\n //--- BYTEBITS() ---//\n hold >>>= bits & 7;\n bits -= bits & 7;\n //---//\n state.mode = CHECK;\n break;\n }\n //=== NEEDBITS(3); */\n while (bits < 3) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.last = (hold & 0x01)/*BITS(1)*/;\n //--- DROPBITS(1) ---//\n hold >>>= 1;\n bits -= 1;\n //---//\n\n switch ((hold & 0x03)/*BITS(2)*/) {\n case 0: /* stored block */\n //Tracev((stderr, \"inflate: stored block%s\\n\",\n // state.last ? \" (last)\" : \"\"));\n state.mode = STORED;\n break;\n case 1: /* fixed block */\n fixedtables(state);\n //Tracev((stderr, \"inflate: fixed codes block%s\\n\",\n // state.last ? \" (last)\" : \"\"));\n state.mode = LEN_; /* decode codes */\n if (flush === Z_TREES) {\n //--- DROPBITS(2) ---//\n hold >>>= 2;\n bits -= 2;\n //---//\n break inf_leave;\n }\n break;\n case 2: /* dynamic block */\n //Tracev((stderr, \"inflate: dynamic codes block%s\\n\",\n // state.last ? \" (last)\" : \"\"));\n state.mode = TABLE;\n break;\n case 3:\n strm.msg = 'invalid block type';\n state.mode = BAD;\n }\n //--- DROPBITS(2) ---//\n hold >>>= 2;\n bits -= 2;\n //---//\n break;\n case STORED:\n //--- BYTEBITS() ---// /* go to byte boundary */\n hold >>>= bits & 7;\n bits -= bits & 7;\n //---//\n //=== NEEDBITS(32); */\n while (bits < 32) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) {\n strm.msg = 'invalid stored block lengths';\n state.mode = BAD;\n break;\n }\n state.length = hold & 0xffff;\n //Tracev((stderr, \"inflate: stored length %u\\n\",\n // state.length));\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n state.mode = COPY_;\n if (flush === Z_TREES) { break inf_leave; }\n /* falls through */\n case COPY_:\n state.mode = COPY;\n /* falls through */\n case COPY:\n copy = state.length;\n if (copy) {\n if (copy > have) { copy = have; }\n if (copy > left) { copy = left; }\n if (copy === 0) { break inf_leave; }\n //--- zmemcpy(put, next, copy); ---\n utils.arraySet(output, input, next, copy, put);\n //---//\n have -= copy;\n next += copy;\n left -= copy;\n put += copy;\n state.length -= copy;\n break;\n }\n //Tracev((stderr, \"inflate: stored end\\n\"));\n state.mode = TYPE;\n break;\n case TABLE:\n //=== NEEDBITS(14); */\n while (bits < 14) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257;\n //--- DROPBITS(5) ---//\n hold >>>= 5;\n bits -= 5;\n //---//\n state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1;\n //--- DROPBITS(5) ---//\n hold >>>= 5;\n bits -= 5;\n //---//\n state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4;\n //--- DROPBITS(4) ---//\n hold >>>= 4;\n bits -= 4;\n //---//\n//#ifndef PKZIP_BUG_WORKAROUND\n if (state.nlen > 286 || state.ndist > 30) {\n strm.msg = 'too many length or distance symbols';\n state.mode = BAD;\n break;\n }\n//#endif\n //Tracev((stderr, \"inflate: table sizes ok\\n\"));\n state.have = 0;\n state.mode = LENLENS;\n /* falls through */\n case LENLENS:\n while (state.have < state.ncode) {\n //=== NEEDBITS(3);\n while (bits < 3) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << 
bits;\n bits += 8;\n }\n //===//\n state.lens[order[state.have++]] = (hold & 0x07);//BITS(3);\n //--- DROPBITS(3) ---//\n hold >>>= 3;\n bits -= 3;\n //---//\n }\n while (state.have < 19) {\n state.lens[order[state.have++]] = 0;\n }\n // We have separate tables & no pointers. 2 commented lines below not needed.\n //state.next = state.codes;\n //state.lencode = state.next;\n // Switch to use dynamic table\n state.lencode = state.lendyn;\n state.lenbits = 7;\n\n opts = { bits: state.lenbits };\n ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts);\n state.lenbits = opts.bits;\n\n if (ret) {\n strm.msg = 'invalid code lengths set';\n state.mode = BAD;\n break;\n }\n //Tracev((stderr, \"inflate: code lengths ok\\n\"));\n state.have = 0;\n state.mode = CODELENS;\n /* falls through */\n case CODELENS:\n while (state.have < state.nlen + state.ndist) {\n for (;;) {\n here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/\n here_bits = here >>> 24;\n here_op = (here >>> 16) & 0xff;\n here_val = here & 0xffff;\n\n if ((here_bits) <= bits) { break; }\n //--- PULLBYTE() ---//\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n //---//\n }\n if (here_val < 16) {\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n state.lens[state.have++] = here_val;\n }\n else {\n if (here_val === 16) {\n //=== NEEDBITS(here.bits + 2);\n n = here_bits + 2;\n while (bits < n) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n if (state.have === 0) {\n strm.msg = 'invalid bit length repeat';\n state.mode = BAD;\n break;\n }\n len = state.lens[state.have - 1];\n copy = 3 + (hold & 0x03);//BITS(2);\n //--- DROPBITS(2) ---//\n hold >>>= 2;\n bits -= 2;\n //---//\n }\n else if (here_val === 17) {\n //=== NEEDBITS(here.bits + 3);\n n = here_bits + 3;\n while (bits < n) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n len = 0;\n copy = 3 + (hold & 0x07);//BITS(3);\n //--- DROPBITS(3) ---//\n hold >>>= 3;\n bits -= 3;\n //---//\n }\n else {\n //=== NEEDBITS(here.bits + 7);\n n = here_bits + 7;\n while (bits < n) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n len = 0;\n copy = 11 + (hold & 0x7f);//BITS(7);\n //--- DROPBITS(7) ---//\n hold >>>= 7;\n bits -= 7;\n //---//\n }\n if (state.have + copy > state.nlen + state.ndist) {\n strm.msg = 'invalid bit length repeat';\n state.mode = BAD;\n break;\n }\n while (copy--) {\n state.lens[state.have++] = len;\n }\n }\n }\n\n /* handle error breaks in while */\n if (state.mode === BAD) { break; }\n\n /* check for end-of-block code (better have one) */\n if (state.lens[256] === 0) {\n strm.msg = 'invalid code -- missing end-of-block';\n state.mode = BAD;\n break;\n }\n\n /* build code tables -- note: do not change the lenbits or distbits\n values here (9 and 6) without reading the comments in inftrees.h\n concerning the ENOUGH constants, which depend on those values */\n state.lenbits = 9;\n\n opts = { bits: state.lenbits };\n ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, 
state.work, opts);\n // We have separate tables & no pointers. 2 commented lines below not needed.\n // state.next_index = opts.table_index;\n state.lenbits = opts.bits;\n // state.lencode = state.next;\n\n if (ret) {\n strm.msg = 'invalid literal/lengths set';\n state.mode = BAD;\n break;\n }\n\n state.distbits = 6;\n //state.distcode.copy(state.codes);\n // Switch to use dynamic table\n state.distcode = state.distdyn;\n opts = { bits: state.distbits };\n ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts);\n // We have separate tables & no pointers. 2 commented lines below not needed.\n // state.next_index = opts.table_index;\n state.distbits = opts.bits;\n // state.distcode = state.next;\n\n if (ret) {\n strm.msg = 'invalid distances set';\n state.mode = BAD;\n break;\n }\n //Tracev((stderr, 'inflate: codes ok\\n'));\n state.mode = LEN_;\n if (flush === Z_TREES) { break inf_leave; }\n /* falls through */\n case LEN_:\n state.mode = LEN;\n /* falls through */\n case LEN:\n if (have >= 6 && left >= 258) {\n //--- RESTORE() ---\n strm.next_out = put;\n strm.avail_out = left;\n strm.next_in = next;\n strm.avail_in = have;\n state.hold = hold;\n state.bits = bits;\n //---\n inflate_fast(strm, _out);\n //--- LOAD() ---\n put = strm.next_out;\n output = strm.output;\n left = strm.avail_out;\n next = strm.next_in;\n input = strm.input;\n have = strm.avail_in;\n hold = state.hold;\n bits = state.bits;\n //---\n\n if (state.mode === TYPE) {\n state.back = -1;\n }\n break;\n }\n state.back = 0;\n for (;;) {\n here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/\n here_bits = here >>> 24;\n here_op = (here >>> 16) & 0xff;\n here_val = here & 0xffff;\n\n if (here_bits <= bits) { break; }\n //--- PULLBYTE() ---//\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n //---//\n }\n if (here_op && (here_op & 0xf0) === 0) {\n last_bits = here_bits;\n last_op = here_op;\n last_val = here_val;\n for (;;) {\n here = state.lencode[last_val +\n ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];\n here_bits = here >>> 24;\n here_op = (here >>> 16) & 0xff;\n here_val = here & 0xffff;\n\n if ((last_bits + here_bits) <= bits) { break; }\n //--- PULLBYTE() ---//\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n //---//\n }\n //--- DROPBITS(last.bits) ---//\n hold >>>= last_bits;\n bits -= last_bits;\n //---//\n state.back += last_bits;\n }\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n state.back += here_bits;\n state.length = here_val;\n if (here_op === 0) {\n //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?\n // \"inflate: literal '%c'\\n\" :\n // \"inflate: literal 0x%02x\\n\", here.val));\n state.mode = LIT;\n break;\n }\n if (here_op & 32) {\n //Tracevv((stderr, \"inflate: end of block\\n\"));\n state.back = -1;\n state.mode = TYPE;\n break;\n }\n if (here_op & 64) {\n strm.msg = 'invalid literal/length code';\n state.mode = BAD;\n break;\n }\n state.extra = here_op & 15;\n state.mode = LENEXT;\n /* falls through */\n case LENEXT:\n if (state.extra) {\n //=== NEEDBITS(state.extra);\n n = state.extra;\n while (bits < n) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;\n //--- DROPBITS(state.extra) ---//\n hold >>>= 
state.extra;\n bits -= state.extra;\n //---//\n state.back += state.extra;\n }\n //Tracevv((stderr, \"inflate: length %u\\n\", state.length));\n state.was = state.length;\n state.mode = DIST;\n /* falls through */\n case DIST:\n for (;;) {\n here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/\n here_bits = here >>> 24;\n here_op = (here >>> 16) & 0xff;\n here_val = here & 0xffff;\n\n if ((here_bits) <= bits) { break; }\n //--- PULLBYTE() ---//\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n //---//\n }\n if ((here_op & 0xf0) === 0) {\n last_bits = here_bits;\n last_op = here_op;\n last_val = here_val;\n for (;;) {\n here = state.distcode[last_val +\n ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];\n here_bits = here >>> 24;\n here_op = (here >>> 16) & 0xff;\n here_val = here & 0xffff;\n\n if ((last_bits + here_bits) <= bits) { break; }\n //--- PULLBYTE() ---//\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n //---//\n }\n //--- DROPBITS(last.bits) ---//\n hold >>>= last_bits;\n bits -= last_bits;\n //---//\n state.back += last_bits;\n }\n //--- DROPBITS(here.bits) ---//\n hold >>>= here_bits;\n bits -= here_bits;\n //---//\n state.back += here_bits;\n if (here_op & 64) {\n strm.msg = 'invalid distance code';\n state.mode = BAD;\n break;\n }\n state.offset = here_val;\n state.extra = (here_op) & 15;\n state.mode = DISTEXT;\n /* falls through */\n case DISTEXT:\n if (state.extra) {\n //=== NEEDBITS(state.extra);\n n = state.extra;\n while (bits < n) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;\n //--- DROPBITS(state.extra) ---//\n hold >>>= state.extra;\n bits -= state.extra;\n //---//\n state.back += state.extra;\n }\n//#ifdef INFLATE_STRICT\n if (state.offset > state.dmax) {\n strm.msg = 'invalid distance too far back';\n state.mode = BAD;\n break;\n }\n//#endif\n //Tracevv((stderr, \"inflate: distance %u\\n\", state.offset));\n state.mode = MATCH;\n /* falls through */\n case MATCH:\n if (left === 0) { break inf_leave; }\n copy = _out - left;\n if (state.offset > copy) { /* copy from window */\n copy = state.offset - copy;\n if (copy > state.whave) {\n if (state.sane) {\n strm.msg = 'invalid distance too far back';\n state.mode = BAD;\n break;\n }\n// (!) 
This block is disabled in zlib defaults,\n// don't enable it for binary compatibility\n//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR\n// Trace((stderr, \"inflate.c too far\\n\"));\n// copy -= state.whave;\n// if (copy > state.length) { copy = state.length; }\n// if (copy > left) { copy = left; }\n// left -= copy;\n// state.length -= copy;\n// do {\n// output[put++] = 0;\n// } while (--copy);\n// if (state.length === 0) { state.mode = LEN; }\n// break;\n//#endif\n }\n if (copy > state.wnext) {\n copy -= state.wnext;\n from = state.wsize - copy;\n }\n else {\n from = state.wnext - copy;\n }\n if (copy > state.length) { copy = state.length; }\n from_source = state.window;\n }\n else { /* copy from output */\n from_source = output;\n from = put - state.offset;\n copy = state.length;\n }\n if (copy > left) { copy = left; }\n left -= copy;\n state.length -= copy;\n do {\n output[put++] = from_source[from++];\n } while (--copy);\n if (state.length === 0) { state.mode = LEN; }\n break;\n case LIT:\n if (left === 0) { break inf_leave; }\n output[put++] = state.length;\n left--;\n state.mode = LEN;\n break;\n case CHECK:\n if (state.wrap) {\n //=== NEEDBITS(32);\n while (bits < 32) {\n if (have === 0) { break inf_leave; }\n have--;\n // Use '|' instead of '+' to make sure that result is signed\n hold |= input[next++] << bits;\n bits += 8;\n }\n //===//\n _out -= left;\n strm.total_out += _out;\n state.total += _out;\n if (_out) {\n strm.adler = state.check =\n /*UPDATE(state.check, put - _out, _out);*/\n (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out));\n\n }\n _out = left;\n // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too\n if ((state.flags ? hold : zswap32(hold)) !== state.check) {\n strm.msg = 'incorrect data check';\n state.mode = BAD;\n break;\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n //Tracev((stderr, \"inflate: check matches trailer\\n\"));\n }\n state.mode = LENGTH;\n /* falls through */\n case LENGTH:\n if (state.wrap && state.flags) {\n //=== NEEDBITS(32);\n while (bits < 32) {\n if (have === 0) { break inf_leave; }\n have--;\n hold += input[next++] << bits;\n bits += 8;\n }\n //===//\n if (hold !== (state.total & 0xffffffff)) {\n strm.msg = 'incorrect length check';\n state.mode = BAD;\n break;\n }\n //=== INITBITS();\n hold = 0;\n bits = 0;\n //===//\n //Tracev((stderr, \"inflate: length matches trailer\\n\"));\n }\n state.mode = DONE;\n /* falls through */\n case DONE:\n ret = Z_STREAM_END;\n break inf_leave;\n case BAD:\n ret = Z_DATA_ERROR;\n break inf_leave;\n case MEM:\n return Z_MEM_ERROR;\n case SYNC:\n /* falls through */\n default:\n return Z_STREAM_ERROR;\n }\n }\n\n // inf_leave <- here is real place for \"goto inf_leave\", emulated via \"break inf_leave\"\n\n /*\n Return from inflate(), updating the total counts and the check value.\n If there was no progress during the inflate() call, return a buffer\n error. 
Call updatewindow() to create and/or update the window state.\n Note: a memory error from inflate() is non-recoverable.\n */\n\n //--- RESTORE() ---\n strm.next_out = put;\n strm.avail_out = left;\n strm.next_in = next;\n strm.avail_in = have;\n state.hold = hold;\n state.bits = bits;\n //---\n\n if (state.wsize || (_out !== strm.avail_out && state.mode < BAD &&\n (state.mode < CHECK || flush !== Z_FINISH))) {\n if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) {\n state.mode = MEM;\n return Z_MEM_ERROR;\n }\n }\n _in -= strm.avail_in;\n _out -= strm.avail_out;\n strm.total_in += _in;\n strm.total_out += _out;\n state.total += _out;\n if (state.wrap && _out) {\n strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/\n (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out));\n }\n strm.data_type = state.bits + (state.last ? 64 : 0) +\n (state.mode === TYPE ? 128 : 0) +\n (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0);\n if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) {\n ret = Z_BUF_ERROR;\n }\n return ret;\n}\n\nfunction inflateEnd(strm) {\n\n if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) {\n return Z_STREAM_ERROR;\n }\n\n var state = strm.state;\n if (state.window) {\n state.window = null;\n }\n strm.state = null;\n return Z_OK;\n}\n\nfunction inflateGetHeader(strm, head) {\n var state;\n\n /* check state */\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\n state = strm.state;\n if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; }\n\n /* save header structure */\n state.head = head;\n head.done = false;\n return Z_OK;\n}\n\nfunction inflateSetDictionary(strm, dictionary) {\n var dictLength = dictionary.length;\n\n var state;\n var dictid;\n var ret;\n\n /* check state */\n if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; }\n state = strm.state;\n\n if (state.wrap !== 0 && state.mode !== DICT) {\n return Z_STREAM_ERROR;\n }\n\n /* check for correct dictionary identifier */\n if (state.mode === DICT) {\n dictid = 1; /* adler32(0, null, 0)*/\n /* dictid = adler32(dictid, dictionary, dictLength); */\n dictid = adler32(dictid, dictionary, dictLength, 0);\n if (dictid !== state.check) {\n return Z_DATA_ERROR;\n }\n }\n /* copy dictionary to window using updatewindow(), which will amend the\n existing dictionary if appropriate */\n ret = updatewindow(strm, dictionary, dictLength, dictLength);\n if (ret) {\n state.mode = MEM;\n return Z_MEM_ERROR;\n }\n state.havedict = 1;\n // Tracev((stderr, \"inflate: dictionary set\\n\"));\n return Z_OK;\n}\n\nexports.inflateReset = inflateReset;\nexports.inflateReset2 = inflateReset2;\nexports.inflateResetKeep = inflateResetKeep;\nexports.inflateInit = inflateInit;\nexports.inflateInit2 = inflateInit2;\nexports.inflate = inflate;\nexports.inflateEnd = inflateEnd;\nexports.inflateGetHeader = inflateGetHeader;\nexports.inflateSetDictionary = inflateSetDictionary;\nexports.inflateInfo = 'pako inflate (from Nodeca project)';\n\n/* Not implemented\nexports.inflateCopy = inflateCopy;\nexports.inflateGetDictionary = inflateGetDictionary;\nexports.inflateMark = inflateMark;\nexports.inflatePrime = inflatePrime;\nexports.inflateSync = inflateSync;\nexports.inflateSyncPoint = inflateSyncPoint;\nexports.inflateUndermine = inflateUndermine;\n*/\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inflate.js?");
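// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// The MATCH state above resolves an LZ77 back-reference: it copies
// state.length bytes starting state.offset bytes behind the current write
// position, reading from the sliding window when the reference starts before
// the output produced by this call, and otherwise from the output buffer
// itself. The standalone helper below sketches only the output-buffer case;
// the name and parameters are illustrative, and nothing in the bundle calls it.
function copyBackReference(output, put, offset, length) {
  // Byte-by-byte copy so overlapping references (offset < length) repeat the
  // bytes that were just written, e.g. offset = 1 repeats the previous byte.
  var from = put - offset;
  while (length--) {
    output[put++] = output[from++];
  }
  return put; // new write position
}
// Conceptually: with output = [97, 98, 99, 0, 0, 0],
// copyBackReference(output, 3, 3, 3) fills it to [97, 98, 99, 97, 98, 99].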
738
739/***/ }),
740
741/***/ "./node_modules/pako/lib/zlib/inftrees.js":
742/*!************************************************!*\
743 !*** ./node_modules/pako/lib/zlib/inftrees.js ***!
744 \************************************************/
745/*! no static exports found */
746/***/ (function(module, exports, __webpack_require__) {
747
748"use strict";
749eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nvar utils = __webpack_require__(/*! ../utils/common */ \"./node_modules/pako/lib/utils/common.js\");\n\nvar MAXBITS = 15;\nvar ENOUGH_LENS = 852;\nvar ENOUGH_DISTS = 592;\n//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);\n\nvar CODES = 0;\nvar LENS = 1;\nvar DISTS = 2;\n\nvar lbase = [ /* Length codes 257..285 base */\n 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,\n 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0\n];\n\nvar lext = [ /* Length codes 257..285 extra */\n 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,\n 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78\n];\n\nvar dbase = [ /* Distance codes 0..29 base */\n 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,\n 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,\n 8193, 12289, 16385, 24577, 0, 0\n];\n\nvar dext = [ /* Distance codes 0..29 extra */\n 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,\n 23, 23, 24, 24, 25, 25, 26, 26, 27, 27,\n 28, 28, 29, 29, 64, 64\n];\n\nmodule.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts)\n{\n var bits = opts.bits;\n //here = opts.here; /* table entry for duplication */\n\n var len = 0; /* a code's length in bits */\n var sym = 0; /* index of code symbols */\n var min = 0, max = 0; /* minimum and maximum code lengths */\n var root = 0; /* number of index bits for root table */\n var curr = 0; /* number of index bits for current table */\n var drop = 0; /* code bits to drop for sub-table */\n var left = 0; /* number of prefix codes available */\n var used = 0; /* code entries in table used */\n var huff = 0; /* Huffman code */\n var incr; /* for incrementing code, index */\n var fill; /* index for replicating entries */\n var low; /* low bits for current root entry */\n var mask; /* mask for low root bits */\n var next; /* next available space in table */\n var base = null; /* base value table to use */\n var base_index = 0;\n// var shoextra; /* extra bits table to use */\n var end; /* use base and extra for symbol > end */\n var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */\n var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */\n var extra = null;\n var extra_index = 0;\n\n var here_bits, here_op, here_val;\n\n /*\n Process a set of code lengths to create a canonical Huffman code. The\n code lengths are lens[0..codes-1]. Each length corresponds to the\n symbols 0..codes-1. 
The Huffman code is generated by first sorting the\n symbols by length from short to long, and retaining the symbol order\n for codes with equal lengths. Then the code starts with all zero bits\n for the first code of the shortest length, and the codes are integer\n increments for the same length, and zeros are appended as the length\n increases. For the deflate format, these bits are stored backwards\n from their more natural integer increment ordering, and so when the\n decoding tables are built in the large loop below, the integer codes\n are incremented backwards.\n\n This routine assumes, but does not check, that all of the entries in\n lens[] are in the range 0..MAXBITS. The caller must assure this.\n 1..MAXBITS is interpreted as that code length. zero means that that\n symbol does not occur in this code.\n\n The codes are sorted by computing a count of codes for each length,\n creating from that a table of starting indices for each length in the\n sorted table, and then entering the symbols in order in the sorted\n table. The sorted table is work[], with that space being provided by\n the caller.\n\n The length counts are used for other purposes as well, i.e. finding\n the minimum and maximum length codes, determining if there are any\n codes at all, checking for a valid set of lengths, and looking ahead\n at length counts to determine sub-table sizes when building the\n decoding tables.\n */\n\n /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */\n for (len = 0; len <= MAXBITS; len++) {\n count[len] = 0;\n }\n for (sym = 0; sym < codes; sym++) {\n count[lens[lens_index + sym]]++;\n }\n\n /* bound code lengths, force root to be within code lengths */\n root = bits;\n for (max = MAXBITS; max >= 1; max--) {\n if (count[max] !== 0) { break; }\n }\n if (root > max) {\n root = max;\n }\n if (max === 0) { /* no symbols to code at all */\n //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */\n //table.bits[opts.table_index] = 1; //here.bits = (var char)1;\n //table.val[opts.table_index++] = 0; //here.val = (var short)0;\n table[table_index++] = (1 << 24) | (64 << 16) | 0;\n\n\n //table.op[opts.table_index] = 64;\n //table.bits[opts.table_index] = 1;\n //table.val[opts.table_index++] = 0;\n table[table_index++] = (1 << 24) | (64 << 16) | 0;\n\n opts.bits = 1;\n return 0; /* no symbols, but wait for decoding to report error */\n }\n for (min = 1; min < max; min++) {\n if (count[min] !== 0) { break; }\n }\n if (root < min) {\n root = min;\n }\n\n /* check for an over-subscribed or incomplete set of lengths */\n left = 1;\n for (len = 1; len <= MAXBITS; len++) {\n left <<= 1;\n left -= count[len];\n if (left < 0) {\n return -1;\n } /* over-subscribed */\n }\n if (left > 0 && (type === CODES || max !== 1)) {\n return -1; /* incomplete set */\n }\n\n /* generate offsets into symbol table for each length for sorting */\n offs[1] = 0;\n for (len = 1; len < MAXBITS; len++) {\n offs[len + 1] = offs[len] + count[len];\n }\n\n /* sort symbols by length, by symbol order within each length */\n for (sym = 0; sym < codes; sym++) {\n if (lens[lens_index + sym] !== 0) {\n work[offs[lens[lens_index + sym]]++] = sym;\n }\n }\n\n /*\n Create and fill in decoding tables. In this loop, the table being\n filled is at next and has curr index bits. The code being used is huff\n with length len. That code is converted to an index by dropping drop\n bits off of the bottom. 
For codes where len is less than drop + curr,\n those top drop + curr - len bits are incremented through all values to\n fill the table with replicated entries.\n\n root is the number of index bits for the root table. When len exceeds\n root, sub-tables are created pointed to by the root entry with an index\n of the low root bits of huff. This is saved in low to check for when a\n new sub-table should be started. drop is zero when the root table is\n being filled, and drop is root when sub-tables are being filled.\n\n When a new sub-table is needed, it is necessary to look ahead in the\n code lengths to determine what size sub-table is needed. The length\n counts are used for this, and so count[] is decremented as codes are\n entered in the tables.\n\n used keeps track of how many table entries have been allocated from the\n provided *table space. It is checked for LENS and DIST tables against\n the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in\n the initial root table size constants. See the comments in inftrees.h\n for more information.\n\n sym increments through all symbols, and the loop terminates when\n all codes of length max, i.e. all codes, have been processed. This\n routine permits incomplete codes, so another loop after this one fills\n in the rest of the decoding tables with invalid code markers.\n */\n\n /* set up for code type */\n // poor man optimization - use if-else instead of switch,\n // to avoid deopts in old v8\n if (type === CODES) {\n base = extra = work; /* dummy value--not used */\n end = 19;\n\n } else if (type === LENS) {\n base = lbase;\n base_index -= 257;\n extra = lext;\n extra_index -= 257;\n end = 256;\n\n } else { /* DISTS */\n base = dbase;\n extra = dext;\n end = -1;\n }\n\n /* initialize opts for loop */\n huff = 0; /* starting code */\n sym = 0; /* starting code symbol */\n len = min; /* starting code length */\n next = table_index; /* current table to fill in */\n curr = root; /* current table index bits */\n drop = 0; /* current bits to drop from code for index */\n low = -1; /* trigger new sub-table when len > root */\n used = 1 << root; /* use root table entries */\n mask = used - 1; /* mask for comparing low */\n\n /* check available table space */\n if ((type === LENS && used > ENOUGH_LENS) ||\n (type === DISTS && used > ENOUGH_DISTS)) {\n return 1;\n }\n\n /* process all codes and make table entries */\n for (;;) {\n /* create table entry */\n here_bits = len - drop;\n if (work[sym] < end) {\n here_op = 0;\n here_val = work[sym];\n }\n else if (work[sym] > end) {\n here_op = extra[extra_index + work[sym]];\n here_val = base[base_index + work[sym]];\n }\n else {\n here_op = 32 + 64; /* end of block */\n here_val = 0;\n }\n\n /* replicate for those indices with low len bits equal to huff */\n incr = 1 << (len - drop);\n fill = 1 << curr;\n min = fill; /* save offset to next table */\n do {\n fill -= incr;\n table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0;\n } while (fill !== 0);\n\n /* backwards increment the len-bit code huff */\n incr = 1 << (len - 1);\n while (huff & incr) {\n incr >>= 1;\n }\n if (incr !== 0) {\n huff &= incr - 1;\n huff += incr;\n } else {\n huff = 0;\n }\n\n /* go to next symbol, update count, len */\n sym++;\n if (--count[len] === 0) {\n if (len === max) { break; }\n len = lens[lens_index + work[sym]];\n }\n\n /* create new sub-table if needed */\n if (len > root && (huff & mask) !== low) {\n /* if first time, transition to sub-tables */\n if (drop === 0) {\n 
drop = root;\n }\n\n /* increment past last table */\n next += min; /* here min is 1 << curr */\n\n /* determine length of next table */\n curr = len - drop;\n left = 1 << curr;\n while (curr + drop < max) {\n left -= count[curr + drop];\n if (left <= 0) { break; }\n curr++;\n left <<= 1;\n }\n\n /* check for enough space */\n used += 1 << curr;\n if ((type === LENS && used > ENOUGH_LENS) ||\n (type === DISTS && used > ENOUGH_DISTS)) {\n return 1;\n }\n\n /* point entry in root table to sub-table */\n low = huff & mask;\n /*table.op[low] = curr;\n table.bits[low] = root;\n table.val[low] = next - opts.table_index;*/\n table[low] = (root << 24) | (curr << 16) | (next - table_index) |0;\n }\n }\n\n /* fill in remaining table entry if code is incomplete (guaranteed to have\n at most one remaining entry, since if the code is incomplete, the\n maximum code length that was allowed to get this far is one bit) */\n if (huff !== 0) {\n //table.op[next + huff] = 64; /* invalid code marker */\n //table.bits[next + huff] = len - drop;\n //table.val[next + huff] = 0;\n table[next + huff] = ((len - drop) << 24) | (64 << 16) |0;\n }\n\n /* set return parameters */\n //opts.table_index += used;\n opts.bits = root;\n return 0;\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inftrees.js?");
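// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// inflate_table() above packs a Huffman decoding table from an array of code
// lengths. The underlying idea, described in its comments, is canonical
// Huffman coding: count the codes of each bit length, derive the first code
// for each length, then assign codes in symbol order within a length. The
// self-contained helper below sketches just that assignment (codes as plain
// integers, most-significant bit first; deflate actually stores the bits
// reversed, which the table builder accounts for). Names are illustrative
// and the function is never invoked by the bundle.
function canonicalHuffmanCodes(lens) {
  var MAXBITS = 15;
  var count = [], next = [], codes = [];
  var len, code, sym;
  for (len = 0; len <= MAXBITS; len++) { count[len] = 0; next[len] = 0; }
  for (sym = 0; sym < lens.length; sym++) { count[lens[sym]]++; }
  count[0] = 0; // length 0 means "symbol unused", it gets no code
  // First code of each length (RFC 1951, section 3.2.2).
  code = 0;
  for (len = 1; len <= MAXBITS; len++) {
    code = (code + count[len - 1]) << 1;
    next[len] = code;
  }
  for (sym = 0; sym < lens.length; sym++) {
    codes[sym] = lens[sym] !== 0 ? next[lens[sym]]++ : 0;
  }
  return codes;
}
// RFC 1951's example: lengths [3,3,3,3,3,2,4,4] yield codes
// [2,3,4,5,6,0,14,15], i.e. 010,011,100,101,110,00,1110,1111.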
750
751/***/ }),
752
753/***/ "./node_modules/pako/lib/zlib/messages.js":
754/*!************************************************!*\
755 !*** ./node_modules/pako/lib/zlib/messages.js ***!
756 \************************************************/
757/*! no static exports found */
758/***/ (function(module, exports, __webpack_require__) {
759
760"use strict";
761eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nmodule.exports = {\n 2: 'need dictionary', /* Z_NEED_DICT 2 */\n 1: 'stream end', /* Z_STREAM_END 1 */\n 0: '', /* Z_OK 0 */\n '-1': 'file error', /* Z_ERRNO (-1) */\n '-2': 'stream error', /* Z_STREAM_ERROR (-2) */\n '-3': 'data error', /* Z_DATA_ERROR (-3) */\n '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */\n '-5': 'buffer error', /* Z_BUF_ERROR (-5) */\n '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/messages.js?");
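// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// The module above is a plain lookup table from zlib status codes to strings;
// negative codes are keyed as strings ('-3' etc.), which still resolves for
// numeric lookups because object keys are coerced. A tiny hedged usage sketch
// (the function name is illustrative and nothing here calls it):
function describeZlibStatus(messages, code) {
  var msg = messages[code];
  return msg !== undefined ? msg : 'unknown zlib status ' + code;
}
// e.g. describeZlibStatus(<the table above>, -3) === 'data error'.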
762
763/***/ }),
764
765/***/ "./node_modules/pako/lib/zlib/zstream.js":
766/*!***********************************************!*\
767 !*** ./node_modules/pako/lib/zlib/zstream.js ***!
768 \***********************************************/
769/*! no static exports found */
770/***/ (function(module, exports, __webpack_require__) {
771
772"use strict";
773eval("\n\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\n//\n// This software is provided 'as-is', without any express or implied\n// warranty. In no event will the authors be held liable for any damages\n// arising from the use of this software.\n//\n// Permission is granted to anyone to use this software for any purpose,\n// including commercial applications, and to alter it and redistribute it\n// freely, subject to the following restrictions:\n//\n// 1. The origin of this software must not be misrepresented; you must not\n// claim that you wrote the original software. If you use this software\n// in a product, an acknowledgment in the product documentation would be\n// appreciated but is not required.\n// 2. Altered source versions must be plainly marked as such, and must not be\n// misrepresented as being the original software.\n// 3. This notice may not be removed or altered from any source distribution.\n\nfunction ZStream() {\n /* next input byte */\n this.input = null; // JS specific, because we have no pointers\n this.next_in = 0;\n /* number of bytes available at input */\n this.avail_in = 0;\n /* total number of input bytes read so far */\n this.total_in = 0;\n /* next output byte should be put there */\n this.output = null; // JS specific, because we have no pointers\n this.next_out = 0;\n /* remaining free space at output */\n this.avail_out = 0;\n /* total number of bytes output so far */\n this.total_out = 0;\n /* last error message, NULL if no error */\n this.msg = ''/*Z_NULL*/;\n /* not visible by applications */\n this.state = null;\n /* best guess about the data type: binary or text */\n this.data_type = 2/*Z_UNKNOWN*/;\n /* adler32 value of the uncompressed data */\n this.adler = 0;\n}\n\nmodule.exports = ZStream;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/zstream.js?");
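// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// ZStream mirrors zlib's z_stream struct. Because JavaScript has no pointers,
// input/output hold whole arrays while next_in/next_out are numeric offsets
// into them, and avail_in/avail_out count the bytes still readable/writable
// from those offsets; inflate() above consumes exactly these fields. A hedged
// sketch of wiring a stream up for one inflate pass (chunk size and names are
// illustrative, and nothing in the bundle calls this):
function prepareStreamBuffers(strm, compressedBytes, chunkSize) {
  strm.input = compressedBytes;            // the compressed payload
  strm.next_in = 0;                        // start reading at offset 0
  strm.avail_in = compressedBytes.length;  // everything is available
  strm.output = new Uint8Array(chunkSize); // room for one output chunk
  strm.next_out = 0;
  strm.avail_out = chunkSize;
  return strm;
}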
774
775/***/ }),
776
777/***/ "./node_modules/process-nextick-args/index.js":
778/*!****************************************************!*\
779 !*** ./node_modules/process-nextick-args/index.js ***!
780 \****************************************************/
781/*! no static exports found */
782/***/ (function(module, exports, __webpack_require__) {
783
784"use strict";
785eval("\n\nif (typeof process === 'undefined' ||\n !process.version ||\n process.version.indexOf('v0.') === 0 ||\n process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {\n module.exports = { nextTick: nextTick };\n} else {\n module.exports = process\n}\n\nfunction nextTick(fn, arg1, arg2, arg3) {\n if (typeof fn !== 'function') {\n throw new TypeError('\"callback\" argument must be a function');\n }\n var len = arguments.length;\n var args, i;\n switch (len) {\n case 0:\n case 1:\n return process.nextTick(fn);\n case 2:\n return process.nextTick(function afterTickOne() {\n fn.call(null, arg1);\n });\n case 3:\n return process.nextTick(function afterTickTwo() {\n fn.call(null, arg1, arg2);\n });\n case 4:\n return process.nextTick(function afterTickThree() {\n fn.call(null, arg1, arg2, arg3);\n });\n default:\n args = new Array(len - 1);\n i = 0;\n while (i < args.length) {\n args[i++] = arguments[i];\n }\n return process.nextTick(function afterTick() {\n fn.apply(null, args);\n });\n }\n}\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/process-nextick-args/index.js?");
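// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// This shim exports process.nextTick itself on modern Node and otherwise
// wraps it so extra arguments are forwarded to the callback, which very old
// v0.x releases did not support; the stream modules below rely on that, e.g.
// pna.nextTick(onEndNT, this). A minimal hedged sketch of the two equivalent
// call forms (names illustrative, never invoked here):
function scheduleCallback(pna, cb, err) {
  pna.nextTick(cb, err); // argument-forwarding form provided by this module
  // On old Node the module builds the equivalent closure itself:
  // process.nextTick(function afterTickOne() { cb.call(null, err); });
}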
786
787/***/ }),
788
789/***/ "./node_modules/readable-stream/lib/_stream_duplex.js":
790/*!************************************************************!*\
791 !*** ./node_modules/readable-stream/lib/_stream_duplex.js ***!
792 \************************************************************/
793/*! no static exports found */
794/***/ (function(module, exports, __webpack_require__) {
795
796"use strict";
797eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n\n\n/*<replacement>*/\n\nvar pna = __webpack_require__(/*! process-nextick-args */ \"./node_modules/process-nextick-args/index.js\");\n/*</replacement>*/\n\n/*<replacement>*/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) {\n keys.push(key);\n }return keys;\n};\n/*</replacement>*/\n\nmodule.exports = Duplex;\n\n/*<replacement>*/\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \"./node_modules/core-util-is/lib/util.js\"));\nutil.inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\");\n/*</replacement>*/\n\nvar Readable = __webpack_require__(/*! ./_stream_readable */ \"./node_modules/readable-stream/lib/_stream_readable.js\");\nvar Writable = __webpack_require__(/*! 
./_stream_writable */ \"./node_modules/readable-stream/lib/_stream_writable.js\");\n\nutil.inherits(Duplex, Readable);\n\n{\n // avoid scope creep, the keys array can then be collected\n var keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n var method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\n\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n\n Readable.call(this, options);\n Writable.call(this, options);\n\n if (options && options.readable === false) this.readable = false;\n\n if (options && options.writable === false) this.writable = false;\n\n this.allowHalfOpen = true;\n if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;\n\n this.once('end', onend);\n}\n\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._writableState.highWaterMark;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // if we allow half-open state, or if the writable side ended,\n // then we're ok.\n if (this.allowHalfOpen || this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n pna.nextTick(onEndNT, this);\n}\n\nfunction onEndNT(self) {\n self.end();\n}\n\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n get: function () {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});\n\nDuplex.prototype._destroy = function (err, cb) {\n this.push(null);\n this.end();\n\n pna.nextTick(cb, err);\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_duplex.js?");
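// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// A Duplex stream is independently readable and writable; as noted above, the
// class inherits from Readable and copies Writable's prototype methods over.
// The sketch below assumes the simplified-constructor form, where read and
// write are supplied as options (Readable above installs options.read as
// _read; the companion Writable module is assumed to do the same for
// options.write). Names and the queued data are illustrative, and nothing in
// the bundle invokes this.
function makeExampleDuplex(DuplexCtor) {
  var pending = ['a', 'b', 'c'];
  return new DuplexCtor({
    read: function () {
      // readable side: emit queued items, then signal EOF with null
      this.push(pending.length ? pending.shift() : null);
    },
    write: function (chunk, encoding, callback) {
      // writable side: accept each chunk and acknowledge it
      callback();
    }
  });
}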
798
799/***/ }),
800
801/***/ "./node_modules/readable-stream/lib/_stream_passthrough.js":
802/*!*****************************************************************!*\
803 !*** ./node_modules/readable-stream/lib/_stream_passthrough.js ***!
804 \*****************************************************************/
805/*! no static exports found */
806/***/ (function(module, exports, __webpack_require__) {
807
808"use strict";
809eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n\n\nmodule.exports = PassThrough;\n\nvar Transform = __webpack_require__(/*! ./_stream_transform */ \"./node_modules/readable-stream/lib/_stream_transform.js\");\n\n/*<replacement>*/\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \"./node_modules/core-util-is/lib/util.js\"));\nutil.inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\");\n/*</replacement>*/\n\nutil.inherits(PassThrough, Transform);\n\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n\n Transform.call(this, options);\n}\n\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_passthrough.js?");
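// --- Editor's note: illustrative sketch, not part of the original bundle. ---
// PassThrough is the identity Transform: every written chunk is emitted
// unchanged, so it is commonly dropped into a pipeline as a tap point. A
// small hedged sketch that counts the bytes flowing through (names are
// illustrative and nothing in the bundle invokes it):
function countBytesThrough(PassThroughCtor, onTotal) {
  var tap = new PassThroughCtor();
  var total = 0;
  tap.on('data', function (chunk) { total += chunk.length; });
  tap.on('end', function () { onTotal(total); });
  return tap; // e.g. source.pipe(tap).pipe(destination)
}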
810
811/***/ }),
812
813/***/ "./node_modules/readable-stream/lib/_stream_readable.js":
814/*!**************************************************************!*\
815 !*** ./node_modules/readable-stream/lib/_stream_readable.js ***!
816 \**************************************************************/
817/*! no static exports found */
818/***/ (function(module, exports, __webpack_require__) {
819
820"use strict";
821eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\n/*<replacement>*/\n\nvar pna = __webpack_require__(/*! process-nextick-args */ \"./node_modules/process-nextick-args/index.js\");\n/*</replacement>*/\n\nmodule.exports = Readable;\n\n/*<replacement>*/\nvar isArray = __webpack_require__(/*! isarray */ \"./node_modules/isarray/index.js\");\n/*</replacement>*/\n\n/*<replacement>*/\nvar Duplex;\n/*</replacement>*/\n\nReadable.ReadableState = ReadableState;\n\n/*<replacement>*/\nvar EE = __webpack_require__(/*! events */ \"events\").EventEmitter;\n\nvar EElistenerCount = function (emitter, type) {\n return emitter.listeners(type).length;\n};\n/*</replacement>*/\n\n/*<replacement>*/\nvar Stream = __webpack_require__(/*! ./internal/streams/stream */ \"./node_modules/readable-stream/lib/internal/streams/stream.js\");\n/*</replacement>*/\n\n/*<replacement>*/\n\nvar Buffer = __webpack_require__(/*! safe-buffer */ \"./node_modules/readable-stream/node_modules/safe-buffer/index.js\").Buffer;\nvar OurUint8Array = global.Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/*</replacement>*/\n\n/*<replacement>*/\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \"./node_modules/core-util-is/lib/util.js\"));\nutil.inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\");\n/*</replacement>*/\n\n/*<replacement>*/\nvar debugUtil = __webpack_require__(/*! util */ \"util\");\nvar debug = void 0;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function () {};\n}\n/*</replacement>*/\n\nvar BufferList = __webpack_require__(/*! ./internal/streams/BufferList */ \"./node_modules/readable-stream/lib/internal/streams/BufferList.js\");\nvar destroyImpl = __webpack_require__(/*! 
./internal/streams/destroy */ \"./node_modules/readable-stream/lib/internal/streams/destroy.js\");\nvar StringDecoder;\n\nutil.inherits(Readable, Stream);\n\nvar kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\n\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\n\nfunction ReadableState(options, stream) {\n Duplex = Duplex || __webpack_require__(/*! ./_stream_duplex */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n var isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n var hwm = options.highWaterMark;\n var readableHwm = options.readableHighWaterMark;\n var defaultHwm = this.objectMode ? 16 : 16 * 1024;\n\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;\n\n // cast to ints.\n this.highWaterMark = Math.floor(this.highWaterMark);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. 
Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = __webpack_require__(/*! string_decoder/ */ \"./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js\").StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\n\nfunction Readable(options) {\n Duplex = Duplex || __webpack_require__(/*! ./_stream_duplex */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n\n if (!(this instanceof Readable)) return new Readable(options);\n\n this._readableState = new ReadableState(options, this);\n\n // legacy\n this.readable = true;\n\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n\n Stream.call(this);\n}\n\nObject.defineProperty(Readable.prototype, 'destroyed', {\n get: function () {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\n\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n this.push(null);\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\n\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n stream.emit('error', er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (addToFront) {\n if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n stream.emit('error', new Error('stream.push() after EOF'));\n } 
else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n }\n }\n\n return needMoreData(state);\n}\n\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n stream.emit('data', chunk);\n stream.read(0);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\n\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new TypeError('Invalid non-string/buffer chunk');\n }\n return er;\n}\n\n// if it's past the high water mark, we can push in some more.\n// Also, if we have no data yet, we can stand some\n// more bytes. This is to work around cases where hwm=0,\n// such as the repl. Also, if the push() triggered a\n// readable event, and the user called read(largeNumber) such that\n// needReadable was set, then we ought to push more, so that another\n// 'readable' event will be triggered.\nfunction needMoreData(state) {\n return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);\n}\n\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = __webpack_require__(/*! 
string_decoder/ */ \"./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js\").StringDecoder;\n this._readableState.decoder = new StringDecoder(enc);\n this._readableState.encoding = enc;\n return this;\n};\n\n// Don't raise the hwm > 8MB\nvar MAX_HWM = 0x800000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n\n n = howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. 
Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n\n if (ret === null) {\n state.needReadable = true;\n n = 0;\n } else {\n state.length -= n;\n }\n\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n\n if (ret !== null) this.emit('data', ret);\n\n return ret;\n};\n\nfunction onEofChunk(stream, state) {\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n\n // emit 'readable' now to make sure it gets picked up.\n emitReadable(stream);\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);\n }\n}\n\nfunction emitReadable_(stream) {\n debug('emit readable');\n stream.emit('readable');\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n pna.nextTick(maybeReadMore_, stream, state);\n }\n}\n\nfunction maybeReadMore_(stream, state) {\n var len = state.length;\n while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;else len = state.length;\n }\n state.readingMore = false;\n}\n\n// abstract method. 
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n this.emit('error', new Error('_read() is not implemented'));\n};\n\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);\n\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n\n // If the user pushes more data while we're writing to dest then we'll end up\n // in ondata again. 
However, we only want to increase awaitDrain once because\n // dest will only emit one 'drain' event for the multiple writes.\n // => Introduce a guard on increasing awaitDrain.\n var increasedAwaitDrain = false;\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n increasedAwaitDrain = false;\n var ret = dest.write(chunk);\n if (false === ret && !increasedAwaitDrain) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', src._readableState.awaitDrain);\n src._readableState.awaitDrain++;\n increasedAwaitDrain = true;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n\n return dest;\n};\n\nfunction pipeOnDrain(src) {\n return function () {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\n\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = { hasUnpiped: false };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. 
multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n\n for (var i = 0; i < len; i++) {\n dests[i].emit('unpipe', this, unpipeInfo);\n }return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n\n dest.emit('unpipe', this, unpipeInfo);\n\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n var res = Stream.prototype.on.call(this, ev, fn);\n\n if (ev === 'data') {\n // Start flowing on next tick if stream isn't explicitly paused\n if (this._readableState.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n var state = this._readableState;\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.emittedReadable = false;\n if (!state.reading) {\n pna.nextTick(nReadingNextTick, this);\n } else if (state.length) {\n emitReadable(this);\n }\n }\n }\n\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\n\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n state.flowing = true;\n resume(this, state);\n }\n return this;\n};\n\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n pna.nextTick(resume_, stream, state);\n }\n}\n\nfunction resume_(stream, state) {\n if (!state.reading) {\n debug('resume read 0');\n stream.read(0);\n }\n\n state.resumeScheduled = false;\n state.awaitDrain = 0;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\n\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (false !== this._readableState.flowing) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n return this;\n};\n\nfunction flow(stream) {\n var state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null) {}\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var _this = this;\n\n var state = this._readableState;\n var paused = false;\n\n stream.on('end', function () {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) _this.push(chunk);\n }\n\n _this.push(null);\n });\n\n stream.on('data', function (chunk) {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n\n var ret = _this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping 
filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof stream[i] === 'function') {\n this[i] = function (method) {\n return function () {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = function (n) {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n\n return this;\n};\n\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._readableState.highWaterMark;\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = fromListPartial(n, state.buffer, state.decoder);\n }\n\n return ret;\n}\n\n// Extracts only enough buffered data to satisfy the amount requested.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromListPartial(n, list, hasStrings) {\n var ret;\n if (n < list.head.data.length) {\n // slice is the same for buffers and strings\n ret = list.head.data.slice(0, n);\n list.head.data = list.head.data.slice(n);\n } else if (n === list.head.data.length) {\n // first chunk is a perfect match\n ret = list.shift();\n } else {\n // result spans more than one buffer\n ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);\n }\n return ret;\n}\n\n// Copies a specified amount of characters from the list of buffered data\n// chunks.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction copyFromBufferString(n, list) {\n var p = list.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n var str = p.data;\n var nb = n > str.length ? str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) list.head = p.next;else list.head = list.tail = null;\n } else {\n list.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n list.length -= c;\n return ret;\n}\n\n// Copies a specified amount of bytes from the list of buffered data chunks.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction copyFromBuffer(n, list) {\n var ret = Buffer.allocUnsafe(n);\n var p = list.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n var buf = p.data;\n var nb = n > buf.length ? 
buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) list.head = p.next;else list.head = list.tail = null;\n } else {\n list.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n list.length -= c;\n return ret;\n}\n\nfunction endReadable(stream) {\n var state = stream._readableState;\n\n // If we get here before consuming all the bytes, then that is a\n // bug in node. Should never happen.\n if (state.length > 0) throw new Error('\"endReadable()\" called on non-empty stream');\n\n if (!state.endEmitted) {\n state.ended = true;\n pna.nextTick(endReadableNT, state, stream);\n }\n}\n\nfunction endReadableNT(state, stream) {\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n }\n}\n\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_readable.js?");
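/*
 * Illustrative consumer-side sketch (not part of the original bundle):
 * attaching a 'data' listener flips the stream into flowing mode via the
 * Readable.prototype.on/resume logic above, and pause()/resume() toggle it.
 * Assumes the public `readable-stream` package API that this module implements.
 *
 *   const { Readable } = require('readable-stream');
 *
 *   const letters = ['a', 'b', 'c'];
 *   const source = new Readable({
 *     read() {
 *       // one chunk per _read() call; pushing null signals end-of-stream
 *       this.push(letters.length ? letters.shift() : null);
 *     }
 *   });
 *
 *   source.on('data', (chunk) => {
 *     console.log('chunk:', chunk.toString());
 *     source.pause();                         // stop flowing
 *     setTimeout(() => source.resume(), 10);  // start flowing again
 *   });
 *   source.on('end', () => console.log('done'));
 */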
822
823/***/ }),
824
825/***/ "./node_modules/readable-stream/lib/_stream_transform.js":
826/*!***************************************************************!*\
827 !*** ./node_modules/readable-stream/lib/_stream_transform.js ***!
828 \***************************************************************/
829/*! no static exports found */
830/***/ (function(module, exports, __webpack_require__) {
831
832"use strict";
833eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. 
A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n\n\nmodule.exports = Transform;\n\nvar Duplex = __webpack_require__(/*! ./_stream_duplex */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n\n/*<replacement>*/\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \"./node_modules/core-util-is/lib/util.js\"));\nutil.inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\");\n/*</replacement>*/\n\nutil.inherits(Transform, Duplex);\n\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n\n var cb = ts.writecb;\n\n if (!cb) {\n return this.emit('error', new Error('write callback called multiple times'));\n }\n\n ts.writechunk = null;\n ts.writecb = null;\n\n if (data != null) // single equals check for both `null` and `undefined`\n this.push(data);\n\n cb(er);\n\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\n\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n\n Duplex.call(this, options);\n\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out anything remaining.\n this.on('prefinish', prefinish);\n}\n\nfunction prefinish() {\n var _this = this;\n\n if (typeof this._flush === 'function') {\n this._flush(function (er, data) {\n done(_this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\n\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n throw new Error('_transform() is not implemented');\n};\n\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n\n if (ts.writechunk !== null && ts.writecb && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\n\nTransform.prototype._destroy = function (err, cb) {\n var _this2 = this;\n\n Duplex.prototype._destroy.call(this, err, function (err2) {\n cb(err2);\n _this2.emit('close');\n });\n};\n\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n\n if (data != null) // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');\n\n if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');\n\n return stream.push(null);\n}\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_transform.js?");
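/*
 * Illustrative sketch (not part of the original bundle): a minimal Transform
 * built from the `transform`/`flush` options handled by the constructor above.
 * The piping of process.stdin/stdout is just an assumed usage.
 *
 *   const { Transform } = require('readable-stream');
 *
 *   const upper = new Transform({
 *     transform(chunk, encoding, callback) {
 *       // push one output chunk and acknowledge the written chunk
 *       callback(null, chunk.toString().toUpperCase());
 *     },
 *     flush(callback) {
 *       // runs on 'prefinish', after the writable side has ended
 *       callback(null, '\n-- done --\n');
 *     }
 *   });
 *
 *   process.stdin.pipe(upper).pipe(process.stdout);
 */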
834
835/***/ }),
836
837/***/ "./node_modules/readable-stream/lib/_stream_writable.js":
838/*!**************************************************************!*\
839 !*** ./node_modules/readable-stream/lib/_stream_writable.js ***!
840 \**************************************************************/
841/*! no static exports found */
842/***/ (function(module, exports, __webpack_require__) {
843
844"use strict";
845eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n\n\n/*<replacement>*/\n\nvar pna = __webpack_require__(/*! process-nextick-args */ \"./node_modules/process-nextick-args/index.js\");\n/*</replacement>*/\n\nmodule.exports = Writable;\n\n/* <replacement> */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n var _this = this;\n\n this.next = null;\n this.entry = null;\n this.finish = function () {\n onCorkedFinish(_this, state);\n };\n}\n/* </replacement> */\n\n/*<replacement>*/\nvar asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;\n/*</replacement>*/\n\n/*<replacement>*/\nvar Duplex;\n/*</replacement>*/\n\nWritable.WritableState = WritableState;\n\n/*<replacement>*/\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \"./node_modules/core-util-is/lib/util.js\"));\nutil.inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\");\n/*</replacement>*/\n\n/*<replacement>*/\nvar internalUtil = {\n deprecate: __webpack_require__(/*! util-deprecate */ \"./node_modules/util-deprecate/node.js\")\n};\n/*</replacement>*/\n\n/*<replacement>*/\nvar Stream = __webpack_require__(/*! ./internal/streams/stream */ \"./node_modules/readable-stream/lib/internal/streams/stream.js\");\n/*</replacement>*/\n\n/*<replacement>*/\n\nvar Buffer = __webpack_require__(/*! safe-buffer */ \"./node_modules/readable-stream/node_modules/safe-buffer/index.js\").Buffer;\nvar OurUint8Array = global.Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/*</replacement>*/\n\nvar destroyImpl = __webpack_require__(/*! ./internal/streams/destroy */ \"./node_modules/readable-stream/lib/internal/streams/destroy.js\");\n\nutil.inherits(Writable, Stream);\n\nfunction nop() {}\n\nfunction WritableState(options, stream) {\n Duplex = Duplex || __webpack_require__(/*! 
./_stream_duplex */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n var isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n var hwm = options.highWaterMark;\n var writableHwm = options.writableHighWaterMark;\n var defaultHwm = this.objectMode ? 16 : 16 * 1024;\n\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;\n\n // cast to ints.\n this.highWaterMark = Math.floor(this.highWaterMark);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. 
We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\n\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function () {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function (object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function (object) {\n return object instanceof this;\n };\n}\n\nfunction Writable(options) {\n Duplex = Duplex || __webpack_require__(/*! 
./_stream_duplex */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {\n return new Writable(options);\n }\n\n this._writableState = new WritableState(options, this);\n\n // legacy.\n this.writable = true;\n\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n\n if (typeof options.writev === 'function') this._writev = options.writev;\n\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n\n if (typeof options.final === 'function') this._final = options.final;\n }\n\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n this.emit('error', new Error('Cannot pipe, not readable'));\n};\n\nfunction writeAfterEnd(stream, cb) {\n var er = new Error('write after end');\n // TODO: defer error events consistently everywhere, not just the cb\n stream.emit('error', er);\n pna.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var valid = true;\n var er = false;\n\n if (chunk === null) {\n er = new TypeError('May not write null values to stream');\n } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new TypeError('Invalid non-string/buffer chunk');\n }\n if (er) {\n stream.emit('error', er);\n pna.nextTick(cb, er);\n valid = false;\n }\n return valid;\n}\n\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n\n if (typeof cb !== 'function') cb = nop;\n\n if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n\n return ret;\n};\n\nWritable.prototype.cork = function () {\n var state = this._writableState;\n\n state.corked++;\n};\n\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n\n if (state.corked) {\n state.corked--;\n\n if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\n\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + 
encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\n\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\n\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function () {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 1 : chunk.length;\n\n state.length += len;\n\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk: chunk,\n encoding: encoding,\n isBuf: isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n\n return ret;\n}\n\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\n\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n pna.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n pna.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n stream.emit('error', er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n stream.emit('error', er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\n\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\n\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n\n onwriteStateUpdate(state);\n\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state);\n\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n\n if (sync) {\n /*<replacement>*/\n asyncWrite(afterWrite, stream, state, finished, cb);\n /*</replacement>*/\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\n\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n 
finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n buffer.allBuffers = allBuffers;\n\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n\n if (entry === null) state.lastBufferedRequest = null;\n }\n\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\n\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new Error('_write() is not implemented'));\n};\n\nWritable.prototype._writev = null;\n\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending && !state.finished) endWritable(this, state, cb);\n};\n\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(function (err) {\n state.pendingcb--;\n if (err) {\n stream.emit('error', err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function') {\n state.pendingcb++;\n state.finalCalled = true;\n pna.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\n\nfunction finishMaybe(stream, state) 
{\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n }\n }\n return need;\n}\n\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable = false;\n}\n\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n if (state.corkedRequestsFree) {\n state.corkedRequestsFree.next = corkReq;\n } else {\n state.corkedRequestsFree = corkReq;\n }\n}\n\nObject.defineProperty(Writable.prototype, 'destroyed', {\n get: function () {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set: function (value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\n\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n this.end();\n cb(err);\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_writable.js?");
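/*
 * Illustrative sketch (not part of the original bundle): implementing
 * ._write() and honouring backpressure. write() returns false once the
 * buffered length reaches highWaterMark (writeOrBuffer above), and 'drain'
 * fires when the buffer empties again (onwriteDrain above).
 *
 *   const { Writable } = require('readable-stream');
 *
 *   const sink = new Writable({
 *     highWaterMark: 4,
 *     write(chunk, encoding, callback) {
 *       setImmediate(callback);   // stand-in for real async I/O
 *     }
 *   });
 *
 *   if (!sink.write('some data')) {
 *     sink.once('drain', () => sink.end('last chunk'));
 *   } else {
 *     sink.end('last chunk');
 *   }
 *   sink.on('finish', () => console.log('all writes flushed'));
 */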
846
847/***/ }),
848
849/***/ "./node_modules/readable-stream/lib/internal/streams/BufferList.js":
850/*!*************************************************************************!*\
851 !*** ./node_modules/readable-stream/lib/internal/streams/BufferList.js ***!
852 \*************************************************************************/
853/*! no static exports found */
854/***/ (function(module, exports, __webpack_require__) {
855
856"use strict";
857eval("\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nvar Buffer = __webpack_require__(/*! safe-buffer */ \"./node_modules/readable-stream/node_modules/safe-buffer/index.js\").Buffer;\nvar util = __webpack_require__(/*! util */ \"util\");\n\nfunction copyBuffer(src, target, offset) {\n src.copy(target, offset);\n}\n\nmodule.exports = function () {\n function BufferList() {\n _classCallCheck(this, BufferList);\n\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n\n BufferList.prototype.push = function push(v) {\n var entry = { data: v, next: null };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n };\n\n BufferList.prototype.unshift = function unshift(v) {\n var entry = { data: v, next: this.head };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n };\n\n BufferList.prototype.shift = function shift() {\n if (this.length === 0) return;\n var ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n };\n\n BufferList.prototype.clear = function clear() {\n this.head = this.tail = null;\n this.length = 0;\n };\n\n BufferList.prototype.join = function join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) {\n ret += s + p.data;\n }return ret;\n };\n\n BufferList.prototype.concat = function concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n if (this.length === 1) return this.head.data;\n var ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n };\n\n return BufferList;\n}();\n\nif (util && util.inspect && util.inspect.custom) {\n module.exports.prototype[util.inspect.custom] = function () {\n var obj = util.inspect({ length: this.length });\n return this.constructor.name + ' ' + obj;\n };\n}\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/BufferList.js?");
858
859/***/ }),
860
861/***/ "./node_modules/readable-stream/lib/internal/streams/destroy.js":
862/*!**********************************************************************!*\
863 !*** ./node_modules/readable-stream/lib/internal/streams/destroy.js ***!
864 \**********************************************************************/
865/*! no static exports found */
866/***/ (function(module, exports, __webpack_require__) {
867
868"use strict";
869eval("\n\n/*<replacement>*/\n\nvar pna = __webpack_require__(/*! process-nextick-args */ \"./node_modules/process-nextick-args/index.js\");\n/*</replacement>*/\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n var _this = this;\n\n var readableDestroyed = this._readableState && this._readableState.destroyed;\n var writableDestroyed = this._writableState && this._writableState.destroyed;\n\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {\n pna.nextTick(emitErrorNT, this, err);\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n\n this._destroy(err || null, function (err) {\n if (!cb && err) {\n pna.nextTick(emitErrorNT, _this, err);\n if (_this._writableState) {\n _this._writableState.errorEmitted = true;\n }\n } else if (cb) {\n cb(err);\n }\n });\n\n return this;\n}\n\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\n\nfunction emitErrorNT(self, err) {\n self.emit('error', err);\n}\n\nmodule.exports = {\n destroy: destroy,\n undestroy: undestroy\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/destroy.js?");
870
871/***/ }),
872
873/***/ "./node_modules/readable-stream/lib/internal/streams/stream.js":
874/*!*********************************************************************!*\
875 !*** ./node_modules/readable-stream/lib/internal/streams/stream.js ***!
876 \*********************************************************************/
877/*! no static exports found */
878/***/ (function(module, exports, __webpack_require__) {
879
880eval("module.exports = __webpack_require__(/*! stream */ \"stream\");\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/stream.js?");
881
882/***/ }),
883
884/***/ "./node_modules/readable-stream/node_modules/safe-buffer/index.js":
885/*!************************************************************************!*\
886 !*** ./node_modules/readable-stream/node_modules/safe-buffer/index.js ***!
887 \************************************************************************/
888/*! no static exports found */
889/***/ (function(module, exports, __webpack_require__) {
890
891eval("/* eslint-disable node/no-deprecated-api */\nvar buffer = __webpack_require__(/*! buffer */ \"buffer\")\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/node_modules/safe-buffer/index.js?");
892
893/***/ }),
894
895/***/ "./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js":
896/*!****************************************************************************************!*\
897 !*** ./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js ***!
898 \****************************************************************************************/
899/*! no static exports found */
900/***/ (function(module, exports, __webpack_require__) {
901
902"use strict";
903eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\n/*<replacement>*/\n\nvar Buffer = __webpack_require__(/*! safe-buffer */ \"./node_modules/readable-stream/node_modules/safe-buffer/index.js\").Buffer;\n/*</replacement>*/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? 
r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js?");
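/*
 * Illustrative sketch (not part of the original bundle): the decoder buffers
 * incomplete multi-byte sequences (lastChar/lastNeed above) so characters
 * split across chunks are never mangled. Uses Node's core 'string_decoder',
 * which this vendored copy mirrors.
 *
 *   const { StringDecoder } = require('string_decoder');
 *
 *   const decoder = new StringDecoder('utf8');
 *   const euro = Buffer.from([0xe2, 0x82, 0xac]);   // '€' is 3 bytes in UTF-8
 *
 *   let out = '';
 *   out += decoder.write(euro.slice(0, 1));   // '' -- incomplete, buffered
 *   out += decoder.write(euro.slice(1, 2));   // '' -- still incomplete
 *   out += decoder.write(euro.slice(2));      // '€' -- sequence completed
 *   out += decoder.end();
 *   console.log(out);                         // '€'
 */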
904
905/***/ }),
906
907/***/ "./node_modules/readable-stream/readable.js":
908/*!**************************************************!*\
909 !*** ./node_modules/readable-stream/readable.js ***!
910 \**************************************************/
911/*! no static exports found */
912/***/ (function(module, exports, __webpack_require__) {
913
914eval("var Stream = __webpack_require__(/*! stream */ \"stream\");\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream;\n exports = module.exports = Stream.Readable;\n exports.Readable = Stream.Readable;\n exports.Writable = Stream.Writable;\n exports.Duplex = Stream.Duplex;\n exports.Transform = Stream.Transform;\n exports.PassThrough = Stream.PassThrough;\n exports.Stream = Stream;\n} else {\n exports = module.exports = __webpack_require__(/*! ./lib/_stream_readable.js */ \"./node_modules/readable-stream/lib/_stream_readable.js\");\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = __webpack_require__(/*! ./lib/_stream_writable.js */ \"./node_modules/readable-stream/lib/_stream_writable.js\");\n exports.Duplex = __webpack_require__(/*! ./lib/_stream_duplex.js */ \"./node_modules/readable-stream/lib/_stream_duplex.js\");\n exports.Transform = __webpack_require__(/*! ./lib/_stream_transform.js */ \"./node_modules/readable-stream/lib/_stream_transform.js\");\n exports.PassThrough = __webpack_require__(/*! ./lib/_stream_passthrough.js */ \"./node_modules/readable-stream/lib/_stream_passthrough.js\");\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/readable.js?");
915
916/***/ }),
917
918/***/ "./node_modules/supports-color/index.js":
919/*!**********************************************!*\
920 !*** ./node_modules/supports-color/index.js ***!
921 \**********************************************/
922/*! no static exports found */
923/***/ (function(module, exports, __webpack_require__) {
924
925"use strict";
926eval("\nvar argv = process.argv;\n\nvar terminator = argv.indexOf('--');\nvar hasFlag = function (flag) {\n\tflag = '--' + flag;\n\tvar pos = argv.indexOf(flag);\n\treturn pos !== -1 && (terminator !== -1 ? pos < terminator : true);\n};\n\nmodule.exports = (function () {\n\tif ('FORCE_COLOR' in process.env) {\n\t\treturn true;\n\t}\n\n\tif (hasFlag('no-color') ||\n\t\thasFlag('no-colors') ||\n\t\thasFlag('color=false')) {\n\t\treturn false;\n\t}\n\n\tif (hasFlag('color') ||\n\t\thasFlag('colors') ||\n\t\thasFlag('color=true') ||\n\t\thasFlag('color=always')) {\n\t\treturn true;\n\t}\n\n\tif (process.stdout && !process.stdout.isTTY) {\n\t\treturn false;\n\t}\n\n\tif (process.platform === 'win32') {\n\t\treturn true;\n\t}\n\n\tif ('COLORTERM' in process.env) {\n\t\treturn true;\n\t}\n\n\tif (process.env.TERM === 'dumb') {\n\t\treturn false;\n\t}\n\n\tif (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) {\n\t\treturn true;\n\t}\n\n\treturn false;\n})();\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/supports-color/index.js?");
927
928/***/ }),
929
930/***/ "./node_modules/threads-plugin/dist/loader.js?{\"name\":\"0\"}!./node_modules/geotiff/src/decoder.worker.js":
931/*!**************************************************************************************************************!*\
932 !*** ./node_modules/threads-plugin/dist/loader.js?{"name":"0"}!./node_modules/geotiff/src/decoder.worker.js ***!
933 \**************************************************************************************************************/
934/*! no static exports found */
935/***/ (function(module, exports, __webpack_require__) {
936
937eval("module.exports = __webpack_require__.p + \"0.georaster.bundle.worker.js\"\n\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/decoder.worker.js?./node_modules/threads-plugin/dist/loader.js?%7B%22name%22:%220%22%7D");
938
939/***/ }),
940
941/***/ "./node_modules/threads/dist-esm/common.js":
942/*!*************************************************!*\
943 !*** ./node_modules/threads/dist-esm/common.js ***!
944 \*************************************************/
945/*! exports provided: registerSerializer, deserialize, serialize */
946/***/ (function(module, __webpack_exports__, __webpack_require__) {
947
948"use strict";
949eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"registerSerializer\", function() { return registerSerializer; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"deserialize\", function() { return deserialize; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"serialize\", function() { return serialize; });\n/* harmony import */ var _serializers__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./serializers */ \"./node_modules/threads/dist-esm/serializers.js\");\n\nlet registeredSerializer = _serializers__WEBPACK_IMPORTED_MODULE_0__[\"DefaultSerializer\"];\nfunction registerSerializer(serializer) {\n registeredSerializer = Object(_serializers__WEBPACK_IMPORTED_MODULE_0__[\"extendSerializer\"])(registeredSerializer, serializer);\n}\nfunction deserialize(message) {\n return registeredSerializer.deserialize(message);\n}\nfunction serialize(input) {\n return registeredSerializer.serialize(input);\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/common.js?");
950
951/***/ }),
952
953/***/ "./node_modules/threads/dist-esm/index.js":
954/*!************************************************!*\
955 !*** ./node_modules/threads/dist-esm/index.js ***!
956 \************************************************/
957/*! exports provided: registerSerializer, Pool, spawn, Thread, isWorkerRuntime, BlobWorker, Worker, expose, DefaultSerializer, Transfer */
958/***/ (function(module, __webpack_exports__, __webpack_require__) {
959
960"use strict";
961eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./common */ \"./node_modules/threads/dist-esm/common.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"registerSerializer\", function() { return _common__WEBPACK_IMPORTED_MODULE_0__[\"registerSerializer\"]; });\n\n/* harmony import */ var _master_index__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./master/index */ \"./node_modules/threads/dist-esm/master/index.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Pool\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"Pool\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"spawn\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"spawn\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Thread\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"Thread\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"isWorkerRuntime\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"BlobWorker\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"BlobWorker\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Worker\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\"Worker\"]; });\n\n/* harmony import */ var _worker_index__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./worker/index */ \"./node_modules/threads/dist-esm/worker/index.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"expose\", function() { return _worker_index__WEBPACK_IMPORTED_MODULE_2__[\"expose\"]; });\n\n/* harmony import */ var _serializers__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./serializers */ \"./node_modules/threads/dist-esm/serializers.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"DefaultSerializer\", function() { return _serializers__WEBPACK_IMPORTED_MODULE_3__[\"DefaultSerializer\"]; });\n\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./transferable */ \"./node_modules/threads/dist-esm/transferable.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Transfer\", function() { return _transferable__WEBPACK_IMPORTED_MODULE_4__[\"Transfer\"]; });\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/index.js?");
962
963/***/ }),
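// The module above only re-exports the public API surface of threads. A consumer
// would typically pull the pieces it needs from the package root (sketch):
//
//   const { spawn, Thread, Worker, Pool, expose, Transfer } = require("threads");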
964
965/***/ "./node_modules/threads/dist-esm/master/get-bundle-url.browser.js":
966/*!************************************************************************!*\
967 !*** ./node_modules/threads/dist-esm/master/get-bundle-url.browser.js ***!
968 \************************************************************************/
969/*! exports provided: getBaseURL, getBundleURL */
970/***/ (function(module, __webpack_exports__, __webpack_require__) {
971
972"use strict";
973eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getBaseURL\", function() { return getBaseURL; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getBundleURL\", function() { return getBundleURLCached; });\n// Source: <https://github.com/parcel-bundler/parcel/blob/master/packages/core/parcel-bundler/src/builtins/bundle-url.js>\nlet bundleURL;\nfunction getBundleURLCached() {\n if (!bundleURL) {\n bundleURL = getBundleURL();\n }\n return bundleURL;\n}\nfunction getBundleURL() {\n // Attempt to find the URL of the current script and use that as the base URL\n try {\n throw new Error;\n }\n catch (err) {\n const matches = (\"\" + err.stack).match(/(https?|file|ftp|chrome-extension|moz-extension):\\/\\/[^)\\n]+/g);\n if (matches) {\n return getBaseURL(matches[0]);\n }\n }\n return \"/\";\n}\nfunction getBaseURL(url) {\n return (\"\" + url).replace(/^((?:https?|file|ftp|chrome-extension|moz-extension):\\/\\/.+)?\\/[^/]+(?:\\?.*)?$/, '$1') + '/';\n}\n\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/get-bundle-url.browser.js?");
974
975/***/ }),
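// Condensed sketch of the trick used above: throw an Error just to read its stack,
// pull the first script URL out of it, and strip the filename to get a base URL.
// The regex is simplified for illustration; the bundled version also handles
// file:, ftp: and browser-extension URLs.
//
//   function currentScriptBaseURL() {
//     try {
//       throw new Error();
//     } catch (err) {
//       const match = ("" + err.stack).match(/https?:\/\/[^)\n]+/);
//       return match ? match[0].replace(/\/[^/]+(\?.*)?$/, "") + "/" : "/";
//     }
//   }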
976
977/***/ "./node_modules/threads/dist-esm/master/implementation.browser.js":
978/*!************************************************************************!*\
979 !*** ./node_modules/threads/dist-esm/master/implementation.browser.js ***!
980 \************************************************************************/
981/*! exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */
982/***/ (function(module, __webpack_exports__, __webpack_require__) {
983
984"use strict";
985eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"defaultPoolSize\", function() { return defaultPoolSize; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getWorkerImplementation\", function() { return getWorkerImplementation; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return isWorkerRuntime; });\n/* harmony import */ var _get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./get-bundle-url.browser */ \"./node_modules/threads/dist-esm/master/get-bundle-url.browser.js\");\n// tslint:disable max-classes-per-file\n\nconst defaultPoolSize = typeof navigator !== \"undefined\" && navigator.hardwareConcurrency\n ? navigator.hardwareConcurrency\n : 4;\nconst isAbsoluteURL = (value) => /^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.test(value);\nfunction createSourceBlobURL(code) {\n const blob = new Blob([code], { type: \"application/javascript\" });\n return URL.createObjectURL(blob);\n}\nfunction selectWorkerImplementation() {\n if (typeof Worker === \"undefined\") {\n // Might happen on Safari, for instance\n // The idea is to only fail if the constructor is actually used\n return class NoWebWorker {\n constructor() {\n throw Error(\"No web worker implementation available. You might have tried to spawn a worker within a worker in a browser that doesn't support workers in workers.\");\n }\n };\n }\n class WebWorker extends Worker {\n constructor(url, options) {\n var _a, _b;\n if (typeof url === \"string\" && options && options._baseURL) {\n url = new URL(url, options._baseURL);\n }\n else if (typeof url === \"string\" && !isAbsoluteURL(url) && Object(_get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__[\"getBundleURL\"])().match(/^file:\\/\\//i)) {\n url = new URL(url, Object(_get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__[\"getBundleURL\"])().replace(/\\/[^\\/]+$/, \"/\"));\n if ((_a = options === null || options === void 0 ? void 0 : options.CORSWorkaround) !== null && _a !== void 0 ? _a : true) {\n url = createSourceBlobURL(`importScripts(${JSON.stringify(url)});`);\n }\n }\n if (typeof url === \"string\" && isAbsoluteURL(url)) {\n // Create source code blob loading JS file via `importScripts()`\n // to circumvent worker CORS restrictions\n if ((_b = options === null || options === void 0 ? void 0 : options.CORSWorkaround) !== null && _b !== void 0 ? _b : true) {\n url = createSourceBlobURL(`importScripts(${JSON.stringify(url)});`);\n }\n }\n super(url, options);\n }\n }\n class BlobWorker extends WebWorker {\n constructor(blob, options) {\n const url = window.URL.createObjectURL(blob);\n super(url, options);\n }\n static fromText(source, options) {\n const blob = new window.Blob([source], { type: \"text/javascript\" });\n return new BlobWorker(blob, options);\n }\n }\n return {\n blob: BlobWorker,\n default: WebWorker\n };\n}\nlet implementation;\nfunction getWorkerImplementation() {\n if (!implementation) {\n implementation = selectWorkerImplementation();\n }\n return implementation;\n}\nfunction isWorkerRuntime() {\n const isWindowContext = typeof self !== \"undefined\" && typeof Window !== \"undefined\" && self instanceof Window;\n return typeof self !== \"undefined\" && self.postMessage && !isWindowContext ? true : false;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.browser.js?");
986
987/***/ }),
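// Sketch of the CORS workaround applied above: instead of constructing a Worker
// from a cross-origin URL directly (which browsers refuse), build a tiny
// same-origin blob whose only job is to importScripts() the real file. The
// function name is illustrative, not part of the bundle.
//
//   function spawnCrossOriginWorker(absoluteUrl) {
//     const source = `importScripts(${JSON.stringify(absoluteUrl)});`;
//     const blob = new Blob([source], { type: "application/javascript" });
//     return new Worker(URL.createObjectURL(blob));
//   }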
988
989/***/ "./node_modules/threads/dist-esm/master/implementation.js":
990/*!****************************************************************!*\
991 !*** ./node_modules/threads/dist-esm/master/implementation.js ***!
992 \****************************************************************/
993/*! exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */
994/***/ (function(module, __webpack_exports__, __webpack_require__) {
995
996"use strict";
997eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"defaultPoolSize\", function() { return defaultPoolSize; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getWorkerImplementation\", function() { return getWorkerImplementation; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return isWorkerRuntime; });\n/* harmony import */ var _implementation_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation.browser */ \"./node_modules/threads/dist-esm/master/implementation.browser.js\");\n/* harmony import */ var _implementation_node__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./implementation.node */ \"./node_modules/threads/dist-esm/master/implementation.node.js\");\n/*\n * This file is only a stub to make './implementation' resolve to the right module.\n */\n// We alias `src/master/implementation` to `src/master/implementation.browser` for web\n// browsers already in the package.json, so if get here, it's safe to pass-through the\n// node implementation\n\n\nconst runningInNode = typeof process !== 'undefined' && process.arch !== 'browser' && 'pid' in process;\nconst implementation = runningInNode ? _implementation_node__WEBPACK_IMPORTED_MODULE_1__ : _implementation_browser__WEBPACK_IMPORTED_MODULE_0__;\n/** Default size of pools. Depending on the platform the value might vary from device to device. */\nconst defaultPoolSize = implementation.defaultPoolSize;\nconst getWorkerImplementation = implementation.getWorkerImplementation;\n/** Returns `true` if this code is currently running in a worker. */\nconst isWorkerRuntime = implementation.isWorkerRuntime;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.js?");
998
999/***/ }),
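// The stub above boils down to a runtime switch (sketch; nodeImplementation and
// browserImplementation stand in for the two modules bundled directly above and below):
//
//   const runningInNode =
//     typeof process !== "undefined" && process.arch !== "browser" && "pid" in process;
//   const implementation = runningInNode ? nodeImplementation : browserImplementation;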
1000
1001/***/ "./node_modules/threads/dist-esm/master/implementation.node.js":
1002/*!*********************************************************************!*\
1003 !*** ./node_modules/threads/dist-esm/master/implementation.node.js ***!
1004 \*********************************************************************/
1005/*! exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */
1006/***/ (function(module, __webpack_exports__, __webpack_require__) {
1007
1008"use strict";
1009eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"defaultPoolSize\", function() { return defaultPoolSize; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"getWorkerImplementation\", function() { return getWorkerImplementation; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return isWorkerRuntime; });\n/* harmony import */ var callsites__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! callsites */ \"./node_modules/callsites/index.js\");\n/* harmony import */ var callsites__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(callsites__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! events */ \"events\");\n/* harmony import */ var events__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(events__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var os__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! os */ \"os\");\n/* harmony import */ var os__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(os__WEBPACK_IMPORTED_MODULE_2__);\n/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! path */ \"path\");\n/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_3__);\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! url */ \"url\");\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(url__WEBPACK_IMPORTED_MODULE_4__);\n/// <reference lib=\"dom\" />\n// tslint:disable function-constructor no-eval no-duplicate-super max-classes-per-file\n\n\n\n\n\nlet tsNodeAvailable;\nconst defaultPoolSize = Object(os__WEBPACK_IMPORTED_MODULE_2__[\"cpus\"])().length;\nfunction detectTsNode() {\n if (typeof require === \"function\") {\n // Webpack build: => No ts-node required or possible\n return false;\n }\n if (tsNodeAvailable) {\n return tsNodeAvailable;\n }\n try {\n eval(\"require\").resolve(\"ts-node\");\n tsNodeAvailable = true;\n }\n catch (error) {\n if (error && error.code === \"MODULE_NOT_FOUND\") {\n tsNodeAvailable = false;\n }\n else {\n // Re-throw\n throw error;\n }\n }\n return tsNodeAvailable;\n}\nfunction createTsNodeModule(scriptPath) {\n const content = `\n require(\"ts-node/register/transpile-only\");\n require(${JSON.stringify(scriptPath)});\n `;\n return content;\n}\nfunction rebaseScriptPath(scriptPath, ignoreRegex) {\n const parentCallSite = callsites__WEBPACK_IMPORTED_MODULE_0___default()().find((callsite) => {\n const filename = callsite.getFileName();\n return Boolean(filename &&\n !filename.match(ignoreRegex) &&\n !filename.match(/[\\/\\\\]master[\\/\\\\]implementation/) &&\n !filename.match(/^internal\\/process/));\n });\n const rawCallerPath = parentCallSite ? parentCallSite.getFileName() : null;\n let callerPath = rawCallerPath ? rawCallerPath : null;\n if (callerPath && callerPath.startsWith('file:')) {\n callerPath = Object(url__WEBPACK_IMPORTED_MODULE_4__[\"fileURLToPath\"])(callerPath);\n }\n const rebasedScriptPath = callerPath ? 
path__WEBPACK_IMPORTED_MODULE_3__[\"join\"](path__WEBPACK_IMPORTED_MODULE_3__[\"dirname\"](callerPath), scriptPath) : scriptPath;\n return rebasedScriptPath;\n}\nfunction resolveScriptPath(scriptPath, baseURL) {\n const makeRelative = (filePath) => {\n // eval() hack is also webpack-related\n return path__WEBPACK_IMPORTED_MODULE_3__[\"isAbsolute\"](filePath) ? filePath : path__WEBPACK_IMPORTED_MODULE_3__[\"join\"](baseURL || eval(\"__dirname\"), filePath);\n };\n const workerFilePath = typeof require === \"function\"\n ? require.resolve(makeRelative(scriptPath))\n : eval(\"require\").resolve(makeRelative(rebaseScriptPath(scriptPath, /[\\/\\\\]worker_threads[\\/\\\\]/)));\n return workerFilePath;\n}\nfunction initWorkerThreadsWorker() {\n // Webpack hack\n const NativeWorker = typeof require === \"function\"\n ? require(\"worker_threads\").Worker\n : eval(\"require\")(\"worker_threads\").Worker;\n let allWorkers = [];\n class Worker extends NativeWorker {\n constructor(scriptPath, options) {\n const resolvedScriptPath = options && options.fromSource\n ? null\n : resolveScriptPath(scriptPath, (options || {})._baseURL);\n if (!resolvedScriptPath) {\n // `options.fromSource` is true\n const sourceCode = scriptPath;\n super(sourceCode, Object.assign(Object.assign({}, options), { eval: true }));\n }\n else if (resolvedScriptPath.match(/\\.tsx?$/i) && detectTsNode()) {\n super(createTsNodeModule(resolvedScriptPath), Object.assign(Object.assign({}, options), { eval: true }));\n }\n else if (resolvedScriptPath.match(/\\.asar[\\/\\\\]/)) {\n // See <https://github.com/andywer/threads-plugin/issues/17>\n super(resolvedScriptPath.replace(/\\.asar([\\/\\\\])/, \".asar.unpacked$1\"), options);\n }\n else {\n super(resolvedScriptPath, options);\n }\n this.mappedEventListeners = new WeakMap();\n allWorkers.push(this);\n }\n addEventListener(eventName, rawListener) {\n const listener = (message) => {\n rawListener({ data: message });\n };\n this.mappedEventListeners.set(rawListener, listener);\n this.on(eventName, listener);\n }\n removeEventListener(eventName, rawListener) {\n const listener = this.mappedEventListeners.get(rawListener) || rawListener;\n this.off(eventName, listener);\n }\n }\n const terminateWorkersAndMaster = () => {\n // we should terminate all workers and then gracefully shutdown self process\n Promise.all(allWorkers.map(worker => worker.terminate())).then(() => process.exit(0), () => process.exit(1));\n allWorkers = [];\n };\n // Take care to not leave orphaned processes behind. See #147.\n process.on(\"SIGINT\", () => terminateWorkersAndMaster());\n process.on(\"SIGTERM\", () => terminateWorkersAndMaster());\n class BlobWorker extends Worker {\n constructor(blob, options) {\n super(Buffer.from(blob).toString(\"utf-8\"), Object.assign(Object.assign({}, options), { fromSource: true }));\n }\n static fromText(source, options) {\n return new Worker(source, Object.assign(Object.assign({}, options), { fromSource: true }));\n }\n }\n return {\n blob: BlobWorker,\n default: Worker\n };\n}\nfunction initTinyWorker() {\n const TinyWorker = __webpack_require__(/*! tiny-worker */ \"./node_modules/tiny-worker/lib/index.js\");\n let allWorkers = [];\n class Worker extends TinyWorker {\n constructor(scriptPath, options) {\n // Need to apply a work-around for Windows or it will choke upon the absolute path\n // (`Error [ERR_INVALID_PROTOCOL]: Protocol 'c:' not supported`)\n const resolvedScriptPath = options && options.fromSource\n ? null\n : process.platform === \"win32\"\n ? 
`file:///${resolveScriptPath(scriptPath).replace(/\\\\/g, \"/\")}`\n : resolveScriptPath(scriptPath);\n if (!resolvedScriptPath) {\n // `options.fromSource` is true\n const sourceCode = scriptPath;\n super(new Function(sourceCode), [], { esm: true });\n }\n else if (resolvedScriptPath.match(/\\.tsx?$/i) && detectTsNode()) {\n super(new Function(createTsNodeModule(resolveScriptPath(scriptPath))), [], { esm: true });\n }\n else if (resolvedScriptPath.match(/\\.asar[\\/\\\\]/)) {\n // See <https://github.com/andywer/threads-plugin/issues/17>\n super(resolvedScriptPath.replace(/\\.asar([\\/\\\\])/, \".asar.unpacked$1\"), [], { esm: true });\n }\n else {\n super(resolvedScriptPath, [], { esm: true });\n }\n allWorkers.push(this);\n this.emitter = new events__WEBPACK_IMPORTED_MODULE_1__[\"EventEmitter\"]();\n this.onerror = (error) => this.emitter.emit(\"error\", error);\n this.onmessage = (message) => this.emitter.emit(\"message\", message);\n }\n addEventListener(eventName, listener) {\n this.emitter.addListener(eventName, listener);\n }\n removeEventListener(eventName, listener) {\n this.emitter.removeListener(eventName, listener);\n }\n terminate() {\n allWorkers = allWorkers.filter(worker => worker !== this);\n return super.terminate();\n }\n }\n const terminateWorkersAndMaster = () => {\n // we should terminate all workers and then gracefully shutdown self process\n Promise.all(allWorkers.map(worker => worker.terminate())).then(() => process.exit(0), () => process.exit(1));\n allWorkers = [];\n };\n // Take care to not leave orphaned processes behind\n // See <https://github.com/avoidwork/tiny-worker#faq>\n process.on(\"SIGINT\", () => terminateWorkersAndMaster());\n process.on(\"SIGTERM\", () => terminateWorkersAndMaster());\n class BlobWorker extends Worker {\n constructor(blob, options) {\n super(Buffer.from(blob).toString(\"utf-8\"), Object.assign(Object.assign({}, options), { fromSource: true }));\n }\n static fromText(source, options) {\n return new Worker(source, Object.assign(Object.assign({}, options), { fromSource: true }));\n }\n }\n return {\n blob: BlobWorker,\n default: Worker\n };\n}\nlet implementation;\nlet isTinyWorker;\nfunction selectWorkerImplementation() {\n try {\n isTinyWorker = false;\n return initWorkerThreadsWorker();\n }\n catch (error) {\n // tslint:disable-next-line no-console\n console.debug(\"Node worker_threads not available. Trying to fall back to tiny-worker polyfill...\");\n isTinyWorker = true;\n return initTinyWorker();\n }\n}\nfunction getWorkerImplementation() {\n if (!implementation) {\n implementation = selectWorkerImplementation();\n }\n return implementation;\n}\nfunction isWorkerRuntime() {\n if (isTinyWorker) {\n return typeof self !== \"undefined\" && self.postMessage ? true : false;\n }\n else {\n // Webpack hack\n const isMainThread = typeof require === \"function\"\n ? require(\"worker_threads\").isMainThread\n : eval(\"require\")(\"worker_threads\").isMainThread;\n return !isMainThread;\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.node.js?");
1010
1011/***/ }),
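// On Node the module above prefers native worker_threads and only falls back to the
// tiny-worker polyfill when that require() throws (very old Node versions). Sketch of
// the selection logic plus spawning via the resulting Worker class; the script path
// is hypothetical.
//
//   let workers;
//   try {
//     workers = initWorkerThreadsWorker();   // backed by require("worker_threads")
//   } catch (error) {
//     workers = initTinyWorker();            // polyfill via the tiny-worker package
//   }
//   const worker = new workers.default("./worker.js");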
1012
1013/***/ "./node_modules/threads/dist-esm/master/index.js":
1014/*!*******************************************************!*\
1015 !*** ./node_modules/threads/dist-esm/master/index.js ***!
1016 \*******************************************************/
1017/*! exports provided: Pool, spawn, Thread, isWorkerRuntime, BlobWorker, Worker */
1018/***/ (function(module, __webpack_exports__, __webpack_require__) {
1019
1020"use strict";
1021eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"BlobWorker\", function() { return BlobWorker; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Worker\", function() { return Worker; });\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation */ \"./node_modules/threads/dist-esm/master/implementation.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return _implementation__WEBPACK_IMPORTED_MODULE_0__[\"isWorkerRuntime\"]; });\n\n/* harmony import */ var _pool__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./pool */ \"./node_modules/threads/dist-esm/master/pool.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Pool\", function() { return _pool__WEBPACK_IMPORTED_MODULE_1__[\"Pool\"]; });\n\n/* harmony import */ var _spawn__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./spawn */ \"./node_modules/threads/dist-esm/master/spawn.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"spawn\", function() { return _spawn__WEBPACK_IMPORTED_MODULE_2__[\"spawn\"]; });\n\n/* harmony import */ var _thread__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./thread */ \"./node_modules/threads/dist-esm/master/thread.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Thread\", function() { return _thread__WEBPACK_IMPORTED_MODULE_3__[\"Thread\"]; });\n\n\n\n\n\n\n/** Separate class to spawn workers from source code blobs or strings. */\nconst BlobWorker = Object(_implementation__WEBPACK_IMPORTED_MODULE_0__[\"getWorkerImplementation\"])().blob;\n/** Worker implementation. Either web worker or a node.js Worker class. */\nconst Worker = Object(_implementation__WEBPACK_IMPORTED_MODULE_0__[\"getWorkerImplementation\"])().default;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/index.js?");
1022
1023/***/ }),
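// BlobWorker and Worker above are just the environment-specific classes picked by
// getWorkerImplementation(). Sketch (worker path and source string are hypothetical):
//
//   const { BlobWorker, Worker } = require("threads");
//   const fileWorker = new Worker("./workers/crunch");
//   const inlineWorker = BlobWorker.fromText(workerSourceString);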
1024
1025/***/ "./node_modules/threads/dist-esm/master/invocation-proxy.js":
1026/*!******************************************************************!*\
1027 !*** ./node_modules/threads/dist-esm/master/invocation-proxy.js ***!
1028 \******************************************************************/
1029/*! exports provided: createProxyFunction, createProxyModule */
1030/***/ (function(module, __webpack_exports__, __webpack_require__) {
1031
1032"use strict";
1033eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"createProxyFunction\", function() { return createProxyFunction; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"createProxyModule\", function() { return createProxyModule; });\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! debug */ \"./node_modules/threads/node_modules/debug/src/index.js\");\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \"./node_modules/observable-fns/dist.esm/index.js\");\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../common */ \"./node_modules/threads/dist-esm/common.js\");\n/* harmony import */ var _observable_promise__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../observable-promise */ \"./node_modules/threads/dist-esm/observable-promise.js\");\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../transferable */ \"./node_modules/threads/dist-esm/transferable.js\");\n/* harmony import */ var _types_messages__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/messages */ \"./node_modules/threads/dist-esm/types/messages.js\");\n/*\n * This source file contains the code for proxying calls in the master thread to calls in the workers\n * by `.postMessage()`-ing.\n *\n * Keep in mind that this code can make or break the program's performance! Need to optimize more…\n */\n\n\n\n\n\n\nconst debugMessages = debug__WEBPACK_IMPORTED_MODULE_0___default()(\"threads:master:messages\");\nlet nextJobUID = 1;\nconst dedupe = (array) => Array.from(new Set(array));\nconst isJobErrorMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\"WorkerMessageType\"].error;\nconst isJobResultMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\"WorkerMessageType\"].result;\nconst isJobStartMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\"WorkerMessageType\"].running;\nfunction createObservableForJob(worker, jobUID) {\n return new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"Observable\"](observer => {\n let asyncType;\n const messageHandler = ((event) => {\n debugMessages(\"Message from worker:\", event.data);\n if (!event.data || event.data.uid !== jobUID)\n return;\n if (isJobStartMessage(event.data)) {\n asyncType = event.data.resultType;\n }\n else if (isJobResultMessage(event.data)) {\n if (asyncType === \"promise\") {\n if (typeof event.data.payload !== \"undefined\") {\n observer.next(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"deserialize\"])(event.data.payload));\n }\n observer.complete();\n worker.removeEventListener(\"message\", messageHandler);\n }\n else {\n if (event.data.payload) {\n observer.next(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"deserialize\"])(event.data.payload));\n }\n if (event.data.complete) {\n observer.complete();\n worker.removeEventListener(\"message\", messageHandler);\n }\n }\n }\n else if (isJobErrorMessage(event.data)) {\n const error = Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"deserialize\"])(event.data.error);\n if (asyncType === \"promise\" || !asyncType) {\n observer.error(error);\n }\n else {\n observer.error(error);\n 
}\n worker.removeEventListener(\"message\", messageHandler);\n }\n });\n worker.addEventListener(\"message\", messageHandler);\n return () => {\n if (asyncType === \"observable\" || !asyncType) {\n const cancelMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_5__[\"MasterMessageType\"].cancel,\n uid: jobUID\n };\n worker.postMessage(cancelMessage);\n }\n worker.removeEventListener(\"message\", messageHandler);\n };\n });\n}\nfunction prepareArguments(rawArgs) {\n if (rawArgs.length === 0) {\n // Exit early if possible\n return {\n args: [],\n transferables: []\n };\n }\n const args = [];\n const transferables = [];\n for (const arg of rawArgs) {\n if (Object(_transferable__WEBPACK_IMPORTED_MODULE_4__[\"isTransferDescriptor\"])(arg)) {\n args.push(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"serialize\"])(arg.send));\n transferables.push(...arg.transferables);\n }\n else {\n args.push(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"serialize\"])(arg));\n }\n }\n return {\n args,\n transferables: transferables.length === 0 ? transferables : dedupe(transferables)\n };\n}\nfunction createProxyFunction(worker, method) {\n return ((...rawArgs) => {\n const uid = nextJobUID++;\n const { args, transferables } = prepareArguments(rawArgs);\n const runMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_5__[\"MasterMessageType\"].run,\n uid,\n method,\n args\n };\n debugMessages(\"Sending command to run function to worker:\", runMessage);\n try {\n worker.postMessage(runMessage, transferables);\n }\n catch (error) {\n return _observable_promise__WEBPACK_IMPORTED_MODULE_3__[\"ObservablePromise\"].from(Promise.reject(error));\n }\n return _observable_promise__WEBPACK_IMPORTED_MODULE_3__[\"ObservablePromise\"].from(Object(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"multicast\"])(createObservableForJob(worker, uid)));\n });\n}\nfunction createProxyModule(worker, methodNames) {\n const proxy = {};\n for (const methodName of methodNames) {\n proxy[methodName] = createProxyFunction(worker, methodName);\n }\n return proxy;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/invocation-proxy.js?");
1034
1035/***/ }),
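// The proxy functions created above postMessage() a "run" message per call and turn
// the worker's result/error messages back into an ObservablePromise. Internal sketch;
// spawn() normally does this for you, and the method names come from the worker's
// init message:
//
//   const api = createProxyModule(worker, ["add", "multiply"]);
//   api.add(2, 3).then(sum => console.log(sum));        // consume as a promise
//   api.add(2, 3).subscribe(sum => console.log(sum));   // or as an observable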
1036
1037/***/ "./node_modules/threads/dist-esm/master/pool-types.js":
1038/*!************************************************************!*\
1039 !*** ./node_modules/threads/dist-esm/master/pool-types.js ***!
1040 \************************************************************/
1041/*! exports provided: PoolEventType */
1042/***/ (function(module, __webpack_exports__, __webpack_require__) {
1043
1044"use strict";
1045eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"PoolEventType\", function() { return PoolEventType; });\n/** Pool event type. Specifies the type of each `PoolEvent`. */\nvar PoolEventType;\n(function (PoolEventType) {\n PoolEventType[\"initialized\"] = \"initialized\";\n PoolEventType[\"taskCanceled\"] = \"taskCanceled\";\n PoolEventType[\"taskCompleted\"] = \"taskCompleted\";\n PoolEventType[\"taskFailed\"] = \"taskFailed\";\n PoolEventType[\"taskQueued\"] = \"taskQueued\";\n PoolEventType[\"taskQueueDrained\"] = \"taskQueueDrained\";\n PoolEventType[\"taskStart\"] = \"taskStart\";\n PoolEventType[\"terminated\"] = \"terminated\";\n})(PoolEventType || (PoolEventType = {}));\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/pool-types.js?");
1046
1047/***/ }),
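// PoolEventType values show up as the `type` field of events emitted by pool.events().
// Sketch of watching for completed tasks (subscribe-and-check keeps the example to
// API confirmed by the modules above and below):
//
//   pool.events().subscribe(event => {
//     if (event.type === Pool.EventType.taskCompleted) {
//       console.log(`task #${event.taskID} ->`, event.returnValue);
//     }
//   });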
1048
1049/***/ "./node_modules/threads/dist-esm/master/pool.js":
1050/*!******************************************************!*\
1051 !*** ./node_modules/threads/dist-esm/master/pool.js ***!
1052 \******************************************************/
1053/*! exports provided: PoolEventType, Thread, Pool */
1054/***/ (function(module, __webpack_exports__, __webpack_require__) {
1055
1056"use strict";
1057eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Pool\", function() { return Pool; });\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! debug */ \"./node_modules/threads/node_modules/debug/src/index.js\");\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \"./node_modules/observable-fns/dist.esm/index.js\");\n/* harmony import */ var _ponyfills__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../ponyfills */ \"./node_modules/threads/dist-esm/ponyfills.js\");\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./implementation */ \"./node_modules/threads/dist-esm/master/implementation.js\");\n/* harmony import */ var _pool_types__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./pool-types */ \"./node_modules/threads/dist-esm/master/pool-types.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"PoolEventType\", function() { return _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"]; });\n\n/* harmony import */ var _thread__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./thread */ \"./node_modules/threads/dist-esm/master/thread.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Thread\", function() { return _thread__WEBPACK_IMPORTED_MODULE_5__[\"Thread\"]; });\n\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\n\n\n\n\nlet nextPoolID = 1;\nfunction createArray(size) {\n const array = [];\n for (let index = 0; index < size; index++) {\n array.push(index);\n }\n return array;\n}\nfunction delay(ms) {\n return new Promise(resolve => setTimeout(resolve, ms));\n}\nfunction flatMap(array, mapper) {\n return array.reduce((flattened, element) => [...flattened, ...mapper(element)], []);\n}\nfunction slugify(text) {\n return text.replace(/\\W/g, \" \").trim().replace(/\\s+/g, \"-\");\n}\nfunction spawnWorkers(spawnWorker, count) {\n return createArray(count).map(() => ({\n init: spawnWorker(),\n runningTasks: []\n }));\n}\nclass WorkerPool {\n constructor(spawnWorker, optionsOrSize) {\n this.eventSubject = new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"Subject\"]();\n this.initErrors = [];\n this.isClosing = false;\n this.nextTaskID = 1;\n this.taskQueue = [];\n const options = typeof optionsOrSize === \"number\"\n ? 
{ size: optionsOrSize }\n : optionsOrSize || {};\n const { size = _implementation__WEBPACK_IMPORTED_MODULE_3__[\"defaultPoolSize\"] } = options;\n this.debug = debug__WEBPACK_IMPORTED_MODULE_0___default()(`threads:pool:${slugify(options.name || String(nextPoolID++))}`);\n this.options = options;\n this.workers = spawnWorkers(spawnWorker, size);\n this.eventObservable = Object(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"multicast\"])(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"Observable\"].from(this.eventSubject));\n Promise.all(this.workers.map(worker => worker.init)).then(() => this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].initialized,\n size: this.workers.length\n }), error => {\n this.debug(\"Error while initializing pool worker:\", error);\n this.eventSubject.error(error);\n this.initErrors.push(error);\n });\n }\n findIdlingWorker() {\n const { concurrency = 1 } = this.options;\n return this.workers.find(worker => worker.runningTasks.length < concurrency);\n }\n runPoolTask(worker, task) {\n return __awaiter(this, void 0, void 0, function* () {\n const workerID = this.workers.indexOf(worker) + 1;\n this.debug(`Running task #${task.id} on worker #${workerID}...`);\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskStart,\n taskID: task.id,\n workerID\n });\n try {\n const returnValue = yield task.run(yield worker.init);\n this.debug(`Task #${task.id} completed successfully`);\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskCompleted,\n returnValue,\n taskID: task.id,\n workerID\n });\n }\n catch (error) {\n this.debug(`Task #${task.id} failed`);\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskFailed,\n taskID: task.id,\n error,\n workerID\n });\n }\n });\n }\n run(worker, task) {\n return __awaiter(this, void 0, void 0, function* () {\n const runPromise = (() => __awaiter(this, void 0, void 0, function* () {\n const removeTaskFromWorkersRunningTasks = () => {\n worker.runningTasks = worker.runningTasks.filter(someRunPromise => someRunPromise !== runPromise);\n };\n // Defer task execution by one tick to give handlers time to subscribe\n yield delay(0);\n try {\n yield this.runPoolTask(worker, task);\n }\n finally {\n removeTaskFromWorkersRunningTasks();\n if (!this.isClosing) {\n this.scheduleWork();\n }\n }\n }))();\n worker.runningTasks.push(runPromise);\n });\n }\n scheduleWork() {\n this.debug(`Attempt de-queueing a task in order to run it...`);\n const availableWorker = this.findIdlingWorker();\n if (!availableWorker)\n return;\n const nextTask = this.taskQueue.shift();\n if (!nextTask) {\n this.debug(`Task queue is empty`);\n this.eventSubject.next({ type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskQueueDrained });\n return;\n }\n this.run(availableWorker, nextTask);\n }\n taskCompletion(taskID) {\n return new Promise((resolve, reject) => {\n const eventSubscription = this.events().subscribe(event => {\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskCompleted && event.taskID === taskID) {\n eventSubscription.unsubscribe();\n resolve(event.returnValue);\n }\n else if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskFailed && event.taskID === taskID) {\n eventSubscription.unsubscribe();\n reject(event.error);\n }\n else if (event.type === 
_pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].terminated) {\n eventSubscription.unsubscribe();\n reject(Error(\"Pool has been terminated before task was run.\"));\n }\n });\n });\n }\n settled(allowResolvingImmediately = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const getCurrentlyRunningTasks = () => flatMap(this.workers, worker => worker.runningTasks);\n const taskFailures = [];\n const failureSubscription = this.eventObservable.subscribe(event => {\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskFailed) {\n taskFailures.push(event.error);\n }\n });\n if (this.initErrors.length > 0) {\n return Promise.reject(this.initErrors[0]);\n }\n if (allowResolvingImmediately && this.taskQueue.length === 0) {\n yield Object(_ponyfills__WEBPACK_IMPORTED_MODULE_2__[\"allSettled\"])(getCurrentlyRunningTasks());\n return taskFailures;\n }\n yield new Promise((resolve, reject) => {\n const subscription = this.eventObservable.subscribe({\n next(event) {\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskQueueDrained) {\n subscription.unsubscribe();\n resolve(void 0);\n }\n },\n error: reject // make a pool-wide error reject the completed() result promise\n });\n });\n yield Object(_ponyfills__WEBPACK_IMPORTED_MODULE_2__[\"allSettled\"])(getCurrentlyRunningTasks());\n failureSubscription.unsubscribe();\n return taskFailures;\n });\n }\n completed(allowResolvingImmediately = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const settlementPromise = this.settled(allowResolvingImmediately);\n const earlyExitPromise = new Promise((resolve, reject) => {\n const subscription = this.eventObservable.subscribe({\n next(event) {\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskQueueDrained) {\n subscription.unsubscribe();\n resolve(settlementPromise);\n }\n else if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskFailed) {\n subscription.unsubscribe();\n reject(event.error);\n }\n },\n error: reject // make a pool-wide error reject the completed() result promise\n });\n });\n const errors = yield Promise.race([\n settlementPromise,\n earlyExitPromise\n ]);\n if (errors.length > 0) {\n throw errors[0];\n }\n });\n }\n events() {\n return this.eventObservable;\n }\n queue(taskFunction) {\n const { maxQueuedJobs = Infinity } = this.options;\n if (this.isClosing) {\n throw Error(`Cannot schedule pool tasks after terminate() has been called.`);\n }\n if (this.initErrors.length > 0) {\n throw this.initErrors[0];\n }\n const taskID = this.nextTaskID++;\n const taskCompletion = this.taskCompletion(taskID);\n taskCompletion.catch((error) => {\n // Prevent unhandled rejections here as we assume the user will use\n // `pool.completed()`, `pool.settled()` or `task.catch()` to handle errors\n this.debug(`Task #${taskID} errored:`, error);\n });\n const task = {\n id: taskID,\n run: taskFunction,\n cancel: () => {\n if (this.taskQueue.indexOf(task) === -1)\n return;\n this.taskQueue = this.taskQueue.filter(someTask => someTask !== task);\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskCanceled,\n taskID: task.id\n });\n },\n then: taskCompletion.then.bind(taskCompletion)\n };\n if (this.taskQueue.length >= maxQueuedJobs) {\n throw Error(\"Maximum number of pool tasks queued. 
Refusing to queue another one.\\n\" +\n \"This usually happens for one of two reasons: We are either at peak \" +\n \"workload right now or some tasks just won't finish, thus blocking the pool.\");\n }\n this.debug(`Queueing task #${task.id}...`);\n this.taskQueue.push(task);\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].taskQueued,\n taskID: task.id\n });\n this.scheduleWork();\n return task;\n }\n terminate(force) {\n return __awaiter(this, void 0, void 0, function* () {\n this.isClosing = true;\n if (!force) {\n yield this.completed(true);\n }\n this.eventSubject.next({\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"].terminated,\n remainingQueue: [...this.taskQueue]\n });\n this.eventSubject.complete();\n yield Promise.all(this.workers.map((worker) => __awaiter(this, void 0, void 0, function* () { return _thread__WEBPACK_IMPORTED_MODULE_5__[\"Thread\"].terminate(yield worker.init); })));\n });\n }\n}\nWorkerPool.EventType = _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"];\n/**\n * Thread pool constructor. Creates a new pool and spawns its worker threads.\n */\nfunction PoolConstructor(spawnWorker, optionsOrSize) {\n // The function exists only so we don't need to use `new` to create a pool (we still can, though).\n // If the Pool is a class or not is an implementation detail that should not concern the user.\n return new WorkerPool(spawnWorker, optionsOrSize);\n}\nPoolConstructor.EventType = _pool_types__WEBPACK_IMPORTED_MODULE_4__[\"PoolEventType\"];\n/**\n * Thread pool constructor. Creates a new pool and spawns its worker threads.\n */\nconst Pool = PoolConstructor;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/pool.js?");
1058
1059/***/ }),
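// End-to-end Pool sketch (worker path and method name are hypothetical, and the
// awaits assume an async context): the pool spawns `size` workers up front, queue()
// hands each task a spawned thread, and completed()/terminate() drain and shut it down.
//
//   const { Pool, spawn, Worker } = require("threads");
//
//   const pool = Pool(() => spawn(new Worker("./workers/crunch")), 4);
//   const task = pool.queue(crunch => crunch.heavyComputation(42));
//   task.then(result => console.log("result:", result));
//
//   await pool.completed();   // resolves once the queue drains, rejects on the first failure
//   await pool.terminate();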
1060
1061/***/ "./node_modules/threads/dist-esm/master/spawn.js":
1062/*!*******************************************************!*\
1063 !*** ./node_modules/threads/dist-esm/master/spawn.js ***!
1064 \*******************************************************/
1065/*! exports provided: spawn */
1066/***/ (function(module, __webpack_exports__, __webpack_require__) {
1067
1068"use strict";
1069eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"spawn\", function() { return spawn; });\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! debug */ \"./node_modules/threads/node_modules/debug/src/index.js\");\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \"./node_modules/observable-fns/dist.esm/index.js\");\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../common */ \"./node_modules/threads/dist-esm/common.js\");\n/* harmony import */ var _promise__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../promise */ \"./node_modules/threads/dist-esm/promise.js\");\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../symbols */ \"./node_modules/threads/dist-esm/symbols.js\");\n/* harmony import */ var _types_master__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/master */ \"./node_modules/threads/dist-esm/types/master.js\");\n/* harmony import */ var _invocation_proxy__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./invocation-proxy */ \"./node_modules/threads/dist-esm/master/invocation-proxy.js\");\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\n\n\n\n\nconst debugMessages = debug__WEBPACK_IMPORTED_MODULE_0___default()(\"threads:master:messages\");\nconst debugSpawn = debug__WEBPACK_IMPORTED_MODULE_0___default()(\"threads:master:spawn\");\nconst debugThreadUtils = debug__WEBPACK_IMPORTED_MODULE_0___default()(\"threads:master:thread-utils\");\nconst isInitMessage = (data) => data && data.type === \"init\";\nconst isUncaughtErrorMessage = (data) => data && data.type === \"uncaughtError\";\nconst initMessageTimeout = typeof process !== \"undefined\" && process.env.THREADS_WORKER_INIT_TIMEOUT\n ? 
Number.parseInt(process.env.THREADS_WORKER_INIT_TIMEOUT, 10)\n : 10000;\nfunction withTimeout(promise, timeoutInMs, errorMessage) {\n return __awaiter(this, void 0, void 0, function* () {\n let timeoutHandle;\n const timeout = new Promise((resolve, reject) => {\n timeoutHandle = setTimeout(() => reject(Error(errorMessage)), timeoutInMs);\n });\n const result = yield Promise.race([\n promise,\n timeout\n ]);\n clearTimeout(timeoutHandle);\n return result;\n });\n}\nfunction receiveInitMessage(worker) {\n return new Promise((resolve, reject) => {\n const messageHandler = ((event) => {\n debugMessages(\"Message from worker before finishing initialization:\", event.data);\n if (isInitMessage(event.data)) {\n worker.removeEventListener(\"message\", messageHandler);\n resolve(event.data);\n }\n else if (isUncaughtErrorMessage(event.data)) {\n worker.removeEventListener(\"message\", messageHandler);\n reject(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\"deserialize\"])(event.data.error));\n }\n });\n worker.addEventListener(\"message\", messageHandler);\n });\n}\nfunction createEventObservable(worker, workerTermination) {\n return new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\"Observable\"](observer => {\n const messageHandler = ((messageEvent) => {\n const workerEvent = {\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\"WorkerEventType\"].message,\n data: messageEvent.data\n };\n observer.next(workerEvent);\n });\n const rejectionHandler = ((errorEvent) => {\n debugThreadUtils(\"Unhandled promise rejection event in thread:\", errorEvent);\n const workerEvent = {\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\"WorkerEventType\"].internalError,\n error: Error(errorEvent.reason)\n };\n observer.next(workerEvent);\n });\n worker.addEventListener(\"message\", messageHandler);\n worker.addEventListener(\"unhandledrejection\", rejectionHandler);\n workerTermination.then(() => {\n const terminationEvent = {\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\"WorkerEventType\"].termination\n };\n worker.removeEventListener(\"message\", messageHandler);\n worker.removeEventListener(\"unhandledrejection\", rejectionHandler);\n observer.next(terminationEvent);\n observer.complete();\n });\n });\n}\nfunction createTerminator(worker) {\n const [termination, resolver] = Object(_promise__WEBPACK_IMPORTED_MODULE_3__[\"createPromiseWithResolver\"])();\n const terminate = () => __awaiter(this, void 0, void 0, function* () {\n debugThreadUtils(\"Terminating worker\");\n // Newer versions of worker_threads workers return a promise\n yield worker.terminate();\n resolver();\n });\n return { terminate, termination };\n}\nfunction setPrivateThreadProps(raw, worker, workerEvents, terminate) {\n const workerErrors = workerEvents\n .filter(event => event.type === _types_master__WEBPACK_IMPORTED_MODULE_5__[\"WorkerEventType\"].internalError)\n .map(errorEvent => errorEvent.error);\n // tslint:disable-next-line prefer-object-spread\n return Object.assign(raw, {\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\"$errors\"]]: workerErrors,\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\"$events\"]]: workerEvents,\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\"$terminate\"]]: terminate,\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\"$worker\"]]: worker\n });\n}\n/**\n * Spawn a new thread. Takes a fresh worker instance, wraps it in a thin\n * abstraction layer to provide the transparent API and verifies that\n * the worker has initialized successfully.\n *\n * @param worker Instance of `Worker`. 
Either a web worker, `worker_threads` worker or `tiny-worker` worker.\n * @param [options]\n * @param [options.timeout] Init message timeout. Default: 10000 or set by environment variable.\n */\nfunction spawn(worker, options) {\n return __awaiter(this, void 0, void 0, function* () {\n debugSpawn(\"Initializing new thread\");\n const timeout = options && options.timeout ? options.timeout : initMessageTimeout;\n const initMessage = yield withTimeout(receiveInitMessage(worker), timeout, `Timeout: Did not receive an init message from worker after ${timeout}ms. Make sure the worker calls expose().`);\n const exposed = initMessage.exposed;\n const { termination, terminate } = createTerminator(worker);\n const events = createEventObservable(worker, termination);\n if (exposed.type === \"function\") {\n const proxy = Object(_invocation_proxy__WEBPACK_IMPORTED_MODULE_6__[\"createProxyFunction\"])(worker);\n return setPrivateThreadProps(proxy, worker, events, terminate);\n }\n else if (exposed.type === \"module\") {\n const proxy = Object(_invocation_proxy__WEBPACK_IMPORTED_MODULE_6__[\"createProxyModule\"])(worker, exposed.methods);\n return setPrivateThreadProps(proxy, worker, events, terminate);\n }\n else {\n const type = exposed.type;\n throw Error(`Worker init message states unexpected type of expose(): ${type}`);\n }\n });\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/spawn.js?");
1070
1071/***/ }),
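// Matching expose()/spawn() pair (sketch; file names are hypothetical). The worker
// must call expose(), otherwise spawn() rejects after `options.timeout` ms
// (default 10000, overridable via THREADS_WORKER_INIT_TIMEOUT).
//
//   // workers/add.js — runs inside the worker:
//   const { expose } = require("threads");
//   expose({ add: (a, b) => a + b });
//
//   // master side (inside an async function):
//   const { spawn, Thread, Worker } = require("threads");
//   const adder = await spawn(new Worker("./workers/add"), { timeout: 30000 });
//   console.log(await adder.add(2, 3));   // -> 5
//   await Thread.terminate(adder);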
1072
1073/***/ "./node_modules/threads/dist-esm/master/thread.js":
1074/*!********************************************************!*\
1075 !*** ./node_modules/threads/dist-esm/master/thread.js ***!
1076 \********************************************************/
1077/*! exports provided: Thread */
1078/***/ (function(module, __webpack_exports__, __webpack_require__) {
1079
1080"use strict";
1081eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Thread\", function() { return Thread; });\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../symbols */ \"./node_modules/threads/dist-esm/symbols.js\");\n\nfunction fail(message) {\n throw Error(message);\n}\n/** Thread utility functions. Use them to manage or inspect a `spawn()`-ed thread. */\nconst Thread = {\n /** Return an observable that can be used to subscribe to all errors happening in the thread. */\n errors(thread) {\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\"$errors\"]] || fail(\"Error observable not found. Make sure to pass a thread instance as returned by the spawn() promise.\");\n },\n /** Return an observable that can be used to subscribe to internal events happening in the thread. Useful for debugging. */\n events(thread) {\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\"$events\"]] || fail(\"Events observable not found. Make sure to pass a thread instance as returned by the spawn() promise.\");\n },\n /** Terminate a thread. Remember to terminate every thread when you are done using it. */\n terminate(thread) {\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\"$terminate\"]]();\n }\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/thread.js?");
1082
1083/***/ }),
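// Thread utility sketch for a thread returned by spawn():
//
//   Thread.errors(thread).subscribe(error => console.error("worker error:", error));
//   Thread.events(thread).subscribe(event => console.log("worker event:", event));
//   await Thread.terminate(thread);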
1084
1085/***/ "./node_modules/threads/dist-esm/observable-promise.js":
1086/*!*************************************************************!*\
1087 !*** ./node_modules/threads/dist-esm/observable-promise.js ***!
1088 \*************************************************************/
1089/*! exports provided: ObservablePromise */
1090/***/ (function(module, __webpack_exports__, __webpack_require__) {
1091
1092"use strict";
1093eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"ObservablePromise\", function() { return ObservablePromise; });\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! observable-fns */ \"./node_modules/observable-fns/dist.esm/index.js\");\n\nconst doNothing = () => undefined;\nconst returnInput = (input) => input;\nconst runDeferred = (fn) => Promise.resolve().then(fn);\nfunction fail(error) {\n throw error;\n}\nfunction isThenable(thing) {\n return thing && typeof thing.then === \"function\";\n}\n/**\n * Creates a hybrid, combining the APIs of an Observable and a Promise.\n *\n * It is used to proxy async process states when we are initially not sure\n * if that async process will yield values once (-> Promise) or multiple\n * times (-> Observable).\n *\n * Note that the observable promise inherits some of the observable's characteristics:\n * The `init` function will be called *once for every time anyone subscribes to it*.\n *\n * If this is undesired, derive a hot observable from it using `makeHot()` and\n * subscribe to that.\n */\nclass ObservablePromise extends observable_fns__WEBPACK_IMPORTED_MODULE_0__[\"Observable\"] {\n constructor(init) {\n super((originalObserver) => {\n // tslint:disable-next-line no-this-assignment\n const self = this;\n const observer = Object.assign(Object.assign({}, originalObserver), { complete() {\n originalObserver.complete();\n self.onCompletion();\n }, error(error) {\n originalObserver.error(error);\n self.onError(error);\n },\n next(value) {\n originalObserver.next(value);\n self.onNext(value);\n } });\n try {\n this.initHasRun = true;\n return init(observer);\n }\n catch (error) {\n observer.error(error);\n }\n });\n this.initHasRun = false;\n this.fulfillmentCallbacks = [];\n this.rejectionCallbacks = [];\n this.firstValueSet = false;\n this.state = \"pending\";\n }\n onNext(value) {\n if (!this.firstValueSet) {\n this.firstValue = value;\n this.firstValueSet = true;\n }\n }\n onError(error) {\n this.state = \"rejected\";\n this.rejection = error;\n for (const onRejected of this.rejectionCallbacks) {\n // Promisifying the call to turn errors into unhandled promise rejections\n // instead of them failing sync and cancelling the iteration\n runDeferred(() => onRejected(error));\n }\n }\n onCompletion() {\n this.state = \"fulfilled\";\n for (const onFulfilled of this.fulfillmentCallbacks) {\n // Promisifying the call to turn errors into unhandled promise rejections\n // instead of them failing sync and cancelling the iteration\n runDeferred(() => onFulfilled(this.firstValue));\n }\n }\n then(onFulfilledRaw, onRejectedRaw) {\n const onFulfilled = onFulfilledRaw || returnInput;\n const onRejected = onRejectedRaw || fail;\n let onRejectedCalled = false;\n return new Promise((resolve, reject) => {\n const rejectionCallback = (error) => {\n if (onRejectedCalled)\n return;\n onRejectedCalled = true;\n try {\n resolve(onRejected(error));\n }\n catch (anotherError) {\n reject(anotherError);\n }\n };\n const fulfillmentCallback = (value) => {\n try {\n resolve(onFulfilled(value));\n }\n catch (error) {\n rejectionCallback(error);\n }\n };\n if (!this.initHasRun) {\n this.subscribe({ error: rejectionCallback });\n }\n if (this.state === \"fulfilled\") {\n return resolve(onFulfilled(this.firstValue));\n }\n if (this.state === \"rejected\") {\n onRejectedCalled = true;\n return resolve(onRejected(this.rejection));\n }\n 
this.fulfillmentCallbacks.push(fulfillmentCallback);\n this.rejectionCallbacks.push(rejectionCallback);\n });\n }\n catch(onRejected) {\n return this.then(undefined, onRejected);\n }\n finally(onCompleted) {\n const handler = onCompleted || doNothing;\n return this.then((value) => {\n handler();\n return value;\n }, () => handler());\n }\n static from(thing) {\n if (isThenable(thing)) {\n return new ObservablePromise(observer => {\n const onFulfilled = (value) => {\n observer.next(value);\n observer.complete();\n };\n const onRejected = (error) => {\n observer.error(error);\n };\n thing.then(onFulfilled, onRejected);\n });\n }\n else {\n return super.from(thing);\n }\n }\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/observable-promise.js?");
1094
1095/***/ }),
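// ObservablePromise sketch: the same object can be awaited (it settles with the first
// emitted value once the stream completes) or subscribed to. As noted above, the init
// function re-runs for every subscriber, so derive a hot observable if that matters.
//
//   const hybrid = ObservablePromise.from(Promise.resolve(123));
//   hybrid.subscribe(value => console.log("observed", value));
//   const value = await hybrid;   // -> 123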
1096
1097/***/ "./node_modules/threads/dist-esm/ponyfills.js":
1098/*!****************************************************!*\
1099 !*** ./node_modules/threads/dist-esm/ponyfills.js ***!
1100 \****************************************************/
1101/*! exports provided: allSettled */
1102/***/ (function(module, __webpack_exports__, __webpack_require__) {
1103
1104"use strict";
1105eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"allSettled\", function() { return allSettled; });\n// Based on <https://github.com/es-shims/Promise.allSettled/blob/master/implementation.js>\nfunction allSettled(values) {\n return Promise.all(values.map(item => {\n const onFulfill = (value) => {\n return { status: 'fulfilled', value };\n };\n const onReject = (reason) => {\n return { status: 'rejected', reason };\n };\n const itemPromise = Promise.resolve(item);\n try {\n return itemPromise.then(onFulfill, onReject);\n }\n catch (error) {\n return Promise.reject(error);\n }\n }));\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/ponyfills.js?");
1106
1107/***/ }),
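// allSettled ponyfill sketch — same result shape as native Promise.allSettled:
//
//   const results = await allSettled([
//     Promise.resolve(1),
//     Promise.reject(new Error("nope"))
//   ]);
//   // -> [ { status: "fulfilled", value: 1 },
//   //      { status: "rejected",  reason: Error("nope") } ]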
1108
1109/***/ "./node_modules/threads/dist-esm/promise.js":
1110/*!**************************************************!*\
1111 !*** ./node_modules/threads/dist-esm/promise.js ***!
1112 \**************************************************/
1113/*! exports provided: createPromiseWithResolver */
1114/***/ (function(module, __webpack_exports__, __webpack_require__) {
1115
1116"use strict";
1117eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"createPromiseWithResolver\", function() { return createPromiseWithResolver; });\nconst doNothing = () => undefined;\n/**\n * Creates a new promise and exposes its resolver function.\n * Use with care!\n */\nfunction createPromiseWithResolver() {\n let alreadyResolved = false;\n let resolvedTo;\n let resolver = doNothing;\n const promise = new Promise(resolve => {\n if (alreadyResolved) {\n resolve(resolvedTo);\n }\n else {\n resolver = resolve;\n }\n });\n const exposedResolver = (value) => {\n alreadyResolved = true;\n resolvedTo = value;\n resolver(resolvedTo);\n };\n return [promise, exposedResolver];\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/promise.js?");
1118
1119/***/ }),
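/*
 * Usage sketch for createPromiseWithResolver above (illustrative only; the names
 * `ready` and `markReady` are hypothetical). It returns a [promise, resolver]
 * pair so the promise can be settled from outside its executor:
 *
 *   const [ready, markReady] = createPromiseWithResolver();
 *   ready.then(() => console.log("resolved externally"));
 *   markReady();
 */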
1120
1121/***/ "./node_modules/threads/dist-esm/serializers.js":
1122/*!******************************************************!*\
1123 !*** ./node_modules/threads/dist-esm/serializers.js ***!
1124 \******************************************************/
1125/*! exports provided: extendSerializer, DefaultSerializer */
1126/***/ (function(module, __webpack_exports__, __webpack_require__) {
1127
1128"use strict";
1129eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"extendSerializer\", function() { return extendSerializer; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"DefaultSerializer\", function() { return DefaultSerializer; });\nfunction extendSerializer(extend, implementation) {\n const fallbackDeserializer = extend.deserialize.bind(extend);\n const fallbackSerializer = extend.serialize.bind(extend);\n return {\n deserialize(message) {\n return implementation.deserialize(message, fallbackDeserializer);\n },\n serialize(input) {\n return implementation.serialize(input, fallbackSerializer);\n }\n };\n}\nconst DefaultErrorSerializer = {\n deserialize(message) {\n return Object.assign(Error(message.message), {\n name: message.name,\n stack: message.stack\n });\n },\n serialize(error) {\n return {\n __error_marker: \"$$error\",\n message: error.message,\n name: error.name,\n stack: error.stack\n };\n }\n};\nconst isSerializedError = (thing) => thing && typeof thing === \"object\" && \"__error_marker\" in thing && thing.__error_marker === \"$$error\";\nconst DefaultSerializer = {\n deserialize(message) {\n if (isSerializedError(message)) {\n return DefaultErrorSerializer.deserialize(message);\n }\n else {\n return message;\n }\n },\n serialize(input) {\n if (input instanceof Error) {\n return DefaultErrorSerializer.serialize(input);\n }\n else {\n return input;\n }\n }\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/serializers.js?");
1130
1131/***/ }),
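/*
 * Usage sketch for extendSerializer / DefaultSerializer above (illustrative only;
 * the Date handling and the `__date` marker are hypothetical). A custom serializer
 * wraps an existing one and falls back to it for anything it does not handle:
 *
 *   const DateSerializer = extendSerializer(DefaultSerializer, {
 *     serialize(input, fallback) {
 *       return input instanceof Date ? { __date: input.toISOString() } : fallback(input);
 *     },
 *     deserialize(message, fallback) {
 *       return message && message.__date ? new Date(message.__date) : fallback(message);
 *     }
 *   });
 */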
1132
1133/***/ "./node_modules/threads/dist-esm/symbols.js":
1134/*!**************************************************!*\
1135 !*** ./node_modules/threads/dist-esm/symbols.js ***!
1136 \**************************************************/
1137/*! exports provided: $errors, $events, $terminate, $transferable, $worker */
1138/***/ (function(module, __webpack_exports__, __webpack_require__) {
1139
1140"use strict";
1141eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"$errors\", function() { return $errors; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"$events\", function() { return $events; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"$terminate\", function() { return $terminate; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"$transferable\", function() { return $transferable; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"$worker\", function() { return $worker; });\nconst $errors = Symbol(\"thread.errors\");\nconst $events = Symbol(\"thread.events\");\nconst $terminate = Symbol(\"thread.terminate\");\nconst $transferable = Symbol(\"thread.transferable\");\nconst $worker = Symbol(\"thread.worker\");\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/symbols.js?");
1142
1143/***/ }),
1144
1145/***/ "./node_modules/threads/dist-esm/transferable.js":
1146/*!*******************************************************!*\
1147 !*** ./node_modules/threads/dist-esm/transferable.js ***!
1148 \*******************************************************/
1149/*! exports provided: isTransferDescriptor, Transfer */
1150/***/ (function(module, __webpack_exports__, __webpack_require__) {
1151
1152"use strict";
1153eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isTransferDescriptor\", function() { return isTransferDescriptor; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"Transfer\", function() { return Transfer; });\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./symbols */ \"./node_modules/threads/dist-esm/symbols.js\");\n\nfunction isTransferable(thing) {\n if (!thing || typeof thing !== \"object\")\n return false;\n // Don't check too thoroughly, since the list of transferable things in JS might grow over time\n return true;\n}\nfunction isTransferDescriptor(thing) {\n return thing && typeof thing === \"object\" && thing[_symbols__WEBPACK_IMPORTED_MODULE_0__[\"$transferable\"]];\n}\nfunction Transfer(payload, transferables) {\n if (!transferables) {\n if (!isTransferable(payload))\n throw Error();\n transferables = [payload];\n }\n return {\n [_symbols__WEBPACK_IMPORTED_MODULE_0__[\"$transferable\"]]: true,\n send: payload,\n transferables\n };\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/transferable.js?");
1154
1155/***/ }),
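/*
 * Usage sketch for Transfer() above (illustrative only). Wrapping a payload marks
 * the listed objects as transferables, so they are moved to the other thread
 * rather than structured-cloned:
 *
 *   const buffer = new ArrayBuffer(1024);
 *   const direct  = Transfer(buffer);                     // payload itself is transferred
 *   const wrapped = Transfer({ data: buffer }, [buffer]); // explicit transferable list
 */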
1156
1157/***/ "./node_modules/threads/dist-esm/types/master.js":
1158/*!*******************************************************!*\
1159 !*** ./node_modules/threads/dist-esm/types/master.js ***!
1160 \*******************************************************/
1161/*! exports provided: WorkerEventType */
1162/***/ (function(module, __webpack_exports__, __webpack_require__) {
1163
1164"use strict";
1165eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"WorkerEventType\", function() { return WorkerEventType; });\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../symbols */ \"./node_modules/threads/dist-esm/symbols.js\");\n/// <reference lib=\"dom\" />\n// tslint:disable max-classes-per-file\n\n/** Event as emitted by worker thread. Subscribe to using `Thread.events(thread)`. */\nvar WorkerEventType;\n(function (WorkerEventType) {\n WorkerEventType[\"internalError\"] = \"internalError\";\n WorkerEventType[\"message\"] = \"message\";\n WorkerEventType[\"termination\"] = \"termination\";\n})(WorkerEventType || (WorkerEventType = {}));\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/types/master.js?");
1166
1167/***/ }),
1168
1169/***/ "./node_modules/threads/dist-esm/types/messages.js":
1170/*!*********************************************************!*\
1171 !*** ./node_modules/threads/dist-esm/types/messages.js ***!
1172 \*********************************************************/
1173/*! exports provided: MasterMessageType, WorkerMessageType */
1174/***/ (function(module, __webpack_exports__, __webpack_require__) {
1175
1176"use strict";
1177eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"MasterMessageType\", function() { return MasterMessageType; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"WorkerMessageType\", function() { return WorkerMessageType; });\n/////////////////////////////\n// Messages sent by master:\nvar MasterMessageType;\n(function (MasterMessageType) {\n MasterMessageType[\"cancel\"] = \"cancel\";\n MasterMessageType[\"run\"] = \"run\";\n})(MasterMessageType || (MasterMessageType = {}));\n////////////////////////////\n// Messages sent by worker:\nvar WorkerMessageType;\n(function (WorkerMessageType) {\n WorkerMessageType[\"error\"] = \"error\";\n WorkerMessageType[\"init\"] = \"init\";\n WorkerMessageType[\"result\"] = \"result\";\n WorkerMessageType[\"running\"] = \"running\";\n WorkerMessageType[\"uncaughtError\"] = \"uncaughtError\";\n})(WorkerMessageType || (WorkerMessageType = {}));\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/types/messages.js?");
1178
1179/***/ }),
1180
1181/***/ "./node_modules/threads/dist-esm/worker/implementation.browser.js":
1182/*!************************************************************************!*\
1183 !*** ./node_modules/threads/dist-esm/worker/implementation.browser.js ***!
1184 \************************************************************************/
1185/*! exports provided: default */
1186/***/ (function(module, __webpack_exports__, __webpack_require__) {
1187
1188"use strict";
1189eval("__webpack_require__.r(__webpack_exports__);\n/// <reference lib=\"dom\" />\n// tslint:disable no-shadowed-variable\nconst isWorkerRuntime = function isWorkerRuntime() {\n const isWindowContext = typeof self !== \"undefined\" && typeof Window !== \"undefined\" && self instanceof Window;\n return typeof self !== \"undefined\" && self.postMessage && !isWindowContext ? true : false;\n};\nconst postMessageToMaster = function postMessageToMaster(data, transferList) {\n self.postMessage(data, transferList);\n};\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\n const messageHandler = (messageEvent) => {\n onMessage(messageEvent.data);\n };\n const unsubscribe = () => {\n self.removeEventListener(\"message\", messageHandler);\n };\n self.addEventListener(\"message\", messageHandler);\n return unsubscribe;\n};\n/* harmony default export */ __webpack_exports__[\"default\"] = ({\n isWorkerRuntime,\n postMessageToMaster,\n subscribeToMasterMessages\n});\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.browser.js?");
1190
1191/***/ }),
1192
1193/***/ "./node_modules/threads/dist-esm/worker/implementation.js":
1194/*!****************************************************************!*\
1195 !*** ./node_modules/threads/dist-esm/worker/implementation.js ***!
1196 \****************************************************************/
1197/*! exports provided: default */
1198/***/ (function(module, __webpack_exports__, __webpack_require__) {
1199
1200"use strict";
1201eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _implementation_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation.browser */ \"./node_modules/threads/dist-esm/worker/implementation.browser.js\");\n/* harmony import */ var _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./implementation.tiny-worker */ \"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\");\n/* harmony import */ var _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./implementation.worker_threads */ \"./node_modules/threads/dist-esm/worker/implementation.worker_threads.js\");\n// tslint:disable no-var-requires\n/*\n * This file is only a stub to make './implementation' resolve to the right module.\n */\n\n\n\nconst runningInNode = typeof process !== 'undefined' && process.arch !== 'browser' && 'pid' in process;\nfunction selectNodeImplementation() {\n try {\n _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__[\"default\"].testImplementation();\n return _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__[\"default\"];\n }\n catch (error) {\n return _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1___default.a;\n }\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = (runningInNode\n ? selectNodeImplementation()\n : _implementation_browser__WEBPACK_IMPORTED_MODULE_0__[\"default\"]);\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.js?");
1202
1203/***/ }),
1204
1205/***/ "./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js":
1206/*!****************************************************************************!*\
1207 !*** ./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js ***!
1208 \****************************************************************************/
1209/*! no static exports found */
1210/***/ (function(module, exports, __webpack_require__) {
1211
1212eval("module.exports = function() {\n return __webpack_require__(/*! !./node_modules/worker-loader/dist/workers/InlineWorker.js */ \"./node_modules/worker-loader/dist/workers/InlineWorker.js\")(\"/******/ (function(modules) { // webpackBootstrap\\n/******/ \\t// The module cache\\n/******/ \\tvar installedModules = {};\\n/******/\\n/******/ \\t// The require function\\n/******/ \\tfunction __webpack_require__(moduleId) {\\n/******/\\n/******/ \\t\\t// Check if module is in cache\\n/******/ \\t\\tif(installedModules[moduleId]) {\\n/******/ \\t\\t\\treturn installedModules[moduleId].exports;\\n/******/ \\t\\t}\\n/******/ \\t\\t// Create a new module (and put it into the cache)\\n/******/ \\t\\tvar module = installedModules[moduleId] = {\\n/******/ \\t\\t\\ti: moduleId,\\n/******/ \\t\\t\\tl: false,\\n/******/ \\t\\t\\texports: {}\\n/******/ \\t\\t};\\n/******/\\n/******/ \\t\\t// Execute the module function\\n/******/ \\t\\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\\n/******/\\n/******/ \\t\\t// Flag the module as loaded\\n/******/ \\t\\tmodule.l = true;\\n/******/\\n/******/ \\t\\t// Return the exports of the module\\n/******/ \\t\\treturn module.exports;\\n/******/ \\t}\\n/******/\\n/******/\\n/******/ \\t// expose the modules object (__webpack_modules__)\\n/******/ \\t__webpack_require__.m = modules;\\n/******/\\n/******/ \\t// expose the module cache\\n/******/ \\t__webpack_require__.c = installedModules;\\n/******/\\n/******/ \\t// define getter function for harmony exports\\n/******/ \\t__webpack_require__.d = function(exports, name, getter) {\\n/******/ \\t\\tif(!__webpack_require__.o(exports, name)) {\\n/******/ \\t\\t\\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\\n/******/ \\t\\t}\\n/******/ \\t};\\n/******/\\n/******/ \\t// define __esModule on exports\\n/******/ \\t__webpack_require__.r = function(exports) {\\n/******/ \\t\\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\\n/******/ \\t\\t\\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\\n/******/ \\t\\t}\\n/******/ \\t\\tObject.defineProperty(exports, '__esModule', { value: true });\\n/******/ \\t};\\n/******/\\n/******/ \\t// create a fake namespace object\\n/******/ \\t// mode & 1: value is a module id, require it\\n/******/ \\t// mode & 2: merge all properties of value into the ns\\n/******/ \\t// mode & 4: return value when already ns object\\n/******/ \\t// mode & 8|1: behave like require\\n/******/ \\t__webpack_require__.t = function(value, mode) {\\n/******/ \\t\\tif(mode & 1) value = __webpack_require__(value);\\n/******/ \\t\\tif(mode & 8) return value;\\n/******/ \\t\\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\\n/******/ \\t\\tvar ns = Object.create(null);\\n/******/ \\t\\t__webpack_require__.r(ns);\\n/******/ \\t\\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\\n/******/ \\t\\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\\n/******/ \\t\\treturn ns;\\n/******/ \\t};\\n/******/\\n/******/ \\t// getDefaultExport function for compatibility with non-harmony modules\\n/******/ \\t__webpack_require__.n = function(module) {\\n/******/ \\t\\tvar getter = module && module.__esModule ?\\n/******/ \\t\\t\\tfunction getDefault() { return module['default']; } :\\n/******/ \\t\\t\\tfunction getModuleExports() { return module; };\\n/******/ 
\\t\\t__webpack_require__.d(getter, 'a', getter);\\n/******/ \\t\\treturn getter;\\n/******/ \\t};\\n/******/\\n/******/ \\t// Object.prototype.hasOwnProperty.call\\n/******/ \\t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\\n/******/\\n/******/ \\t// __webpack_public_path__\\n/******/ \\t__webpack_require__.p = \\\"\\\";\\n/******/\\n/******/\\n/******/ \\t// Load entry module and return exports\\n/******/ \\treturn __webpack_require__(__webpack_require__.s = \\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\");\\n/******/ })\\n/************************************************************************/\\n/******/ ({\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\":\\n/*!****************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js ***!\\n \\\\****************************************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/// <reference lib=\\\\\\\"dom\\\\\\\" />\\\\n// tslint:disable no-shadowed-variable\\\\nif (typeof self === \\\\\\\"undefined\\\\\\\") {\\\\n global.self = global;\\\\n}\\\\nconst isWorkerRuntime = function isWorkerRuntime() {\\\\n return typeof self !== \\\\\\\"undefined\\\\\\\" && self.postMessage ? true : false;\\\\n};\\\\nconst postMessageToMaster = function postMessageToMaster(data) {\\\\n // TODO: Warn that Transferables are not supported on first attempt to use feature\\\\n self.postMessage(data);\\\\n};\\\\nlet muxingHandlerSetUp = false;\\\\nconst messageHandlers = new Set();\\\\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\\\\n if (!muxingHandlerSetUp) {\\\\n // We have one multiplexing message handler as tiny-worker's\\\\n // addEventListener() only allows you to set a single message handler\\\\n self.addEventListener(\\\\\\\"message\\\\\\\", ((event) => {\\\\n messageHandlers.forEach(handler => handler(event.data));\\\\n }));\\\\n muxingHandlerSetUp = true;\\\\n }\\\\n messageHandlers.add(onMessage);\\\\n const unsubscribe = () => messageHandlers.delete(onMessage);\\\\n return unsubscribe;\\\\n};\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = ({\\\\n isWorkerRuntime,\\\\n postMessageToMaster,\\\\n subscribeToMasterMessages\\\\n});\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js?\\\");\\n\\n/***/ })\\n\\n/******/ });\", null);\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js?");
1213
1214/***/ }),
1215
1216/***/ "./node_modules/threads/dist-esm/worker/implementation.worker_threads.js":
1217/*!*******************************************************************************!*\
1218 !*** ./node_modules/threads/dist-esm/worker/implementation.worker_threads.js ***!
1219 \*******************************************************************************/
1220/*! exports provided: default */
1221/***/ (function(module, __webpack_exports__, __webpack_require__) {
1222
1223"use strict";
1224eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _worker_threads__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../worker_threads */ \"./node_modules/threads/dist-esm/worker_threads.js\");\n\nfunction assertMessagePort(port) {\n if (!port) {\n throw Error(\"Invariant violation: MessagePort to parent is not available.\");\n }\n return port;\n}\nconst isWorkerRuntime = function isWorkerRuntime() {\n return !Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\"default\"])().isMainThread;\n};\nconst postMessageToMaster = function postMessageToMaster(data, transferList) {\n assertMessagePort(Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\"default\"])().parentPort).postMessage(data, transferList);\n};\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\n const parentPort = Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\"default\"])().parentPort;\n if (!parentPort) {\n throw Error(\"Invariant violation: MessagePort to parent is not available.\");\n }\n const messageHandler = (message) => {\n onMessage(message);\n };\n const unsubscribe = () => {\n assertMessagePort(parentPort).off(\"message\", messageHandler);\n };\n assertMessagePort(parentPort).on(\"message\", messageHandler);\n return unsubscribe;\n};\nfunction testImplementation() {\n // Will throw if `worker_threads` are not available\n Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\"default\"])();\n}\n/* harmony default export */ __webpack_exports__[\"default\"] = ({\n isWorkerRuntime,\n postMessageToMaster,\n subscribeToMasterMessages,\n testImplementation\n});\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.worker_threads.js?");
1225
1226/***/ }),
1227
1228/***/ "./node_modules/threads/dist-esm/worker/index.js":
1229/*!*******************************************************!*\
1230 !*** ./node_modules/threads/dist-esm/worker/index.js ***!
1231 \*******************************************************/
1232/*! exports provided: registerSerializer, Transfer, isWorkerRuntime, expose */
1233/***/ (function(module, __webpack_exports__, __webpack_require__) {
1234
1235"use strict";
1236eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"isWorkerRuntime\", function() { return isWorkerRuntime; });\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"expose\", function() { return expose; });\n/* harmony import */ var is_observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! is-observable */ \"./node_modules/is-observable/index.js\");\n/* harmony import */ var is_observable__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(is_observable__WEBPACK_IMPORTED_MODULE_0__);\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../common */ \"./node_modules/threads/dist-esm/common.js\");\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../transferable */ \"./node_modules/threads/dist-esm/transferable.js\");\n/* harmony import */ var _types_messages__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../types/messages */ \"./node_modules/threads/dist-esm/types/messages.js\");\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./implementation */ \"./node_modules/threads/dist-esm/worker/implementation.js\");\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"registerSerializer\", function() { return _common__WEBPACK_IMPORTED_MODULE_1__[\"registerSerializer\"]; });\n\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \"Transfer\", function() { return _transferable__WEBPACK_IMPORTED_MODULE_2__[\"Transfer\"]; });\n\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\n\n\n\n\n\n\n\n/** Returns `true` if this code is currently running in a worker. */\nconst isWorkerRuntime = _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].isWorkerRuntime;\nlet exposeCalled = false;\nconst activeSubscriptions = new Map();\nconst isMasterJobCancelMessage = (thing) => thing && thing.type === _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"MasterMessageType\"].cancel;\nconst isMasterJobRunMessage = (thing) => thing && thing.type === _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"MasterMessageType\"].run;\n/**\n * There are issues with `is-observable` not recognizing zen-observable's instances.\n * We are using `observable-fns`, but it's based on zen-observable, too.\n */\nconst isObservable = (thing) => is_observable__WEBPACK_IMPORTED_MODULE_0___default()(thing) || isZenObservable(thing);\nfunction isZenObservable(thing) {\n return thing && typeof thing === \"object\" && typeof thing.subscribe === \"function\";\n}\nfunction deconstructTransfer(thing) {\n return Object(_transferable__WEBPACK_IMPORTED_MODULE_2__[\"isTransferDescriptor\"])(thing)\n ? 
{ payload: thing.send, transferables: thing.transferables }\n : { payload: thing, transferables: undefined };\n}\nfunction postFunctionInitMessage() {\n const initMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].init,\n exposed: {\n type: \"function\"\n }\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(initMessage);\n}\nfunction postModuleInitMessage(methodNames) {\n const initMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].init,\n exposed: {\n type: \"module\",\n methods: methodNames\n }\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(initMessage);\n}\nfunction postJobErrorMessage(uid, rawError) {\n const { payload: error, transferables } = deconstructTransfer(rawError);\n const errorMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].error,\n uid,\n error: Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(error)\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(errorMessage, transferables);\n}\nfunction postJobResultMessage(uid, completed, resultValue) {\n const { payload, transferables } = deconstructTransfer(resultValue);\n const resultMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].result,\n uid,\n complete: completed ? true : undefined,\n payload\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(resultMessage, transferables);\n}\nfunction postJobStartMessage(uid, resultType) {\n const startMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].running,\n uid,\n resultType\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(startMessage);\n}\nfunction postUncaughtErrorMessage(error) {\n try {\n const errorMessage = {\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\"WorkerMessageType\"].uncaughtError,\n error: Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(error)\n };\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].postMessageToMaster(errorMessage);\n }\n catch (subError) {\n // tslint:disable-next-line no-console\n console.error(\"Not reporting uncaught error back to master thread as it \" +\n \"occured while reporting an uncaught error already.\" +\n \"\\nLatest error:\", subError, \"\\nOriginal error:\", error);\n }\n}\nfunction runFunction(jobUID, fn, args) {\n return __awaiter(this, void 0, void 0, function* () {\n let syncResult;\n try {\n syncResult = fn(...args);\n }\n catch (error) {\n return postJobErrorMessage(jobUID, error);\n }\n const resultType = isObservable(syncResult) ? 
\"observable\" : \"promise\";\n postJobStartMessage(jobUID, resultType);\n if (isObservable(syncResult)) {\n const subscription = syncResult.subscribe(value => postJobResultMessage(jobUID, false, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(value)), error => {\n postJobErrorMessage(jobUID, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(error));\n activeSubscriptions.delete(jobUID);\n }, () => {\n postJobResultMessage(jobUID, true);\n activeSubscriptions.delete(jobUID);\n });\n activeSubscriptions.set(jobUID, subscription);\n }\n else {\n try {\n const result = yield syncResult;\n postJobResultMessage(jobUID, true, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(result));\n }\n catch (error) {\n postJobErrorMessage(jobUID, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\"serialize\"])(error));\n }\n }\n });\n}\n/**\n * Expose a function or a module (an object whose values are functions)\n * to the main thread. Must be called exactly once in every worker thread\n * to signal its API to the main thread.\n *\n * @param exposed Function or object whose values are functions\n */\nfunction expose(exposed) {\n if (!_implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].isWorkerRuntime()) {\n throw Error(\"expose() called in the master thread.\");\n }\n if (exposeCalled) {\n throw Error(\"expose() called more than once. This is not possible. Pass an object to expose() if you want to expose multiple functions.\");\n }\n exposeCalled = true;\n if (typeof exposed === \"function\") {\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].subscribeToMasterMessages(messageData => {\n if (isMasterJobRunMessage(messageData) && !messageData.method) {\n runFunction(messageData.uid, exposed, messageData.args.map(_common__WEBPACK_IMPORTED_MODULE_1__[\"deserialize\"]));\n }\n });\n postFunctionInitMessage();\n }\n else if (typeof exposed === \"object\" && exposed) {\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].subscribeToMasterMessages(messageData => {\n if (isMasterJobRunMessage(messageData) && messageData.method) {\n runFunction(messageData.uid, exposed[messageData.method], messageData.args.map(_common__WEBPACK_IMPORTED_MODULE_1__[\"deserialize\"]));\n }\n });\n const methodNames = Object.keys(exposed).filter(key => typeof exposed[key] === \"function\");\n postModuleInitMessage(methodNames);\n }\n else {\n throw Error(`Invalid argument passed to expose(). 
Expected a function or an object, got: ${exposed}`);\n }\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].subscribeToMasterMessages(messageData => {\n if (isMasterJobCancelMessage(messageData)) {\n const jobUID = messageData.uid;\n const subscription = activeSubscriptions.get(jobUID);\n if (subscription) {\n subscription.unsubscribe();\n activeSubscriptions.delete(jobUID);\n }\n }\n });\n}\nif (typeof self !== \"undefined\" && typeof self.addEventListener === \"function\" && _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].isWorkerRuntime()) {\n self.addEventListener(\"error\", event => {\n // Post with some delay, so the master had some time to subscribe to messages\n setTimeout(() => postUncaughtErrorMessage(event.error || event), 250);\n });\n self.addEventListener(\"unhandledrejection\", event => {\n const error = event.reason;\n if (error && typeof error.message === \"string\") {\n // Post with some delay, so the master had some time to subscribe to messages\n setTimeout(() => postUncaughtErrorMessage(error), 250);\n }\n });\n}\nif (typeof process !== \"undefined\" && typeof process.on === \"function\" && _implementation__WEBPACK_IMPORTED_MODULE_4__[\"default\"].isWorkerRuntime()) {\n process.on(\"uncaughtException\", (error) => {\n // Post with some delay, so the master had some time to subscribe to messages\n setTimeout(() => postUncaughtErrorMessage(error), 250);\n });\n process.on(\"unhandledRejection\", (error) => {\n if (error && typeof error.message === \"string\") {\n // Post with some delay, so the master had some time to subscribe to messages\n setTimeout(() => postUncaughtErrorMessage(error), 250);\n }\n });\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/index.js?");
1237
1238/***/ }),
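/*
 * Usage sketch for expose() above (illustrative only; the worker module shown is
 * hypothetical). expose() must be called exactly once inside a worker and accepts
 * either a single function or an object of functions; a returned observable
 * streams multiple results back to the master thread instead of resolving once:
 *
 *   // inside a worker script
 *   expose({
 *     add: (a, b) => a + b
 *   });
 */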
1239
1240/***/ "./node_modules/threads/dist-esm/worker_threads.js":
1241/*!*********************************************************!*\
1242 !*** ./node_modules/threads/dist-esm/worker_threads.js ***!
1243 \*********************************************************/
1244/*! exports provided: default */
1245/***/ (function(module, __webpack_exports__, __webpack_require__) {
1246
1247"use strict";
1248eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \"default\", function() { return getImplementation; });\n// Webpack hack\n// tslint:disable no-eval\nlet implementation;\nfunction selectImplementation() {\n return typeof require === \"function\"\n ? require(\"worker_threads\")\n : eval(\"require\")(\"worker_threads\");\n}\nfunction getImplementation() {\n if (!implementation) {\n implementation = selectImplementation();\n }\n return implementation;\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker_threads.js?");
1249
1250/***/ }),
1251
1252/***/ "./node_modules/threads/node_modules/debug/src/browser.js":
1253/*!****************************************************************!*\
1254 !*** ./node_modules/threads/node_modules/debug/src/browser.js ***!
1255 \****************************************************************/
1256/*! no static exports found */
1257/***/ (function(module, exports, __webpack_require__) {
1258
1259eval("/* eslint-env browser */\n\n/**\n * This is the web browser implementation of `debug()`.\n */\n\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.storage = localstorage();\nexports.destroy = (() => {\n\tlet warned = false;\n\n\treturn () => {\n\t\tif (!warned) {\n\t\t\twarned = true;\n\t\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\n\t\t}\n\t};\n})();\n\n/**\n * Colors.\n */\n\nexports.colors = [\n\t'#0000CC',\n\t'#0000FF',\n\t'#0033CC',\n\t'#0033FF',\n\t'#0066CC',\n\t'#0066FF',\n\t'#0099CC',\n\t'#0099FF',\n\t'#00CC00',\n\t'#00CC33',\n\t'#00CC66',\n\t'#00CC99',\n\t'#00CCCC',\n\t'#00CCFF',\n\t'#3300CC',\n\t'#3300FF',\n\t'#3333CC',\n\t'#3333FF',\n\t'#3366CC',\n\t'#3366FF',\n\t'#3399CC',\n\t'#3399FF',\n\t'#33CC00',\n\t'#33CC33',\n\t'#33CC66',\n\t'#33CC99',\n\t'#33CCCC',\n\t'#33CCFF',\n\t'#6600CC',\n\t'#6600FF',\n\t'#6633CC',\n\t'#6633FF',\n\t'#66CC00',\n\t'#66CC33',\n\t'#9900CC',\n\t'#9900FF',\n\t'#9933CC',\n\t'#9933FF',\n\t'#99CC00',\n\t'#99CC33',\n\t'#CC0000',\n\t'#CC0033',\n\t'#CC0066',\n\t'#CC0099',\n\t'#CC00CC',\n\t'#CC00FF',\n\t'#CC3300',\n\t'#CC3333',\n\t'#CC3366',\n\t'#CC3399',\n\t'#CC33CC',\n\t'#CC33FF',\n\t'#CC6600',\n\t'#CC6633',\n\t'#CC9900',\n\t'#CC9933',\n\t'#CCCC00',\n\t'#CCCC33',\n\t'#FF0000',\n\t'#FF0033',\n\t'#FF0066',\n\t'#FF0099',\n\t'#FF00CC',\n\t'#FF00FF',\n\t'#FF3300',\n\t'#FF3333',\n\t'#FF3366',\n\t'#FF3399',\n\t'#FF33CC',\n\t'#FF33FF',\n\t'#FF6600',\n\t'#FF6633',\n\t'#FF9900',\n\t'#FF9933',\n\t'#FFCC00',\n\t'#FFCC33'\n];\n\n/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */\n\n// eslint-disable-next-line complexity\nfunction useColors() {\n\t// NB: In an Electron preload script, document will be defined but not fully\n\t// initialized. Since we know we're in Chrome, we'll just detect this case\n\t// explicitly\n\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\n\t\treturn true;\n\t}\n\n\t// Internet Explorer and Edge do not support colors.\n\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\/(\\d+)/)) {\n\t\treturn false;\n\t}\n\n\t// Is webkit? http://stackoverflow.com/a/16459606/376773\n\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n\t\t// Is firebug? 
http://stackoverflow.com/a/398120/376773\n\t\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n\t\t// Is firefox >= v31?\n\t\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n\t\t// Double check webkit in userAgent just in case we are in a worker\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}\n\n/**\n * Colorize log arguments if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\targs[0] = (this.useColors ? '%c' : '') +\n\t\tthis.namespace +\n\t\t(this.useColors ? ' %c' : ' ') +\n\t\targs[0] +\n\t\t(this.useColors ? '%c ' : ' ') +\n\t\t'+' + module.exports.humanize(this.diff);\n\n\tif (!this.useColors) {\n\t\treturn;\n\t}\n\n\tconst c = 'color: ' + this.color;\n\targs.splice(1, 0, c, 'color: inherit');\n\n\t// The final \"%c\" is somewhat tricky, because there could be other\n\t// arguments passed either before or after the %c, so we need to\n\t// figure out the correct index to insert the CSS into\n\tlet index = 0;\n\tlet lastC = 0;\n\targs[0].replace(/%[a-zA-Z%]/g, match => {\n\t\tif (match === '%%') {\n\t\t\treturn;\n\t\t}\n\t\tindex++;\n\t\tif (match === '%c') {\n\t\t\t// We only are interested in the *last* %c\n\t\t\t// (the user may have provided their own)\n\t\t\tlastC = index;\n\t\t}\n\t});\n\n\targs.splice(lastC, 0, c);\n}\n\n/**\n * Invokes `console.debug()` when available.\n * No-op when `console.debug` is not a \"function\".\n * If `console.debug` is not available, falls back\n * to `console.log`.\n *\n * @api public\n */\nexports.log = console.debug || console.log || (() => {});\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\ttry {\n\t\tif (namespaces) {\n\t\t\texports.storage.setItem('debug', namespaces);\n\t\t} else {\n\t\t\texports.storage.removeItem('debug');\n\t\t}\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\nfunction load() {\n\tlet r;\n\ttry {\n\t\tr = exports.storage.getItem('debug');\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n\n\t// If debug isn't set in LS, and we're in Electron, try to load $DEBUG\n\tif (!r && typeof process !== 'undefined' && 'env' in process) {\n\t\tr = process.env.DEBUG;\n\t}\n\n\treturn r;\n}\n\n/**\n * Localstorage attempts to return the localstorage.\n *\n * This is necessary because safari throws\n * when a user disables cookies/localstorage\n * and you attempt to access it.\n *\n * @return {LocalStorage}\n * @api private\n */\n\nfunction localstorage() {\n\ttry {\n\t\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\n\t\t// The Browser also has localStorage in the global context.\n\t\treturn localStorage;\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\nmodule.exports = __webpack_require__(/*! 
./common */ \"./node_modules/threads/node_modules/debug/src/common.js\")(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\n */\n\nformatters.j = function (v) {\n\ttry {\n\t\treturn JSON.stringify(v);\n\t} catch (error) {\n\t\treturn '[UnexpectedJSONParseError]: ' + error.message;\n\t}\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/browser.js?");
1260
1261/***/ }),
1262
1263/***/ "./node_modules/threads/node_modules/debug/src/common.js":
1264/*!***************************************************************!*\
1265 !*** ./node_modules/threads/node_modules/debug/src/common.js ***!
1266 \***************************************************************/
1267/*! no static exports found */
1268/***/ (function(module, exports, __webpack_require__) {
1269
1270eval("\n/**\n * This is the common logic for both the Node.js and web browser\n * implementations of `debug()`.\n */\n\nfunction setup(env) {\n\tcreateDebug.debug = createDebug;\n\tcreateDebug.default = createDebug;\n\tcreateDebug.coerce = coerce;\n\tcreateDebug.disable = disable;\n\tcreateDebug.enable = enable;\n\tcreateDebug.enabled = enabled;\n\tcreateDebug.humanize = __webpack_require__(/*! ms */ \"./node_modules/threads/node_modules/ms/index.js\");\n\tcreateDebug.destroy = destroy;\n\n\tObject.keys(env).forEach(key => {\n\t\tcreateDebug[key] = env[key];\n\t});\n\n\t/**\n\t* The currently active debug mode names, and names to skip.\n\t*/\n\n\tcreateDebug.names = [];\n\tcreateDebug.skips = [];\n\n\t/**\n\t* Map of special \"%n\" handling functions, for the debug \"format\" argument.\n\t*\n\t* Valid key names are a single, lower or upper-case letter, i.e. \"n\" and \"N\".\n\t*/\n\tcreateDebug.formatters = {};\n\n\t/**\n\t* Selects a color for a debug namespace\n\t* @param {String} namespace The namespace string for the for the debug instance to be colored\n\t* @return {Number|String} An ANSI color code for the given namespace\n\t* @api private\n\t*/\n\tfunction selectColor(namespace) {\n\t\tlet hash = 0;\n\n\t\tfor (let i = 0; i < namespace.length; i++) {\n\t\t\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\n\t\t\thash |= 0; // Convert to 32bit integer\n\t\t}\n\n\t\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\n\t}\n\tcreateDebug.selectColor = selectColor;\n\n\t/**\n\t* Create a debugger with the given `namespace`.\n\t*\n\t* @param {String} namespace\n\t* @return {Function}\n\t* @api public\n\t*/\n\tfunction createDebug(namespace) {\n\t\tlet prevTime;\n\t\tlet enableOverride = null;\n\t\tlet namespacesCache;\n\t\tlet enabledCache;\n\n\t\tfunction debug(...args) {\n\t\t\t// Disabled?\n\t\t\tif (!debug.enabled) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst self = debug;\n\n\t\t\t// Set `diff` timestamp\n\t\t\tconst curr = Number(new Date());\n\t\t\tconst ms = curr - (prevTime || curr);\n\t\t\tself.diff = ms;\n\t\t\tself.prev = prevTime;\n\t\t\tself.curr = curr;\n\t\t\tprevTime = curr;\n\n\t\t\targs[0] = createDebug.coerce(args[0]);\n\n\t\t\tif (typeof args[0] !== 'string') {\n\t\t\t\t// Anything else let's inspect with %O\n\t\t\t\targs.unshift('%O');\n\t\t\t}\n\n\t\t\t// Apply any `formatters` transformations\n\t\t\tlet index = 0;\n\t\t\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\n\t\t\t\t// If we encounter an escaped % then don't increase the array index\n\t\t\t\tif (match === '%%') {\n\t\t\t\t\treturn '%';\n\t\t\t\t}\n\t\t\t\tindex++;\n\t\t\t\tconst formatter = createDebug.formatters[format];\n\t\t\t\tif (typeof formatter === 'function') {\n\t\t\t\t\tconst val = args[index];\n\t\t\t\t\tmatch = formatter.call(self, val);\n\n\t\t\t\t\t// Now we need to remove `args[index]` since it's inlined in the `format`\n\t\t\t\t\targs.splice(index, 1);\n\t\t\t\t\tindex--;\n\t\t\t\t}\n\t\t\t\treturn match;\n\t\t\t});\n\n\t\t\t// Apply env-specific formatting (colors, etc.)\n\t\t\tcreateDebug.formatArgs.call(self, args);\n\n\t\t\tconst logFn = self.log || createDebug.log;\n\t\t\tlogFn.apply(self, args);\n\t\t}\n\n\t\tdebug.namespace = namespace;\n\t\tdebug.useColors = createDebug.useColors();\n\t\tdebug.color = createDebug.selectColor(namespace);\n\t\tdebug.extend = extend;\n\t\tdebug.destroy = createDebug.destroy; // XXX Temporary. 
Will be removed in the next major release.\n\n\t\tObject.defineProperty(debug, 'enabled', {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false,\n\t\t\tget: () => {\n\t\t\t\tif (enableOverride !== null) {\n\t\t\t\t\treturn enableOverride;\n\t\t\t\t}\n\t\t\t\tif (namespacesCache !== createDebug.namespaces) {\n\t\t\t\t\tnamespacesCache = createDebug.namespaces;\n\t\t\t\t\tenabledCache = createDebug.enabled(namespace);\n\t\t\t\t}\n\n\t\t\t\treturn enabledCache;\n\t\t\t},\n\t\t\tset: v => {\n\t\t\t\tenableOverride = v;\n\t\t\t}\n\t\t});\n\n\t\t// Env-specific initialization logic for debug instances\n\t\tif (typeof createDebug.init === 'function') {\n\t\t\tcreateDebug.init(debug);\n\t\t}\n\n\t\treturn debug;\n\t}\n\n\tfunction extend(namespace, delimiter) {\n\t\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\n\t\tnewDebug.log = this.log;\n\t\treturn newDebug;\n\t}\n\n\t/**\n\t* Enables a debug mode by namespaces. This can include modes\n\t* separated by a colon and wildcards.\n\t*\n\t* @param {String} namespaces\n\t* @api public\n\t*/\n\tfunction enable(namespaces) {\n\t\tcreateDebug.save(namespaces);\n\t\tcreateDebug.namespaces = namespaces;\n\n\t\tcreateDebug.names = [];\n\t\tcreateDebug.skips = [];\n\n\t\tlet i;\n\t\tconst split = (typeof namespaces === 'string' ? namespaces : '').split(/[\\s,]+/);\n\t\tconst len = split.length;\n\n\t\tfor (i = 0; i < len; i++) {\n\t\t\tif (!split[i]) {\n\t\t\t\t// ignore empty strings\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tnamespaces = split[i].replace(/\\*/g, '.*?');\n\n\t\t\tif (namespaces[0] === '-') {\n\t\t\t\tcreateDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));\n\t\t\t} else {\n\t\t\t\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t* Disable debug output.\n\t*\n\t* @return {String} namespaces\n\t* @api public\n\t*/\n\tfunction disable() {\n\t\tconst namespaces = [\n\t\t\t...createDebug.names.map(toNamespace),\n\t\t\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\n\t\t].join(',');\n\t\tcreateDebug.enable('');\n\t\treturn namespaces;\n\t}\n\n\t/**\n\t* Returns true if the given mode name is enabled, false otherwise.\n\t*\n\t* @param {String} name\n\t* @return {Boolean}\n\t* @api public\n\t*/\n\tfunction enabled(name) {\n\t\tif (name[name.length - 1] === '*') {\n\t\t\treturn true;\n\t\t}\n\n\t\tlet i;\n\t\tlet len;\n\n\t\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\n\t\t\tif (createDebug.skips[i].test(name)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\n\t\t\tif (createDebug.names[i].test(name)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\treturn false;\n\t}\n\n\t/**\n\t* Convert regexp to namespace\n\t*\n\t* @param {RegExp} regxep\n\t* @return {String} namespace\n\t* @api private\n\t*/\n\tfunction toNamespace(regexp) {\n\t\treturn regexp.toString()\n\t\t\t.substring(2, regexp.toString().length - 2)\n\t\t\t.replace(/\\.\\*\\?$/, '*');\n\t}\n\n\t/**\n\t* Coerce `val`.\n\t*\n\t* @param {Mixed} val\n\t* @return {Mixed}\n\t* @api private\n\t*/\n\tfunction coerce(val) {\n\t\tif (val instanceof Error) {\n\t\t\treturn val.stack || val.message;\n\t\t}\n\t\treturn val;\n\t}\n\n\t/**\n\t* XXX DO NOT USE. This is a temporary stub function.\n\t* XXX It WILL be removed in the next major release.\n\t*/\n\tfunction destroy() {\n\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\n\t}\n\n\tcreateDebug.enable(createDebug.load());\n\n\treturn createDebug;\n}\n\nmodule.exports = setup;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/common.js?");
1271
1272/***/ }),
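/*
 * Usage sketch for the debug() factory configured above (illustrative only; the
 * namespace "georaster:worker" is hypothetical). Instances stay silent until their
 * namespace is enabled, via the DEBUG environment variable in Node.js or the
 * localStorage "debug" key in browsers; a trailing "*" acts as a wildcard:
 *
 *   const log = createDebug("georaster:worker");
 *   log("loaded %d tiles", 3);   // printed only when e.g. DEBUG=georaster:* is set
 */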
1273
1274/***/ "./node_modules/threads/node_modules/debug/src/index.js":
1275/*!**************************************************************!*\
1276 !*** ./node_modules/threads/node_modules/debug/src/index.js ***!
1277 \**************************************************************/
1278/*! no static exports found */
1279/***/ (function(module, exports, __webpack_require__) {
1280
1281eval("/**\n * Detect Electron renderer / nwjs process, which is node, but we should\n * treat as a browser.\n */\n\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\n\tmodule.exports = __webpack_require__(/*! ./browser.js */ \"./node_modules/threads/node_modules/debug/src/browser.js\");\n} else {\n\tmodule.exports = __webpack_require__(/*! ./node.js */ \"./node_modules/threads/node_modules/debug/src/node.js\");\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/index.js?");
1282
1283/***/ }),
1284
1285/***/ "./node_modules/threads/node_modules/debug/src/node.js":
1286/*!*************************************************************!*\
1287 !*** ./node_modules/threads/node_modules/debug/src/node.js ***!
1288 \*************************************************************/
1289/*! no static exports found */
1290/***/ (function(module, exports, __webpack_require__) {
1291
1292eval("/**\n * Module dependencies.\n */\n\nconst tty = __webpack_require__(/*! tty */ \"tty\");\nconst util = __webpack_require__(/*! util */ \"util\");\n\n/**\n * This is the Node.js implementation of `debug()`.\n */\n\nexports.init = init;\nexports.log = log;\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.destroy = util.deprecate(\n\t() => {},\n\t'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'\n);\n\n/**\n * Colors.\n */\n\nexports.colors = [6, 2, 3, 4, 5, 1];\n\ntry {\n\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\n\t// eslint-disable-next-line import/no-extraneous-dependencies\n\tconst supportsColor = __webpack_require__(/*! supports-color */ \"./node_modules/supports-color/index.js\");\n\n\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\n\t\texports.colors = [\n\t\t\t20,\n\t\t\t21,\n\t\t\t26,\n\t\t\t27,\n\t\t\t32,\n\t\t\t33,\n\t\t\t38,\n\t\t\t39,\n\t\t\t40,\n\t\t\t41,\n\t\t\t42,\n\t\t\t43,\n\t\t\t44,\n\t\t\t45,\n\t\t\t56,\n\t\t\t57,\n\t\t\t62,\n\t\t\t63,\n\t\t\t68,\n\t\t\t69,\n\t\t\t74,\n\t\t\t75,\n\t\t\t76,\n\t\t\t77,\n\t\t\t78,\n\t\t\t79,\n\t\t\t80,\n\t\t\t81,\n\t\t\t92,\n\t\t\t93,\n\t\t\t98,\n\t\t\t99,\n\t\t\t112,\n\t\t\t113,\n\t\t\t128,\n\t\t\t129,\n\t\t\t134,\n\t\t\t135,\n\t\t\t148,\n\t\t\t149,\n\t\t\t160,\n\t\t\t161,\n\t\t\t162,\n\t\t\t163,\n\t\t\t164,\n\t\t\t165,\n\t\t\t166,\n\t\t\t167,\n\t\t\t168,\n\t\t\t169,\n\t\t\t170,\n\t\t\t171,\n\t\t\t172,\n\t\t\t173,\n\t\t\t178,\n\t\t\t179,\n\t\t\t184,\n\t\t\t185,\n\t\t\t196,\n\t\t\t197,\n\t\t\t198,\n\t\t\t199,\n\t\t\t200,\n\t\t\t201,\n\t\t\t202,\n\t\t\t203,\n\t\t\t204,\n\t\t\t205,\n\t\t\t206,\n\t\t\t207,\n\t\t\t208,\n\t\t\t209,\n\t\t\t214,\n\t\t\t215,\n\t\t\t220,\n\t\t\t221\n\t\t];\n\t}\n} catch (error) {\n\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\n}\n\n/**\n * Build up the default `inspectOpts` object from the environment variables.\n *\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\n */\n\nexports.inspectOpts = Object.keys(process.env).filter(key => {\n\treturn /^debug_/i.test(key);\n}).reduce((obj, key) => {\n\t// Camel-case\n\tconst prop = key\n\t\t.substring(6)\n\t\t.toLowerCase()\n\t\t.replace(/_([a-z])/g, (_, k) => {\n\t\t\treturn k.toUpperCase();\n\t\t});\n\n\t// Coerce string value into JS value\n\tlet val = process.env[key];\n\tif (/^(yes|on|true|enabled)$/i.test(val)) {\n\t\tval = true;\n\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\n\t\tval = false;\n\t} else if (val === 'null') {\n\t\tval = null;\n\t} else {\n\t\tval = Number(val);\n\t}\n\n\tobj[prop] = val;\n\treturn obj;\n}, {});\n\n/**\n * Is stdout a TTY? Colored output is enabled when `true`.\n */\n\nfunction useColors() {\n\treturn 'colors' in exports.inspectOpts ?\n\t\tBoolean(exports.inspectOpts.colors) :\n\t\ttty.isatty(process.stderr.fd);\n}\n\n/**\n * Adds ANSI color escape codes if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\tconst {namespace: name, useColors} = this;\n\n\tif (useColors) {\n\t\tconst c = this.color;\n\t\tconst colorCode = '\\u001B[3' + (c < 8 ? 
c : '8;5;' + c);\n\t\tconst prefix = ` ${colorCode};1m${name} \\u001B[0m`;\n\n\t\targs[0] = prefix + args[0].split('\\n').join('\\n' + prefix);\n\t\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\u001B[0m');\n\t} else {\n\t\targs[0] = getDate() + name + ' ' + args[0];\n\t}\n}\n\nfunction getDate() {\n\tif (exports.inspectOpts.hideDate) {\n\t\treturn '';\n\t}\n\treturn new Date().toISOString() + ' ';\n}\n\n/**\n * Invokes `util.format()` with the specified arguments and writes to stderr.\n */\n\nfunction log(...args) {\n\treturn process.stderr.write(util.format(...args) + '\\n');\n}\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\tif (namespaces) {\n\t\tprocess.env.DEBUG = namespaces;\n\t} else {\n\t\t// If you set a process.env field to null or undefined, it gets cast to the\n\t\t// string 'null' or 'undefined'. Just delete instead.\n\t\tdelete process.env.DEBUG;\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\n\nfunction load() {\n\treturn process.env.DEBUG;\n}\n\n/**\n * Init logic for `debug` instances.\n *\n * Create a new `inspectOpts` object in case `useColors` is set\n * differently for a particular `debug` instance.\n */\n\nfunction init(debug) {\n\tdebug.inspectOpts = {};\n\n\tconst keys = Object.keys(exports.inspectOpts);\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\n\t}\n}\n\nmodule.exports = __webpack_require__(/*! ./common */ \"./node_modules/threads/node_modules/debug/src/common.js\")(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %o to `util.inspect()`, all on a single line.\n */\n\nformatters.o = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts)\n\t\t.split('\\n')\n\t\t.map(str => str.trim())\n\t\t.join(' ');\n};\n\n/**\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\n */\n\nformatters.O = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts);\n};\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/node.js?");
1293
1294/***/ }),
1295
1296/***/ "./node_modules/threads/node_modules/ms/index.js":
1297/*!*******************************************************!*\
1298 !*** ./node_modules/threads/node_modules/ms/index.js ***!
1299 \*******************************************************/
1300/*! no static exports found */
1301/***/ (function(module, exports) {
1302
1303eval("/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/ms/index.js?");
1304
1305/***/ }),
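/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a short sketch of
 * the `ms` helper defined above, commented out so the module map is
 * not disturbed. The results follow directly from the parse()/
 * fmtShort()/fmtLong() branches visible in the module.
 *
 *   const ms = require('ms');              // same package bundled above
 *
 *   ms('2 days');                          // 172800000  (string -> parse())
 *   ms('1h');                              // 3600000
 *   ms(60000);                             // '1m'       (number -> fmtShort())
 *   ms(60000, { long: true });             // '1 minute' (number -> fmtLong()/plural())
 *   ms('');                                // throws: not a non-empty string or number
 * ------------------------------------------------------------------ */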
1306
1307/***/ "./node_modules/tiny-worker/lib/index.js":
1308/*!***********************************************!*\
1309 !*** ./node_modules/tiny-worker/lib/index.js ***!
1310 \***********************************************/
1311/*! no static exports found */
1312/***/ (function(module, exports, __webpack_require__) {
1313
1314"use strict";
1315eval("/* WEBPACK VAR INJECTION */(function(__dirname) {\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nvar path = __webpack_require__(/*! path */ \"path\"),\n fork = __webpack_require__(/*! child_process */ \"child_process\").fork,\n worker = path.join(__dirname, \"worker.js\"),\n events = /^(error|message)$/,\n defaultPorts = { inspect: 9229, debug: 5858 };\nvar range = { min: 1, max: 300 };\n\nvar Worker = function () {\n\tfunction Worker(arg) {\n\t\tvar _this = this;\n\n\t\tvar args = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];\n\t\tvar options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : { cwd: process.cwd() };\n\n\t\t_classCallCheck(this, Worker);\n\n\t\tvar isfn = typeof arg === \"function\",\n\t\t input = isfn ? arg.toString() : arg;\n\n\t\tif (!options.cwd) {\n\t\t\toptions.cwd = process.cwd();\n\t\t}\n\n\t\t//get all debug related parameters\n\t\tvar debugVars = process.execArgv.filter(function (execArg) {\n\t\t\treturn (/(debug|inspect)/.test(execArg)\n\t\t\t);\n\t\t});\n\t\tif (debugVars.length > 0 && !options.noDebugRedirection) {\n\t\t\tif (!options.execArgv) {\n\t\t\t\t//if no execArgs are given copy all arguments\n\t\t\t\tdebugVars = Array.from(process.execArgv);\n\t\t\t\toptions.execArgv = [];\n\t\t\t}\n\n\t\t\tvar inspectIndex = debugVars.findIndex(function (debugArg) {\n\t\t\t\t//get index of inspect parameter\n\t\t\t\treturn (/^--inspect(-brk)?(=\\d+)?$/.test(debugArg)\n\t\t\t\t);\n\t\t\t});\n\n\t\t\tvar debugIndex = debugVars.findIndex(function (debugArg) {\n\t\t\t\t//get index of debug parameter\n\t\t\t\treturn (/^--debug(-brk)?(=\\d+)?$/.test(debugArg)\n\t\t\t\t);\n\t\t\t});\n\n\t\t\tvar portIndex = inspectIndex >= 0 ? inspectIndex : debugIndex; //get index of port, inspect has higher priority\n\n\t\t\tif (portIndex >= 0) {\n\t\t\t\tvar match = /^--(debug|inspect)(?:-brk)?(?:=(\\d+))?$/.exec(debugVars[portIndex]); //get port\n\t\t\t\tvar port = defaultPorts[match[1]];\n\t\t\t\tif (match[2]) {\n\t\t\t\t\tport = parseInt(match[2]);\n\t\t\t\t}\n\t\t\t\tdebugVars[portIndex] = \"--\" + match[1] + \"=\" + (port + range.min + Math.floor(Math.random() * (range.max - range.min))); //new parameter\n\n\t\t\t\tif (debugIndex >= 0 && debugIndex !== portIndex) {\n\t\t\t\t\t//remove \"-brk\" from debug if there\n\t\t\t\t\tmatch = /^(--debug)(?:-brk)?(.*)/.exec(debugVars[debugIndex]);\n\t\t\t\t\tdebugVars[debugIndex] = match[1] + (match[2] ? 
match[2] : \"\");\n\t\t\t\t}\n\t\t\t}\n\t\t\toptions.execArgv = options.execArgv.concat(debugVars);\n\t\t}\n\n\t\tdelete options.noDebugRedirection;\n\n\t\tthis.child = fork(worker, args, options);\n\t\tthis.onerror = undefined;\n\t\tthis.onmessage = undefined;\n\n\t\tthis.child.on(\"error\", function (e) {\n\t\t\tif (_this.onerror) {\n\t\t\t\t_this.onerror.call(_this, e);\n\t\t\t}\n\t\t});\n\n\t\tthis.child.on(\"message\", function (msg) {\n\t\t\tvar message = JSON.parse(msg);\n\t\t\tvar error = void 0;\n\n\t\t\tif (!message.error && _this.onmessage) {\n\t\t\t\t_this.onmessage.call(_this, message);\n\t\t\t}\n\n\t\t\tif (message.error && _this.onerror) {\n\t\t\t\terror = new Error(message.error);\n\t\t\t\terror.stack = message.stack;\n\n\t\t\t\t_this.onerror.call(_this, error);\n\t\t\t}\n\t\t});\n\n\t\tthis.child.send({ input: input, isfn: isfn, cwd: options.cwd, esm: options.esm });\n\t}\n\n\t_createClass(Worker, [{\n\t\tkey: \"addEventListener\",\n\t\tvalue: function addEventListener(event, fn) {\n\t\t\tif (events.test(event)) {\n\t\t\t\tthis[\"on\" + event] = fn;\n\t\t\t}\n\t\t}\n\t}, {\n\t\tkey: \"postMessage\",\n\t\tvalue: function postMessage(msg) {\n\t\t\tthis.child.send(JSON.stringify({ data: msg }, null, 0));\n\t\t}\n\t}, {\n\t\tkey: \"terminate\",\n\t\tvalue: function terminate() {\n\t\t\tthis.child.kill(\"SIGINT\");\n\t\t}\n\t}], [{\n\t\tkey: \"setRange\",\n\t\tvalue: function setRange(min, max) {\n\t\t\tif (min >= max) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\trange.min = min;\n\t\t\trange.max = max;\n\n\t\t\treturn true;\n\t\t}\n\t}]);\n\n\treturn Worker;\n}();\n\nmodule.exports = Worker;\n\n/* WEBPACK VAR INJECTION */}.call(this, \"/\"))\n\n//# sourceURL=webpack://GeoRaster/./node_modules/tiny-worker/lib/index.js?");
1316
1317/***/ }),
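/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a hedged sketch of
 * the tiny-worker Worker class above, which forks a Node child
 * process and exposes a postMessage/onmessage protocol over it. Kept
 * in a comment so the bundle is unchanged; the worker body is purely
 * illustrative.
 *
 *   const Worker = require('tiny-worker'); // same package bundled above
 *
 *   // A function argument is stringified and sent to the child process.
 *   const worker = new Worker(function () {
 *     self.onmessage = function (event) {
 *       postMessage(event.data * 2);
 *     };
 *   });
 *
 *   worker.onmessage = function (event) {
 *     console.log('doubled:', event.data);
 *     worker.terminate();                  // kills the child with SIGINT (see terminate())
 *   };
 *
 *   worker.postMessage(21);
 * ------------------------------------------------------------------ */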
1318
1319/***/ "./node_modules/txml/node_modules/through2/through2.js":
1320/*!*************************************************************!*\
1321 !*** ./node_modules/txml/node_modules/through2/through2.js ***!
1322 \*************************************************************/
1323/*! no static exports found */
1324/***/ (function(module, exports, __webpack_require__) {
1325
1326eval("var Transform = __webpack_require__(/*! readable-stream */ \"./node_modules/readable-stream/readable.js\").Transform\n , inherits = __webpack_require__(/*! inherits */ \"./node_modules/inherits/inherits.js\")\n\nfunction DestroyableTransform(opts) {\n Transform.call(this, opts)\n this._destroyed = false\n}\n\ninherits(DestroyableTransform, Transform)\n\nDestroyableTransform.prototype.destroy = function(err) {\n if (this._destroyed) return\n this._destroyed = true\n \n var self = this\n process.nextTick(function() {\n if (err)\n self.emit('error', err)\n self.emit('close')\n })\n}\n\n// a noop _transform function\nfunction noop (chunk, enc, callback) {\n callback(null, chunk)\n}\n\n\n// create a new export function, used by both the main export and\n// the .ctor export, contains common logic for dealing with arguments\nfunction through2 (construct) {\n return function (options, transform, flush) {\n if (typeof options == 'function') {\n flush = transform\n transform = options\n options = {}\n }\n\n if (typeof transform != 'function')\n transform = noop\n\n if (typeof flush != 'function')\n flush = null\n\n return construct(options, transform, flush)\n }\n}\n\n\n// main export, just make me a transform stream!\nmodule.exports = through2(function (options, transform, flush) {\n var t2 = new DestroyableTransform(options)\n\n t2._transform = transform\n\n if (flush)\n t2._flush = flush\n\n return t2\n})\n\n\n// make me a reusable prototype that I can `new`, or implicitly `new`\n// with a constructor call\nmodule.exports.ctor = through2(function (options, transform, flush) {\n function Through2 (override) {\n if (!(this instanceof Through2))\n return new Through2(override)\n\n this.options = Object.assign({}, options, override)\n\n DestroyableTransform.call(this, this.options)\n }\n\n inherits(Through2, DestroyableTransform)\n\n Through2.prototype._transform = transform\n\n if (flush)\n Through2.prototype._flush = flush\n\n return Through2\n})\n\n\nmodule.exports.obj = through2(function (options, transform, flush) {\n var t2 = new DestroyableTransform(Object.assign({ objectMode: true, highWaterMark: 16 }, options))\n\n t2._transform = transform\n\n if (flush)\n t2._flush = flush\n\n return t2\n})\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/txml/node_modules/through2/through2.js?");
1327
1328/***/ }),
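/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a minimal sketch
 * of the through2 helper above (a thin wrapper around
 * readable-stream's Transform), commented out so the module map stays
 * intact.
 *
 *   const through2 = require('through2');  // same package bundled above
 *
 *   // Upper-cases each chunk; the optional flush callback runs once
 *   // before the stream ends.
 *   const upper = through2(function (chunk, enc, callback) {
 *     callback(null, chunk.toString().toUpperCase());
 *   }, function (flushCallback) {
 *     this.push('\n-- done --\n');
 *     flushCallback();
 *   });
 *
 *   process.stdin.pipe(upper).pipe(process.stdout);
 *
 *   // through2.obj(...) is the objectMode variant; the bundled txml
 *   // transformStream below instead passes { readableObjectMode: true }
 *   // to the main export.
 * ------------------------------------------------------------------ */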
1329
1330/***/ "./node_modules/txml/tXml.js":
1331/*!***********************************!*\
1332 !*** ./node_modules/txml/tXml.js ***!
1333 \***********************************/
1334/*! no static exports found */
1335/***/ (function(module, exports, __webpack_require__) {
1336
1337eval("// ==ClosureCompiler==\n// @output_file_name default.js\n// @compilation_level SIMPLE_OPTIMIZATIONS\n// ==/ClosureCompiler==\n\n/**\n * @author: Tobias Nickel\n * @created: 06.04.2015\n * I needed a small xmlparser chat can be used in a worker.\n */\n\n/**\n * @typedef tNode \n * @property {string} tagName \n * @property {object} [attributes] \n * @property {tNode|string|number[]} children \n **/\n\n/**\n * parseXML / html into a DOM Object. with no validation and some failur tolerance\n * @param {string} S your XML to parse\n * @param options {object} all other options:\n * searchId {string} the id of a single element, that should be returned. using this will increase the speed rapidly\n * filter {function} filter method, as you know it from Array.filter. but is goes throw the DOM.\n\n * @return {tNode[]}\n */\nfunction tXml(S, options) {\n \"use strict\";\n options = options || {};\n\n var pos = options.pos || 0;\n\n var openBracket = \"<\";\n var openBracketCC = \"<\".charCodeAt(0);\n var closeBracket = \">\";\n var closeBracketCC = \">\".charCodeAt(0);\n var minus = \"-\";\n var minusCC = \"-\".charCodeAt(0);\n var slash = \"/\";\n var slashCC = \"/\".charCodeAt(0);\n var exclamation = '!';\n var exclamationCC = '!'.charCodeAt(0);\n var singleQuote = \"'\";\n var singleQuoteCC = \"'\".charCodeAt(0);\n var doubleQuote = '\"';\n var doubleQuoteCC = '\"'.charCodeAt(0);\n\n /**\n * parsing a list of entries\n */\n function parseChildren() {\n var children = [];\n while (S[pos]) {\n if (S.charCodeAt(pos) == openBracketCC) {\n if (S.charCodeAt(pos + 1) === slashCC) {\n pos = S.indexOf(closeBracket, pos);\n if (pos + 1) pos += 1\n return children;\n } else if (S.charCodeAt(pos + 1) === exclamationCC) {\n if (S.charCodeAt(pos + 2) == minusCC) {\n //comment support\n while (pos !== -1 && !(S.charCodeAt(pos) === closeBracketCC && S.charCodeAt(pos - 1) == minusCC && S.charCodeAt(pos - 2) == minusCC && pos != -1)) {\n pos = S.indexOf(closeBracket, pos + 1);\n }\n if (pos === -1) {\n pos = S.length\n }\n } else {\n // doctypesupport\n pos += 2;\n while (S.charCodeAt(pos) !== closeBracketCC && S[pos]) {\n pos++;\n }\n }\n pos++;\n continue;\n }\n var node = parseNode();\n children.push(node);\n } else {\n var text = parseText()\n if (text.trim().length > 0)\n children.push(text);\n pos++;\n }\n }\n return children;\n }\n\n /**\n * returns the text outside of texts until the first '<'\n */\n function parseText() {\n var start = pos;\n pos = S.indexOf(openBracket, pos) - 1;\n if (pos === -2)\n pos = S.length;\n return S.slice(start, pos + 1);\n }\n /**\n * returns text until the first nonAlphebetic letter\n */\n var nameSpacer = '\\n\\t>/= ';\n\n function parseName() {\n var start = pos;\n while (nameSpacer.indexOf(S[pos]) === -1 && S[pos]) {\n pos++;\n }\n return S.slice(start, pos);\n }\n /**\n * is parsing a node, including tagName, Attributes and its children,\n * to parse children it uses the parseChildren again, that makes the parsing recursive\n */\n var NoChildNodes = options.noChildNodes || ['img', 'br', 'input', 'meta', 'link'];\n\n function parseNode() {\n pos++;\n const tagName = parseName();\n const attributes = {};\n let children = [];\n\n // parsing attributes\n while (S.charCodeAt(pos) !== closeBracketCC && S[pos]) {\n var c = S.charCodeAt(pos);\n if ((c > 64 && c < 91) || (c > 96 && c < 123)) {\n //if('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'.indexOf(S[pos])!==-1 ){\n var name = parseName();\n // search beginning of the string\n var code = S.charCodeAt(pos);\n 
while (code && code !== singleQuoteCC && code !== doubleQuoteCC && !((code > 64 && code < 91) || (code > 96 && code < 123)) && code !== closeBracketCC) {\n pos++;\n code = S.charCodeAt(pos);\n }\n if (code === singleQuoteCC || code === doubleQuoteCC) {\n var value = parseString();\n if (pos === -1) {\n return {\n tagName,\n attributes,\n children,\n };\n }\n } else {\n value = null;\n pos--;\n }\n attributes[name] = value;\n }\n pos++;\n }\n // optional parsing of children\n if (S.charCodeAt(pos - 1) !== slashCC) {\n if (tagName == \"script\") {\n var start = pos + 1;\n pos = S.indexOf('</script>', pos);\n children = [S.slice(start, pos - 1)];\n pos += 9;\n } else if (tagName == \"style\") {\n var start = pos + 1;\n pos = S.indexOf('</style>', pos);\n children = [S.slice(start, pos - 1)];\n pos += 8;\n } else if (NoChildNodes.indexOf(tagName) == -1) {\n pos++;\n children = parseChildren(name);\n }\n } else {\n pos++;\n }\n return {\n tagName,\n attributes,\n children,\n };\n }\n\n /**\n * is parsing a string, that starts with a char and with the same usually ' or \"\n */\n\n function parseString() {\n var startChar = S[pos];\n var startpos = ++pos;\n pos = S.indexOf(startChar, startpos)\n return S.slice(startpos, pos);\n }\n\n /**\n *\n */\n function findElements() {\n var r = new RegExp('\\\\s' + options.attrName + '\\\\s*=[\\'\"]' + options.attrValue + '[\\'\"]').exec(S)\n if (r) {\n return r.index;\n } else {\n return -1;\n }\n }\n\n var out = null;\n if (options.attrValue !== undefined) {\n options.attrName = options.attrName || 'id';\n var out = [];\n\n while ((pos = findElements()) !== -1) {\n pos = S.lastIndexOf('<', pos);\n if (pos !== -1) {\n out.push(parseNode());\n }\n S = S.substr(pos);\n pos = 0;\n }\n } else if (options.parseNode) {\n out = parseNode()\n } else {\n out = parseChildren();\n }\n\n if (options.filter) {\n out = tXml.filter(out, options.filter);\n }\n\n if (options.setPos) {\n out.pos = pos;\n }\n\n return out;\n}\n\n/**\n * transform the DomObject to an object that is like the object of PHPs simplexmp_load_*() methods.\n * this format helps you to write that is more likely to keep your programm working, even if there a small changes in the XML schema.\n * be aware, that it is not possible to reproduce the original xml from a simplified version, because the order of elements is not saved.\n * therefore your programm will be more flexible and easyer to read.\n *\n * @param {tNode[]} children the childrenList\n */\ntXml.simplify = function simplify(children) {\n var out = {};\n if (!children.length) {\n return '';\n }\n\n if (children.length === 1 && typeof children[0] == 'string') {\n return children[0];\n }\n // map each object\n children.forEach(function(child) {\n if (typeof child !== 'object') {\n return;\n }\n if (!out[child.tagName])\n out[child.tagName] = [];\n var kids = tXml.simplify(child.children||[]);\n out[child.tagName].push(kids);\n if (child.attributes) {\n kids._attributes = child.attributes;\n }\n });\n\n for (var i in out) {\n if (out[i].length == 1) {\n out[i] = out[i][0];\n }\n }\n\n return out;\n};\n\n/**\n * behaves the same way as Array.filter, if the filter method return true, the element is in the resultList\n * @params children{Array} the children of a node\n * @param f{function} the filter method\n */\ntXml.filter = function(children, f) {\n var out = [];\n children.forEach(function(child) {\n if (typeof(child) === 'object' && f(child)) out.push(child);\n if (child.children) {\n var kids = tXml.filter(child.children, f);\n out = 
out.concat(kids);\n }\n });\n return out;\n};\n\n/**\n * stringify a previously parsed string object.\n * this is useful,\n * 1. to remove whitespaces\n * 2. to recreate xml data, with some changed data.\n * @param {tNode} O the object to Stringify\n */\ntXml.stringify = function TOMObjToXML(O) {\n var out = '';\n\n function writeChildren(O) {\n if (O)\n for (var i = 0; i < O.length; i++) {\n if (typeof O[i] == 'string') {\n out += O[i].trim();\n } else {\n writeNode(O[i]);\n }\n }\n }\n\n function writeNode(N) {\n out += \"<\" + N.tagName;\n for (var i in N.attributes) {\n if (N.attributes[i] === null) {\n out += ' ' + i;\n } else if (N.attributes[i].indexOf('\"') === -1) {\n out += ' ' + i + '=\"' + N.attributes[i].trim() + '\"';\n } else {\n out += ' ' + i + \"='\" + N.attributes[i].trim() + \"'\";\n }\n }\n out += '>';\n writeChildren(N.children);\n out += '</' + N.tagName + '>';\n }\n writeChildren(O);\n\n return out;\n};\n\n\n/**\n * use this method to read the textcontent, of some node.\n * It is great if you have mixed content like:\n * this text has some <b>big</b> text and a <a href=''>link</a>\n * @return {string}\n */\ntXml.toContentString = function(tDom) {\n if (Array.isArray(tDom)) {\n var out = '';\n tDom.forEach(function(e) {\n out += ' ' + tXml.toContentString(e);\n out = out.trim();\n });\n return out;\n } else if (typeof tDom === 'object') {\n return tXml.toContentString(tDom.children)\n } else {\n return ' ' + tDom;\n }\n};\n\ntXml.getElementById = function(S, id, simplified) {\n var out = tXml(S, {\n attrValue: id\n });\n return simplified ? tXml.simplify(out) : out[0];\n};\n/**\n * A fast parsing method, that not realy finds by classname,\n * more: the class attribute contains XXX\n * @param\n */\ntXml.getElementsByClassName = function(S, classname, simplified) {\n const out = tXml(S, {\n attrName: 'class',\n attrValue: '[a-zA-Z0-9\\-\\s ]*' + classname + '[a-zA-Z0-9\\-\\s ]*'\n });\n return simplified ? tXml.simplify(out) : out;\n};\n\ntXml.parseStream = function(stream, offset) {\n if (typeof offset === 'string') {\n offset = offset.length + 2;\n }\n if (typeof stream === 'string') {\n var fs = __webpack_require__(/*! fs */ \"fs\");\n stream = fs.createReadStream(stream, { start: offset });\n offset = 0;\n }\n\n var position = offset;\n var data = '';\n stream.on('data', function(chunk) {\n data += chunk;\n var lastPos = 0;\n do {\n position = data.indexOf('<', position) + 1;\n if(!position) {\n position = lastPos;\n return;\n }\n if (data[position + 1] === '/') {\n position = position + 1;\n lastPos = pos;\n continue;\n }\n var res = tXml(data, { pos: position-1, parseNode: true, setPos: true });\n position = res.pos;\n if (position > (data.length - 1) || position < lastPos) {\n data = data.slice(lastPos);\n position = 0;\n lastPos = 0;\n return;\n } else {\n stream.emit('xml', res);\n lastPos = position;\n }\n } while (1);\n });\n stream.on('end', function() {\n console.log('end')\n });\n return stream;\n}\n\ntXml.transformStream = function (offset) {\n // require through here, so it will not get added to webpack/browserify\n const through2 = __webpack_require__(/*! 
through2 */ \"./node_modules/txml/node_modules/through2/through2.js\");\n if (typeof offset === 'string') {\n offset = offset.length + 2;\n }\n\n var position = offset || 0;\n var data = '';\n const stream = through2({ readableObjectMode: true }, function (chunk, enc, callback) {\n data += chunk;\n var lastPos = 0;\n do {\n position = data.indexOf('<', position) + 1;\n if (!position) {\n position = lastPos;\n return callback();;\n }\n if (data[position + 1] === '/') {\n position = position + 1;\n lastPos = pos;\n continue;\n }\n var res = tXml(data, { pos: position - 1, parseNode: true, setPos: true });\n position = res.pos;\n if (position > (data.length - 1) || position < lastPos) {\n data = data.slice(lastPos);\n position = 0;\n lastPos = 0;\n return callback();;\n } else {\n this.push(res);\n lastPos = position;\n }\n } while (1);\n callback();\n });\n\n return stream;\n}\n\nif (true) {\n module.exports = tXml;\n tXml.xml = tXml;\n}\n//console.clear();\n//console.log('here:',tXml.getElementById('<some><xml id=\"test\">dada</xml><that id=\"test\">value</that></some>','test'));\n//console.log('here:',tXml.getElementsByClassName('<some><xml id=\"test\" class=\"sdf test jsalf\">dada</xml><that id=\"test\">value</that></some>','test'));\n\n/*\nconsole.clear();\ntXml(d,'content');\n //some testCode\nvar s = document.body.innerHTML.toLowerCase();\nvar start = new Date().getTime();\nvar o = tXml(s,'content');\nvar end = new Date().getTime();\n//console.log(JSON.stringify(o,undefined,'\\t'));\nconsole.log(\"MILLISECONDS\",end-start);\nvar nodeCount=document.querySelectorAll('*').length;\nconsole.log('node count',nodeCount);\nconsole.log(\"speed:\",(1000/(end-start))*nodeCount,'Nodes / second')\n//console.log(JSON.stringify(tXml('<html><head><title>testPage</title></head><body><h1>TestPage</h1><p>this is a <b>test</b>page</p></body></html>'),undefined,'\\t'));\nvar p = new DOMParser();\nvar s2='<body>'+s+'</body>'\nvar start2= new Date().getTime();\nvar o2 = p.parseFromString(s2,'text/html').querySelector('#content')\nvar end2=new Date().getTime();\nconsole.log(\"MILLISECONDS\",end2-start2);\n// */\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/txml/tXml.js?");
1338
1339/***/ }),
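/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a hedged usage
 * sketch for the txml parser above, kept in a comment so the bundle
 * is unchanged. The XML strings are arbitrary examples.
 *
 *   const tXml = require('txml');          // same module bundled above
 *
 *   const dom = tXml('<root><item id="a">one</item><item id="b">two</item></root>');
 *   // -> [{ tagName: 'root', attributes: {}, children: [ { tagName: 'item', ... }, ... ] }]
 *
 *   tXml.simplify(dom);
 *   // -> a plain object keyed by tagName, roughly
 *   //    { root: { item: ['one', 'two'], _attributes: {} } }
 *   //    (sibling order is not preserved, as simplify()'s own comment notes)
 *
 *   tXml.getElementById('<a><b id="x">hi</b></a>', 'x');
 *   // -> the tNode for <b id="x">, located via the attrValue fast path above
 * ------------------------------------------------------------------ */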
1340
1341/***/ "./node_modules/util-deprecate/node.js":
1342/*!*********************************************!*\
1343 !*** ./node_modules/util-deprecate/node.js ***!
1344 \*********************************************/
1345/*! no static exports found */
1346/***/ (function(module, exports, __webpack_require__) {
1347
1348eval("\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = __webpack_require__(/*! util */ \"util\").deprecate;\n\n\n//# sourceURL=webpack://GeoRaster/./node_modules/util-deprecate/node.js?");
1349
1350/***/ }),
1351
1352/***/ "./node_modules/worker-loader/dist/workers/InlineWorker.js":
1353/*!*****************************************************************!*\
1354 !*** ./node_modules/worker-loader/dist/workers/InlineWorker.js ***!
1355 \*****************************************************************/
1356/*! no static exports found */
1357/***/ (function(module, exports, __webpack_require__) {
1358
1359"use strict";
1360eval("\n\n// http://stackoverflow.com/questions/10343913/how-to-create-a-web-worker-from-a-string\n\nvar URL = window.URL || window.webkitURL;\n\nmodule.exports = function (content, url) {\n try {\n try {\n var blob;\n\n try {\n // BlobBuilder = Deprecated, but widely implemented\n var BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder;\n\n blob = new BlobBuilder();\n\n blob.append(content);\n\n blob = blob.getBlob();\n } catch (e) {\n // The proposed API\n blob = new Blob([content]);\n }\n\n return new Worker(URL.createObjectURL(blob));\n } catch (e) {\n return new Worker('data:application/javascript,' + encodeURIComponent(content));\n }\n } catch (e) {\n if (!url) {\n throw Error('Inline worker is not supported');\n }\n\n return new Worker(url);\n }\n};\n\n//# sourceURL=webpack://GeoRaster/./node_modules/worker-loader/dist/workers/InlineWorker.js?");
1361
1362/***/ }),
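/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a short sketch of
 * what the worker-loader InlineWorker helper above does, commented
 * out so the bundle is unchanged. It turns a string of worker source
 * into a Worker via a Blob object URL, falling back to a data: URL
 * and finally to a plain script URL. It is an internal helper; in
 * this bundle it is invoked by ./src/worker.js below with a generated
 * worker bundle string, and the require path shown here is only its
 * location inside worker-loader.
 *
 *   const createInlineWorker = require('worker-loader/dist/workers/InlineWorker.js');
 *
 *   const source = 'self.onmessage = function (e) { postMessage(e.data + 1); };';
 *   const worker = createInlineWorker(source, '/fallback-worker.js');
 *
 *   worker.onmessage = function (e) { console.log(e.data); }; // 42
 *   worker.postMessage(41);
 * ------------------------------------------------------------------ */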
1363
1364/***/ "./src/index.js":
1365/*!**********************!*\
1366 !*** ./src/index.js ***!
1367 \**********************/
1368/*! no static exports found */
1369/***/ (function(module, exports, __webpack_require__) {
1370
1371"use strict";
1372eval("\n/* global Blob */\n/* global URL */\n\nvar _typeof = typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; };\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _crossFetch = __webpack_require__(/*! cross-fetch */ \"./node_modules/cross-fetch/dist/node-ponyfill.js\");\n\nvar _crossFetch2 = _interopRequireDefault(_crossFetch);\n\nvar _worker = __webpack_require__(/*! ./worker.js */ \"./src/worker.js\");\n\nvar _worker2 = _interopRequireDefault(_worker);\n\nvar _parseData = __webpack_require__(/*! ./parseData.js */ \"./src/parseData.js\");\n\nvar _parseData2 = _interopRequireDefault(_parseData);\n\nvar _utils = __webpack_require__(/*! ./utils.js */ \"./src/utils.js\");\n\nvar _geotiff = __webpack_require__(/*! geotiff */ \"./node_modules/geotiff/src/geotiff.js\");\n\nvar _georasterToCanvas = __webpack_require__(/*! georaster-to-canvas */ \"./node_modules/georaster-to-canvas/index.js\");\n\nvar _georasterToCanvas2 = _interopRequireDefault(_georasterToCanvas);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction urlExists(url) {\n try {\n return (0, _crossFetch2.default)(url, { method: 'HEAD' }).then(function (response) {\n return response.status === 200;\n }).catch(function (error) {\n return false;\n });\n } catch (error) {\n return Promise.resolve(false);\n }\n}\n\nfunction getValues(geotiff, options) {\n var left = options.left,\n top = options.top,\n right = options.right,\n bottom = options.bottom,\n width = options.width,\n height = options.height,\n resampleMethod = options.resampleMethod;\n // note this.image and this.geotiff both have a readRasters method;\n // they are not the same thing. 
use this.geotiff for experimental version\n // that reads from best overview\n\n return geotiff.readRasters({\n window: [left, top, right, bottom],\n width: width,\n height: height,\n resampleMethod: resampleMethod || 'bilinear'\n }).then(function (rasters) {\n /*\n The result appears to be an array with a width and height property set.\n We only need the values, assuming the user remembers the width and height.\n Ex: [[0,27723,...11025,12924], width: 10, height: 10]\n */\n return rasters.map(function (raster) {\n return (0, _utils.unflatten)(raster, { height: height, width: width });\n });\n });\n};\n\nvar GeoRaster = function () {\n function GeoRaster(data, metadata, debug) {\n _classCallCheck(this, GeoRaster);\n\n if (debug) console.log('starting GeoRaster.constructor with', data, metadata);\n\n this._web_worker_is_available = typeof window !== 'undefined' && window.Worker !== 'undefined';\n this._blob_is_available = typeof Blob !== 'undefined';\n this._url_is_available = typeof URL !== 'undefined';\n\n // check if should convert to buffer\n if ((typeof data === 'undefined' ? 'undefined' : _typeof(data)) === 'object' && data.constructor && data.constructor.name === 'Buffer' && Buffer.isBuffer(data) === false) {\n data = new Buffer(data);\n }\n\n if (typeof data === 'string') {\n if (debug) console.log('data is a url');\n this._data = data;\n this._url = data;\n this.rasterType = 'geotiff';\n this.sourceType = 'url';\n } else if (typeof Buffer !== 'undefined' && Buffer.isBuffer(data)) {\n // this is node\n if (debug) console.log('data is a buffer');\n this._data = data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);\n this.rasterType = 'geotiff';\n this.sourceType = 'Buffer';\n } else if (data instanceof ArrayBuffer) {\n // this is browser\n this._data = data;\n this.rasterType = 'geotiff';\n this.sourceType = 'ArrayBuffer';\n } else if (Array.isArray(data) && metadata) {\n this._data = data;\n this.rasterType = 'object';\n this._metadata = metadata;\n }\n\n if (debug) console.log('this after construction:', this);\n }\n\n _createClass(GeoRaster, [{\n key: 'preinitialize',\n value: function preinitialize(debug) {\n var _this = this;\n\n if (debug) console.log('starting preinitialize');\n if (this._url) {\n // initialize these outside worker to avoid weird worker error\n // I don't see how cache option is passed through with fromUrl,\n // though constantinius says it should work: https://github.com/geotiffjs/geotiff.js/issues/61\n var ovrURL = this._url + '.ovr';\n return urlExists(ovrURL).then(function (ovrExists) {\n if (debug) console.log('overview exists:', ovrExists);\n if (ovrExists) {\n return (0, _geotiff.fromUrls)(_this._url, [ovrURL], { cache: true, forceXHR: false });\n } else {\n return (0, _geotiff.fromUrl)(_this._url, { cache: true, forceXHR: false });\n }\n });\n } else {\n // no pre-initialization steps required if not using a Cloud Optimized GeoTIFF\n return Promise.resolve();\n }\n }\n }, {\n key: 'initialize',\n value: function initialize(debug) {\n var _this2 = this;\n\n return this.preinitialize(debug).then(function (geotiff) {\n return new Promise(function (resolve, reject) {\n if (debug) console.log('starting GeoRaster.initialize');\n if (debug) console.log('this', _this2);\n\n if (_this2.rasterType === 'object' || _this2.rasterType === 'geotiff' || _this2.rasterType === 'tiff') {\n if (_this2._web_worker_is_available) {\n var worker = new _worker2.default();\n worker.onmessage = function (e) {\n if (debug) console.log('main thread received 
message:', e);\n var data = e.data;\n for (var key in data) {\n _this2[key] = data[key];\n }\n if (_this2._url) {\n _this2._geotiff = geotiff;\n _this2.getValues = function (options) {\n return getValues(this._geotiff, options);\n };\n }\n _this2.toCanvas = function (options) {\n return (0, _georasterToCanvas2.default)(this, options);\n };\n resolve(_this2);\n };\n if (debug) console.log('about to postMessage');\n if (_this2._data instanceof ArrayBuffer) {\n worker.postMessage({\n data: _this2._data,\n rasterType: _this2.rasterType,\n sourceType: _this2.sourceType,\n metadata: _this2._metadata\n }, [_this2._data]);\n } else {\n worker.postMessage({\n data: _this2._data,\n rasterType: _this2.rasterType,\n sourceType: _this2.sourceType,\n metadata: _this2._metadata\n });\n }\n } else {\n if (debug) console.log('web worker is not available');\n (0, _parseData2.default)({\n data: _this2._data,\n rasterType: _this2.rasterType,\n sourceType: _this2.sourceType,\n metadata: _this2._metadata\n }, debug).then(function (result) {\n if (debug) console.log('result:', result);\n if (_this2._url) {\n result._geotiff = geotiff;\n result.getValues = function (options) {\n return getValues(this._geotiff, options);\n };\n }\n result.toCanvas = function (options) {\n return (0, _georasterToCanvas2.default)(this, options);\n };\n resolve(result);\n }).catch(reject);\n }\n } else {\n reject('couldn\\'t find a way to parse');\n }\n });\n });\n }\n }]);\n\n return GeoRaster;\n}();\n\nvar parseGeoraster = function parseGeoraster(input, metadata, debug) {\n if (debug) console.log('starting parseGeoraster with ', input, metadata);\n\n if (input === undefined) {\n var errorMessage = '[Georaster.parseGeoraster] Error. You passed in undefined to parseGeoraster. We can\\'t make a raster out of nothing!';\n throw Error(errorMessage);\n }\n\n return new GeoRaster(input, metadata, debug).initialize(debug);\n};\n\nif ( true && typeof module.exports !== 'undefined') {\n module.exports = parseGeoraster;\n}\n\n/*\n The following code allows you to use GeoRaster without requiring\n*/\nif (typeof window !== 'undefined') {\n window['parseGeoraster'] = parseGeoraster;\n} else if (typeof self !== 'undefined') {\n self['parseGeoraster'] = parseGeoraster; // jshint ignore:line\n}\n\n//# sourceURL=webpack://GeoRaster/./src/index.js?");
1373
1374/***/ }),
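/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): a hedged sketch of
 * the public parseGeoraster API defined in ./src/index.js above, kept
 * in a comment so the bundle is unchanged. The file name, URL, and
 * metadata values are placeholders, and the require name assumes the
 * published package name, georaster.
 *
 *   const parseGeoraster = require('georaster');   // this bundle's export
 *   const fs = require('fs');
 *
 *   // 1. From a GeoTIFF Buffer (Node) or ArrayBuffer (browser):
 *   parseGeoraster(fs.readFileSync('example.tif')).then(georaster => {
 *     console.log(georaster.width, georaster.height, georaster.projection);
 *     console.log(georaster.mins, georaster.maxs, georaster.noDataValue);
 *   });
 *
 *   // 2. From a URL (e.g. a Cloud Optimized GeoTIFF): pixel values are
 *   //    fetched lazily through getValues(), which wraps
 *   //    geotiff.readRasters() as shown above.
 *   parseGeoraster('https://example.org/cog.tif').then(georaster => {
 *     return georaster.getValues({
 *       left: 0, top: 0, right: 256, bottom: 256,  // pixel window
 *       width: 64, height: 64,                     // resampled output size
 *       resampleMethod: 'bilinear'
 *     });
 *   });
 *
 *   // 3. From in-memory values plus minimal metadata:
 *   parseGeoraster([[[1, 2], [3, 4]]], {
 *     noDataValue: 0, projection: 4326,
 *     xmin: 0, ymax: 2, pixelWidth: 1, pixelHeight: 1
 *   }).then(georaster => console.log(georaster.xmax, georaster.ymin)); // 2 0
 * ------------------------------------------------------------------ */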
1375
1376/***/ "./src/parseData.js":
1377/*!**************************!*\
1378 !*** ./src/parseData.js ***!
1379 \**************************/
1380/*! no static exports found */
1381/***/ (function(module, exports, __webpack_require__) {
1382
1383"use strict";
1384eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\n\nvar _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i[\"return\"]) _i[\"return\"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError(\"Invalid attempt to destructure non-iterable instance\"); } }; }();\n\nvar _typeof = typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; };\n\nexports.default = parseData;\n\nvar _geotiff = __webpack_require__(/*! geotiff */ \"./node_modules/geotiff/src/geotiff.js\");\n\nvar _geotiffPalette = __webpack_require__(/*! geotiff-palette */ \"./node_modules/geotiff-palette/index.js\");\n\nvar _utils = __webpack_require__(/*! ./utils.js */ \"./src/utils.js\");\n\nfunction processResult(result, debug) {\n var noDataValue = result.noDataValue;\n var height = result.height;\n var width = result.width;\n\n return new Promise(function (resolve, reject) {\n result.maxs = [];\n result.mins = [];\n result.ranges = [];\n\n var max = void 0;var min = void 0;\n\n // console.log(\"starting to get min, max and ranges\");\n for (var rasterIndex = 0; rasterIndex < result.numberOfRasters; rasterIndex++) {\n var rows = result.values[rasterIndex];\n if (debug) console.log('[georaster] rows:', rows);\n\n for (var rowIndex = 0; rowIndex < height; rowIndex++) {\n var row = rows[rowIndex];\n\n for (var columnIndex = 0; columnIndex < width; columnIndex++) {\n var value = row[columnIndex];\n if (value != noDataValue && !isNaN(value)) {\n if (typeof min === 'undefined' || value < min) min = value;else if (typeof max === 'undefined' || value > max) max = value;\n }\n }\n }\n\n result.maxs.push(max);\n result.mins.push(min);\n result.ranges.push(max - min);\n }\n\n resolve(result);\n });\n}\n\n/* We're not using async because trying to avoid dependency on babel's polyfill\nThere can be conflicts when GeoRaster is used in another project that is also\nusing @babel/polyfill */\nfunction parseData(data, debug) {\n return new Promise(function (resolve, reject) {\n try {\n if (debug) console.log('starting parseData with', data);\n if (debug) console.log('\\tGeoTIFF:', typeof GeoTIFF === 'undefined' ? 
'undefined' : _typeof(GeoTIFF));\n\n var result = {};\n\n var height = void 0,\n width = void 0;\n\n if (data.rasterType === 'object') {\n result.values = data.data;\n result.height = height = data.metadata.height || result.values[0].length;\n result.width = width = data.metadata.width || result.values[0][0].length;\n result.pixelHeight = data.metadata.pixelHeight;\n result.pixelWidth = data.metadata.pixelWidth;\n result.projection = data.metadata.projection;\n result.xmin = data.metadata.xmin;\n result.ymax = data.metadata.ymax;\n result.noDataValue = data.metadata.noDataValue;\n result.numberOfRasters = result.values.length;\n result.xmax = result.xmin + result.width * result.pixelWidth;\n result.ymin = result.ymax - result.height * result.pixelHeight;\n result._data = null;\n resolve(processResult(result));\n } else if (data.rasterType === 'geotiff') {\n result._data = data.data;\n\n var initFunction = _geotiff.fromArrayBuffer;\n if (data.sourceType === 'url') {\n initFunction = _geotiff.fromUrl;\n }\n\n if (debug) console.log('data.rasterType is geotiff');\n resolve(initFunction(data.data).then(function (geotiff) {\n if (debug) console.log('geotiff:', geotiff);\n return geotiff.getImage().then(function (image) {\n try {\n if (debug) console.log('image:', image);\n\n var fileDirectory = image.fileDirectory;\n\n var _image$getGeoKeys = image.getGeoKeys(),\n GeographicTypeGeoKey = _image$getGeoKeys.GeographicTypeGeoKey,\n ProjectedCSTypeGeoKey = _image$getGeoKeys.ProjectedCSTypeGeoKey;\n\n result.projection = ProjectedCSTypeGeoKey || GeographicTypeGeoKey;\n if (debug) console.log('projection:', result.projection);\n\n result.height = height = image.getHeight();\n if (debug) console.log('result.height:', result.height);\n result.width = width = image.getWidth();\n if (debug) console.log('result.width:', result.width);\n\n var _image$getResolution = image.getResolution(),\n _image$getResolution2 = _slicedToArray(_image$getResolution, 2),\n resolutionX = _image$getResolution2[0],\n resolutionY = _image$getResolution2[1];\n\n result.pixelHeight = Math.abs(resolutionY);\n result.pixelWidth = Math.abs(resolutionX);\n\n var _image$getOrigin = image.getOrigin(),\n _image$getOrigin2 = _slicedToArray(_image$getOrigin, 2),\n originX = _image$getOrigin2[0],\n originY = _image$getOrigin2[1];\n\n result.xmin = originX;\n result.xmax = result.xmin + width * result.pixelWidth;\n result.ymax = originY;\n result.ymin = result.ymax - height * result.pixelHeight;\n\n result.noDataValue = fileDirectory.GDAL_NODATA ? parseFloat(fileDirectory.GDAL_NODATA) : null;\n\n result.numberOfRasters = fileDirectory.SamplesPerPixel;\n\n if (fileDirectory.ColorMap) {\n result.palette = (0, _geotiffPalette.getPalette)(image);\n }\n\n if (data.sourceType !== 'url') {\n return image.readRasters().then(function (rasters) {\n result.values = rasters.map(function (valuesInOneDimension) {\n return (0, _utils.unflatten)(valuesInOneDimension, { height: height, width: width });\n });\n return processResult(result);\n });\n } else {\n return result;\n }\n } catch (error) {\n reject(error);\n console.error('[georaster] error parsing georaster:', error);\n }\n });\n }));\n }\n } catch (error) {\n reject(error);\n console.error('[georaster] error parsing georaster:', error);\n }\n });\n}\n\n//# sourceURL=webpack://GeoRaster/./src/parseData.js?");
1385
1386/***/ }),
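/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): the bounds math
 * used by parseData() above, restated as a tiny standalone sketch
 * (commented out; the numbers are made up).
 *
 *   // From geotiff.js: image.getOrigin() -> [originX, originY] and
 *   // image.getResolution() -> [resolutionX, resolutionY] (y is usually negative).
 *   const originX = 100, originY = 50;
 *   const resolutionX = 0.25, resolutionY = -0.25;
 *   const width = 400, height = 200;
 *
 *   const pixelWidth  = Math.abs(resolutionX);      // 0.25
 *   const pixelHeight = Math.abs(resolutionY);      // 0.25
 *   const xmin = originX;                           // 100
 *   const ymax = originY;                           // 50
 *   const xmax = xmin + width * pixelWidth;         // 200
 *   const ymin = ymax - height * pixelHeight;       // 0
 *
 *   // GDAL_NODATA, when present in the file directory, is parsed with
 *   // parseFloat() and exposed as noDataValue (null otherwise).
 * ------------------------------------------------------------------ */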
1387
1388/***/ "./src/utils.js":
1389/*!**********************!*\
1390 !*** ./src/utils.js ***!
1391 \**********************/
1392/*! no static exports found */
1393/***/ (function(module, exports, __webpack_require__) {
1394
1395"use strict";
1396eval("\n\nfunction countIn1D(array) {\n return array.reduce(function (counts, value) {\n if (counts[value] === undefined) {\n counts[value] = 1;\n } else {\n counts[value]++;\n }\n return counts;\n }, {});\n}\n\nfunction countIn2D(rows) {\n return rows.reduce(function (counts, values) {\n values.forEach(function (value) {\n if (counts[value] === undefined) {\n counts[value] = 1;\n } else {\n counts[value]++;\n }\n });\n return counts;\n }, {});\n}\n\n/*\nTakes in a flattened one dimensional array\nrepresenting two-dimensional pixel values\nand returns an array of arrays.\n*/\nfunction unflatten(valuesInOneDimension, size) {\n var height = size.height,\n width = size.width;\n\n var valuesInTwoDimensions = [];\n for (var y = 0; y < height; y++) {\n var start = y * width;\n var end = start + width;\n valuesInTwoDimensions.push(valuesInOneDimension.slice(start, end));\n }\n return valuesInTwoDimensions;\n}\n\nmodule.exports = { countIn1D: countIn1D, countIn2D: countIn2D, unflatten: unflatten };\n\n//# sourceURL=webpack://GeoRaster/./src/utils.js?");
1397
1398/***/ }),
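/* ------------------------------------------------------------------
 * Editor's note (not part of the original bundle): quick examples for
 * the three helpers exported by ./src/utils.js above, commented out
 * so the bundle is unchanged (the module is internal to this package,
 * so the require path is illustrative).
 *
 *   const { unflatten, countIn1D, countIn2D } = require('./src/utils.js');
 *
 *   unflatten([1, 2, 3, 4, 5, 6], { height: 2, width: 3 });
 *   // -> [[1, 2, 3], [4, 5, 6]]   (row-major split of a flat raster band)
 *
 *   countIn1D([1, 1, 2]);          // -> { '1': 2, '2': 1 }
 *   countIn2D([[1, 1], [2, 1]]);   // -> { '1': 3, '2': 1 }
 * ------------------------------------------------------------------ */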
1399
1400/***/ "./src/worker.js":
1401/*!***********************!*\
1402 !*** ./src/worker.js ***!
1403 \***********************/
1404/*! no static exports found */
1405/***/ (function(module, exports, __webpack_require__) {
1406
1407"use strict";
1408eval("module.exports=function(){return __webpack_require__(/*! !./node_modules/worker-loader/dist/workers/InlineWorker.js */ \"./node_modules/worker-loader/dist/workers/InlineWorker.js\")(\"/******/ (function(modules) { // webpackBootstrap\\n/******/ \\t// The module cache\\n/******/ \\tvar installedModules = {};\\n/******/\\n/******/ \\t// The require function\\n/******/ \\tfunction __webpack_require__(moduleId) {\\n/******/\\n/******/ \\t\\t// Check if module is in cache\\n/******/ \\t\\tif(installedModules[moduleId]) {\\n/******/ \\t\\t\\treturn installedModules[moduleId].exports;\\n/******/ \\t\\t}\\n/******/ \\t\\t// Create a new module (and put it into the cache)\\n/******/ \\t\\tvar module = installedModules[moduleId] = {\\n/******/ \\t\\t\\ti: moduleId,\\n/******/ \\t\\t\\tl: false,\\n/******/ \\t\\t\\texports: {}\\n/******/ \\t\\t};\\n/******/\\n/******/ \\t\\t// Execute the module function\\n/******/ \\t\\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\\n/******/\\n/******/ \\t\\t// Flag the module as loaded\\n/******/ \\t\\tmodule.l = true;\\n/******/\\n/******/ \\t\\t// Return the exports of the module\\n/******/ \\t\\treturn module.exports;\\n/******/ \\t}\\n/******/\\n/******/\\n/******/ \\t// expose the modules object (__webpack_modules__)\\n/******/ \\t__webpack_require__.m = modules;\\n/******/\\n/******/ \\t// expose the module cache\\n/******/ \\t__webpack_require__.c = installedModules;\\n/******/\\n/******/ \\t// define getter function for harmony exports\\n/******/ \\t__webpack_require__.d = function(exports, name, getter) {\\n/******/ \\t\\tif(!__webpack_require__.o(exports, name)) {\\n/******/ \\t\\t\\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\\n/******/ \\t\\t}\\n/******/ \\t};\\n/******/\\n/******/ \\t// define __esModule on exports\\n/******/ \\t__webpack_require__.r = function(exports) {\\n/******/ \\t\\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\\n/******/ \\t\\t\\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\\n/******/ \\t\\t}\\n/******/ \\t\\tObject.defineProperty(exports, '__esModule', { value: true });\\n/******/ \\t};\\n/******/\\n/******/ \\t// create a fake namespace object\\n/******/ \\t// mode & 1: value is a module id, require it\\n/******/ \\t// mode & 2: merge all properties of value into the ns\\n/******/ \\t// mode & 4: return value when already ns object\\n/******/ \\t// mode & 8|1: behave like require\\n/******/ \\t__webpack_require__.t = function(value, mode) {\\n/******/ \\t\\tif(mode & 1) value = __webpack_require__(value);\\n/******/ \\t\\tif(mode & 8) return value;\\n/******/ \\t\\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\\n/******/ \\t\\tvar ns = Object.create(null);\\n/******/ \\t\\t__webpack_require__.r(ns);\\n/******/ \\t\\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\\n/******/ \\t\\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\\n/******/ \\t\\treturn ns;\\n/******/ \\t};\\n/******/\\n/******/ \\t// getDefaultExport function for compatibility with non-harmony modules\\n/******/ \\t__webpack_require__.n = function(module) {\\n/******/ \\t\\tvar getter = module && module.__esModule ?\\n/******/ \\t\\t\\tfunction getDefault() { return module['default']; } :\\n/******/ \\t\\t\\tfunction getModuleExports() { return module; };\\n/******/ 
\\t\\t__webpack_require__.d(getter, 'a', getter);\\n/******/ \\t\\treturn getter;\\n/******/ \\t};\\n/******/\\n/******/ \\t// Object.prototype.hasOwnProperty.call\\n/******/ \\t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\\n/******/\\n/******/ \\t// __webpack_public_path__\\n/******/ \\t__webpack_require__.p = \\\"\\\";\\n/******/\\n/******/\\n/******/ \\t// Load entry module and return exports\\n/******/ \\treturn __webpack_require__(__webpack_require__.s = \\\"./src/worker.js\\\");\\n/******/ })\\n/************************************************************************/\\n/******/ ({\\n\\n/***/ \\\"./node_modules/callsites/index.js\\\":\\n/*!*****************************************!*\\\\\\n !*** ./node_modules/callsites/index.js ***!\\n \\\\*****************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nconst callsites = () => {\\\\n\\\\tconst _prepareStackTrace = Error.prepareStackTrace;\\\\n\\\\tError.prepareStackTrace = (_, stack) => stack;\\\\n\\\\tconst stack = new Error().stack.slice(1);\\\\n\\\\tError.prepareStackTrace = _prepareStackTrace;\\\\n\\\\treturn stack;\\\\n};\\\\n\\\\nmodule.exports = callsites;\\\\n// TODO: Remove this for the next major release\\\\nmodule.exports.default = callsites;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/callsites/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/core-util-is/lib/util.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/core-util-is/lib/util.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n// NOTE: These type checking functions intentionally don't use `instanceof`\\\\n// because it is fragile and can be easily faked with `Object.create()`.\\\\n\\\\nfunction isArray(arg) {\\\\n if (Array.isArray) {\\\\n return Array.isArray(arg);\\\\n }\\\\n return objectToString(arg) === '[object Array]';\\\\n}\\\\nexports.isArray = isArray;\\\\n\\\\nfunction isBoolean(arg) {\\\\n return typeof arg === 'boolean';\\\\n}\\\\nexports.isBoolean = isBoolean;\\\\n\\\\nfunction isNull(arg) {\\\\n return arg === null;\\\\n}\\\\nexports.isNull = isNull;\\\\n\\\\nfunction isNullOrUndefined(arg) {\\\\n return arg == null;\\\\n}\\\\nexports.isNullOrUndefined = isNullOrUndefined;\\\\n\\\\nfunction isNumber(arg) {\\\\n return typeof arg === 'number';\\\\n}\\\\nexports.isNumber = isNumber;\\\\n\\\\nfunction isString(arg) {\\\\n return typeof arg === 'string';\\\\n}\\\\nexports.isString = isString;\\\\n\\\\nfunction isSymbol(arg) {\\\\n return typeof arg === 'symbol';\\\\n}\\\\nexports.isSymbol = isSymbol;\\\\n\\\\nfunction isUndefined(arg) {\\\\n return arg === void 0;\\\\n}\\\\nexports.isUndefined = isUndefined;\\\\n\\\\nfunction isRegExp(re) {\\\\n return objectToString(re) === '[object RegExp]';\\\\n}\\\\nexports.isRegExp = isRegExp;\\\\n\\\\nfunction isObject(arg) {\\\\n return typeof arg === 'object' && arg !== null;\\\\n}\\\\nexports.isObject = isObject;\\\\n\\\\nfunction isDate(d) {\\\\n return objectToString(d) === '[object Date]';\\\\n}\\\\nexports.isDate = isDate;\\\\n\\\\nfunction isError(e) {\\\\n return (objectToString(e) === '[object Error]' || e instanceof Error);\\\\n}\\\\nexports.isError = isError;\\\\n\\\\nfunction isFunction(arg) {\\\\n return typeof arg === 'function';\\\\n}\\\\nexports.isFunction = isFunction;\\\\n\\\\nfunction isPrimitive(arg) {\\\\n return arg === null ||\\\\n typeof arg === 'boolean' ||\\\\n typeof arg === 'number' ||\\\\n typeof arg === 'string' ||\\\\n typeof arg === 'symbol' || // ES6 symbol\\\\n typeof arg === 'undefined';\\\\n}\\\\nexports.isPrimitive = isPrimitive;\\\\n\\\\nexports.isBuffer = __webpack_require__(/*! buffer */ \\\\\\\"buffer\\\\\\\").Buffer.isBuffer;\\\\n\\\\nfunction objectToString(o) {\\\\n return Object.prototype.toString.call(o);\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/core-util-is/lib/util.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff-palette/index.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/geotiff-palette/index.js ***!\\n \\\\***********************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"const getPalette = (image, { debug = false } = { debug: false }) => {\\\\n if (debug) console.log(\\\\\\\"starting getPalette with image\\\\\\\", image);\\\\n const { fileDirectory } = image;\\\\n const {\\\\n BitsPerSample,\\\\n ColorMap,\\\\n ImageLength,\\\\n ImageWidth,\\\\n PhotometricInterpretation,\\\\n SampleFormat,\\\\n SamplesPerPixel\\\\n } = fileDirectory;\\\\n\\\\n if (!ColorMap) {\\\\n throw new Error(\\\\\\\"[geotiff-palette]: the image does not contain a color map, so we can't make a palette.\\\\\\\");\\\\n }\\\\n\\\\n const count = Math.pow(2, BitsPerSample);\\\\n if (debug) console.log(\\\\\\\"[geotiff-palette]: count:\\\\\\\", count);\\\\n\\\\n const bandSize = ColorMap.length / 3;\\\\n if (debug) console.log(\\\\\\\"[geotiff-palette]: bandSize:\\\\\\\", bandSize);\\\\n\\\\n if (bandSize !== count) {\\\\n throw new Error(\\\\\\\"[geotiff-palette]: can't handle situations where the color map has more or less values than the number of possible values in a raster\\\\\\\");\\\\n }\\\\n\\\\n const greenOffset = bandSize;\\\\n const redOffset = greenOffset + bandSize;\\\\n\\\\n const result = [];\\\\n for (let i = 0; i < count; i++) {\\\\n // colorMap[mapIndex] / 65536 * 256 equals colorMap[mapIndex] / 256\\\\n // because (1 / 2^16) * (2^8) equals 1 / 2^8\\\\n result.push([\\\\n Math.floor(ColorMap[i] / 256), // red\\\\n Math.floor(ColorMap[greenOffset + i] / 256), // green\\\\n Math.floor(ColorMap[redOffset + i] / 256), // blue\\\\n 255 // alpha value is always 255\\\\n ]);\\\\n }\\\\n if (debug) console.log(\\\\\\\"[geotiff-palette]: result is \\\\\\\", result);\\\\n return result;\\\\n}\\\\n\\\\nmodule.exports = { getPalette };\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff-palette/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/basedecoder.js\\\":\\n/*!*************************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/basedecoder.js ***!\\n \\\\*************************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return BaseDecoder; });\\\\n/* harmony import */ var _predictor__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../predictor */ \\\\\\\"./node_modules/geotiff/src/predictor.js\\\\\\\");\\\\n\\\\n\\\\nclass BaseDecoder {\\\\n decode(fileDirectory, buffer) {\\\\n const decoded = this.decodeBlock(buffer);\\\\n const predictor = fileDirectory.Predictor || 1;\\\\n if (predictor !== 1) {\\\\n const isTiled = !fileDirectory.StripOffsets;\\\\n const tileWidth = isTiled ? fileDirectory.TileWidth : fileDirectory.ImageWidth;\\\\n const tileHeight = isTiled ? 
fileDirectory.TileLength : (\\\\n fileDirectory.RowsPerStrip || fileDirectory.ImageLength\\\\n );\\\\n return Object(_predictor__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"applyPredictor\\\\\\\"])(\\\\n decoded, predictor, tileWidth, tileHeight, fileDirectory.BitsPerSample,\\\\n fileDirectory.PlanarConfiguration,\\\\n );\\\\n }\\\\n return decoded;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/basedecoder.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/deflate.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/deflate.js ***!\\n \\\\*********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return DeflateDecoder; });\\\\n/* harmony import */ var pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! pako/lib/inflate */ \\\\\\\"./node_modules/pako/lib/inflate.js\\\\\\\");\\\\n/* harmony import */ var pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./basedecoder */ \\\\\\\"./node_modules/geotiff/src/compression/basedecoder.js\\\\\\\");\\\\n\\\\n\\\\n\\\\nclass DeflateDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"] {\\\\n decodeBlock(buffer) {\\\\n return Object(pako_lib_inflate__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"inflate\\\\\\\"])(new Uint8Array(buffer)).buffer;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/deflate.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/index.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/index.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: getDecoder */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getDecoder\\\\\\\", function() { return getDecoder; });\\\\n/* harmony import */ var _raw__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./raw */ \\\\\\\"./node_modules/geotiff/src/compression/raw.js\\\\\\\");\\\\n/* harmony import */ var _lzw__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./lzw */ \\\\\\\"./node_modules/geotiff/src/compression/lzw.js\\\\\\\");\\\\n/* harmony import */ var _jpeg__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./jpeg */ \\\\\\\"./node_modules/geotiff/src/compression/jpeg.js\\\\\\\");\\\\n/* harmony import */ var _deflate__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./deflate */ \\\\\\\"./node_modules/geotiff/src/compression/deflate.js\\\\\\\");\\\\n/* harmony import */ var _packbits__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
./packbits */ \\\\\\\"./node_modules/geotiff/src/compression/packbits.js\\\\\\\");\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nfunction getDecoder(fileDirectory) {\\\\n switch (fileDirectory.Compression) {\\\\n case undefined:\\\\n case 1: // no compression\\\\n return new _raw__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"]();\\\\n case 5: // LZW\\\\n return new _lzw__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"]();\\\\n case 6: // JPEG\\\\n throw new Error('old style JPEG compression is not supported.');\\\\n case 7: // JPEG\\\\n return new _jpeg__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"](fileDirectory);\\\\n case 8: // Deflate as recognized by Adobe\\\\n case 32946: // Deflate GDAL default\\\\n return new _deflate__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"default\\\\\\\"]();\\\\n case 32773: // packbits\\\\n return new _packbits__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"]();\\\\n default:\\\\n throw new Error(`Unknown compression method identifier: ${fileDirectory.Compression}`);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/jpeg.js\\\":\\n/*!******************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/jpeg.js ***!\\n \\\\******************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return JpegDecoder; });\\\\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./basedecoder */ \\\\\\\"./node_modules/geotiff/src/compression/basedecoder.js\\\\\\\");\\\\n\\\\n\\\\n/* -*- tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- /\\\\n/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */\\\\n/*\\\\n Copyright 2011 notmasteryet\\\\n Licensed under the Apache License, Version 2.0 (the \\\\\\\"License\\\\\\\");\\\\n you may not use this file except in compliance with the License.\\\\n You may obtain a copy of the License at\\\\n http://www.apache.org/licenses/LICENSE-2.0\\\\n Unless required by applicable law or agreed to in writing, software\\\\n distributed under the License is distributed on an \\\\\\\"AS IS\\\\\\\" BASIS,\\\\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\\\n See the License for the specific language governing permissions and\\\\n limitations under the License.\\\\n*/\\\\n\\\\n// - The JPEG specification can be found in the ITU CCITT Recommendation T.81\\\\n// (www.w3.org/Graphics/JPEG/itu-t81.pdf)\\\\n// - The JFIF specification can be found in the JPEG File Interchange Format\\\\n// (www.w3.org/Graphics/JPEG/jfif3.pdf)\\\\n// - The Adobe Application-Specific JPEG markers in the Supporting the DCT Filters\\\\n// in PostScript Level 2, Technical Note #5116\\\\n// (partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf)\\\\n\\\\n\\\\nconst dctZigZag = new Int32Array([\\\\n 0,\\\\n 1, 8,\\\\n 16, 9, 2,\\\\n 3, 10, 17, 24,\\\\n 32, 25, 18, 11, 4,\\\\n 5, 12, 19, 26, 33, 40,\\\\n 48, 41, 34, 27, 20, 13, 6,\\\\n 7, 14, 21, 28, 35, 42, 49, 56,\\\\n 57, 50, 43, 36, 29, 22, 15,\\\\n 23, 30, 37, 44, 51, 58,\\\\n 59, 52, 45, 38, 31,\\\\n 39, 46, 53, 60,\\\\n 61, 54, 47,\\\\n 55, 62,\\\\n 63,\\\\n]);\\\\n\\\\nconst dctCos1 = 4017; // cos(pi/16)\\\\nconst dctSin1 = 799; // sin(pi/16)\\\\nconst dctCos3 = 3406; // cos(3*pi/16)\\\\nconst dctSin3 = 2276; // sin(3*pi/16)\\\\nconst dctCos6 = 1567; // cos(6*pi/16)\\\\nconst dctSin6 = 3784; // sin(6*pi/16)\\\\nconst dctSqrt2 = 5793; // sqrt(2)\\\\nconst dctSqrt1d2 = 2896;// sqrt(2) / 2\\\\n\\\\nfunction buildHuffmanTable(codeLengths, values) {\\\\n let k = 0;\\\\n const code = [];\\\\n let length = 16;\\\\n while (length > 0 && !codeLengths[length - 1]) {\\\\n --length;\\\\n }\\\\n code.push({ children: [], index: 0 });\\\\n\\\\n let p = code[0];\\\\n let q;\\\\n for (let i = 0; i < length; i++) {\\\\n for (let j = 0; j < codeLengths[i]; j++) {\\\\n p = code.pop();\\\\n p.children[p.index] = values[k];\\\\n while (p.index > 0) {\\\\n p = code.pop();\\\\n }\\\\n p.index++;\\\\n code.push(p);\\\\n while (code.length <= i) {\\\\n code.push(q = { children: [], index: 0 });\\\\n p.children[p.index] = q.children;\\\\n p = q;\\\\n }\\\\n k++;\\\\n }\\\\n if (i + 1 < length) {\\\\n // p here points to last code\\\\n code.push(q = { children: [], index: 0 });\\\\n p.children[p.index] = q.children;\\\\n p = q;\\\\n }\\\\n }\\\\n return code[0].children;\\\\n}\\\\n\\\\nfunction decodeScan(data, initialOffset,\\\\n frame, components, resetInterval,\\\\n spectralStart, spectralEnd,\\\\n successivePrev, successive) {\\\\n const { mcusPerLine, progressive } = frame;\\\\n\\\\n const startOffset = initialOffset;\\\\n let offset = initialOffset;\\\\n let bitsData = 0;\\\\n let bitsCount = 0;\\\\n function readBit() {\\\\n if (bitsCount > 0) {\\\\n bitsCount--;\\\\n return (bitsData >> bitsCount) & 1;\\\\n }\\\\n bitsData = data[offset++];\\\\n if (bitsData === 0xFF) {\\\\n const nextByte = data[offset++];\\\\n if (nextByte) {\\\\n throw new Error(`unexpected marker: 
${((bitsData << 8) | nextByte).toString(16)}`);\\\\n }\\\\n // unstuff 0\\\\n }\\\\n bitsCount = 7;\\\\n return bitsData >>> 7;\\\\n }\\\\n function decodeHuffman(tree) {\\\\n let node = tree;\\\\n let bit;\\\\n while ((bit = readBit()) !== null) { // eslint-disable-line no-cond-assign\\\\n node = node[bit];\\\\n if (typeof node === 'number') {\\\\n return node;\\\\n }\\\\n if (typeof node !== 'object') {\\\\n throw new Error('invalid huffman sequence');\\\\n }\\\\n }\\\\n return null;\\\\n }\\\\n function receive(initialLength) {\\\\n let length = initialLength;\\\\n let n = 0;\\\\n while (length > 0) {\\\\n const bit = readBit();\\\\n if (bit === null) {\\\\n return undefined;\\\\n }\\\\n n = (n << 1) | bit;\\\\n --length;\\\\n }\\\\n return n;\\\\n }\\\\n function receiveAndExtend(length) {\\\\n const n = receive(length);\\\\n if (n >= 1 << (length - 1)) {\\\\n return n;\\\\n }\\\\n return n + (-1 << length) + 1;\\\\n }\\\\n function decodeBaseline(component, zz) {\\\\n const t = decodeHuffman(component.huffmanTableDC);\\\\n const diff = t === 0 ? 0 : receiveAndExtend(t);\\\\n component.pred += diff;\\\\n zz[0] = component.pred;\\\\n let k = 1;\\\\n while (k < 64) {\\\\n const rs = decodeHuffman(component.huffmanTableAC);\\\\n const s = rs & 15;\\\\n const r = rs >> 4;\\\\n if (s === 0) {\\\\n if (r < 15) {\\\\n break;\\\\n }\\\\n k += 16;\\\\n } else {\\\\n k += r;\\\\n const z = dctZigZag[k];\\\\n zz[z] = receiveAndExtend(s);\\\\n k++;\\\\n }\\\\n }\\\\n }\\\\n function decodeDCFirst(component, zz) {\\\\n const t = decodeHuffman(component.huffmanTableDC);\\\\n const diff = t === 0 ? 0 : (receiveAndExtend(t) << successive);\\\\n component.pred += diff;\\\\n zz[0] = component.pred;\\\\n }\\\\n function decodeDCSuccessive(component, zz) {\\\\n zz[0] |= readBit() << successive;\\\\n }\\\\n let eobrun = 0;\\\\n function decodeACFirst(component, zz) {\\\\n if (eobrun > 0) {\\\\n eobrun--;\\\\n return;\\\\n }\\\\n let k = spectralStart;\\\\n const e = spectralEnd;\\\\n while (k <= e) {\\\\n const rs = decodeHuffman(component.huffmanTableAC);\\\\n const s = rs & 15;\\\\n const r = rs >> 4;\\\\n if (s === 0) {\\\\n if (r < 15) {\\\\n eobrun = receive(r) + (1 << r) - 1;\\\\n break;\\\\n }\\\\n k += 16;\\\\n } else {\\\\n k += r;\\\\n const z = dctZigZag[k];\\\\n zz[z] = receiveAndExtend(s) * (1 << successive);\\\\n k++;\\\\n }\\\\n }\\\\n }\\\\n let successiveACState = 0;\\\\n let successiveACNextValue;\\\\n function decodeACSuccessive(component, zz) {\\\\n let k = spectralStart;\\\\n const e = spectralEnd;\\\\n let r = 0;\\\\n while (k <= e) {\\\\n const z = dctZigZag[k];\\\\n const direction = zz[z] < 0 ? -1 : 1;\\\\n switch (successiveACState) {\\\\n case 0: { // initial state\\\\n const rs = decodeHuffman(component.huffmanTableAC);\\\\n const s = rs & 15;\\\\n r = rs >> 4;\\\\n if (s === 0) {\\\\n if (r < 15) {\\\\n eobrun = receive(r) + (1 << r);\\\\n successiveACState = 4;\\\\n } else {\\\\n r = 16;\\\\n successiveACState = 1;\\\\n }\\\\n } else {\\\\n if (s !== 1) {\\\\n throw new Error('invalid ACn encoding');\\\\n }\\\\n successiveACNextValue = receiveAndExtend(s);\\\\n successiveACState = r ? 2 : 3;\\\\n }\\\\n continue; // eslint-disable-line no-continue\\\\n }\\\\n case 1: // skipping r zero items\\\\n case 2:\\\\n if (zz[z]) {\\\\n zz[z] += (readBit() << successive) * direction;\\\\n } else {\\\\n r--;\\\\n if (r === 0) {\\\\n successiveACState = successiveACState === 2 ? 
3 : 0;\\\\n }\\\\n }\\\\n break;\\\\n case 3: // set value for a zero item\\\\n if (zz[z]) {\\\\n zz[z] += (readBit() << successive) * direction;\\\\n } else {\\\\n zz[z] = successiveACNextValue << successive;\\\\n successiveACState = 0;\\\\n }\\\\n break;\\\\n case 4: // eob\\\\n if (zz[z]) {\\\\n zz[z] += (readBit() << successive) * direction;\\\\n }\\\\n break;\\\\n default:\\\\n break;\\\\n }\\\\n k++;\\\\n }\\\\n if (successiveACState === 4) {\\\\n eobrun--;\\\\n if (eobrun === 0) {\\\\n successiveACState = 0;\\\\n }\\\\n }\\\\n }\\\\n function decodeMcu(component, decodeFunction, mcu, row, col) {\\\\n const mcuRow = (mcu / mcusPerLine) | 0;\\\\n const mcuCol = mcu % mcusPerLine;\\\\n const blockRow = (mcuRow * component.v) + row;\\\\n const blockCol = (mcuCol * component.h) + col;\\\\n decodeFunction(component, component.blocks[blockRow][blockCol]);\\\\n }\\\\n function decodeBlock(component, decodeFunction, mcu) {\\\\n const blockRow = (mcu / component.blocksPerLine) | 0;\\\\n const blockCol = mcu % component.blocksPerLine;\\\\n decodeFunction(component, component.blocks[blockRow][blockCol]);\\\\n }\\\\n\\\\n const componentsLength = components.length;\\\\n let component;\\\\n let i;\\\\n let j;\\\\n let k;\\\\n let n;\\\\n let decodeFn;\\\\n if (progressive) {\\\\n if (spectralStart === 0) {\\\\n decodeFn = successivePrev === 0 ? decodeDCFirst : decodeDCSuccessive;\\\\n } else {\\\\n decodeFn = successivePrev === 0 ? decodeACFirst : decodeACSuccessive;\\\\n }\\\\n } else {\\\\n decodeFn = decodeBaseline;\\\\n }\\\\n\\\\n let mcu = 0;\\\\n let marker;\\\\n let mcuExpected;\\\\n if (componentsLength === 1) {\\\\n mcuExpected = components[0].blocksPerLine * components[0].blocksPerColumn;\\\\n } else {\\\\n mcuExpected = mcusPerLine * frame.mcusPerColumn;\\\\n }\\\\n\\\\n const usedResetInterval = resetInterval || mcuExpected;\\\\n\\\\n while (mcu < mcuExpected) {\\\\n // reset interval stuff\\\\n for (i = 0; i < componentsLength; i++) {\\\\n components[i].pred = 0;\\\\n }\\\\n eobrun = 0;\\\\n\\\\n if (componentsLength === 1) {\\\\n component = components[0];\\\\n for (n = 0; n < usedResetInterval; n++) {\\\\n decodeBlock(component, decodeFn, mcu);\\\\n mcu++;\\\\n }\\\\n } else {\\\\n for (n = 0; n < usedResetInterval; n++) {\\\\n for (i = 0; i < componentsLength; i++) {\\\\n component = components[i];\\\\n const { h, v } = component;\\\\n for (j = 0; j < v; j++) {\\\\n for (k = 0; k < h; k++) {\\\\n decodeMcu(component, decodeFn, mcu, j, k);\\\\n }\\\\n }\\\\n }\\\\n mcu++;\\\\n\\\\n // If we've reached our expected MCU's, stop decoding\\\\n if (mcu === mcuExpected) {\\\\n break;\\\\n }\\\\n }\\\\n }\\\\n\\\\n // find marker\\\\n bitsCount = 0;\\\\n marker = (data[offset] << 8) | data[offset + 1];\\\\n if (marker < 0xFF00) {\\\\n throw new Error('marker was not found');\\\\n }\\\\n\\\\n if (marker >= 0xFFD0 && marker <= 0xFFD7) { // RSTx\\\\n offset += 2;\\\\n } else {\\\\n break;\\\\n }\\\\n }\\\\n\\\\n return offset - startOffset;\\\\n}\\\\n\\\\nfunction buildComponentData(frame, component) {\\\\n const lines = [];\\\\n const { blocksPerLine, blocksPerColumn } = component;\\\\n const samplesPerLine = blocksPerLine << 3;\\\\n const R = new Int32Array(64);\\\\n const r = new Uint8Array(64);\\\\n\\\\n // A port of poppler's IDCT method which in turn is taken from:\\\\n // Christoph Loeffler, Adriaan Ligtenberg, George S. Moschytz,\\\\n // \\\\\\\"Practical Fast 1-D DCT Algorithms with 11 Multiplications\\\\\\\",\\\\n // IEEE Intl. Conf. 
on Acoustics, Speech & Signal Processing, 1989,\\\\n // 988-991.\\\\n function quantizeAndInverse(zz, dataOut, dataIn) {\\\\n const qt = component.quantizationTable;\\\\n let v0;\\\\n let v1;\\\\n let v2;\\\\n let v3;\\\\n let v4;\\\\n let v5;\\\\n let v6;\\\\n let v7;\\\\n let t;\\\\n const p = dataIn;\\\\n let i;\\\\n\\\\n // dequant\\\\n for (i = 0; i < 64; i++) {\\\\n p[i] = zz[i] * qt[i];\\\\n }\\\\n\\\\n // inverse DCT on rows\\\\n for (i = 0; i < 8; ++i) {\\\\n const row = 8 * i;\\\\n\\\\n // check for all-zero AC coefficients\\\\n if (p[1 + row] === 0 && p[2 + row] === 0 && p[3 + row] === 0\\\\n && p[4 + row] === 0 && p[5 + row] === 0 && p[6 + row] === 0\\\\n && p[7 + row] === 0) {\\\\n t = ((dctSqrt2 * p[0 + row]) + 512) >> 10;\\\\n p[0 + row] = t;\\\\n p[1 + row] = t;\\\\n p[2 + row] = t;\\\\n p[3 + row] = t;\\\\n p[4 + row] = t;\\\\n p[5 + row] = t;\\\\n p[6 + row] = t;\\\\n p[7 + row] = t;\\\\n continue; // eslint-disable-line no-continue\\\\n }\\\\n\\\\n // stage 4\\\\n v0 = ((dctSqrt2 * p[0 + row]) + 128) >> 8;\\\\n v1 = ((dctSqrt2 * p[4 + row]) + 128) >> 8;\\\\n v2 = p[2 + row];\\\\n v3 = p[6 + row];\\\\n v4 = ((dctSqrt1d2 * (p[1 + row] - p[7 + row])) + 128) >> 8;\\\\n v7 = ((dctSqrt1d2 * (p[1 + row] + p[7 + row])) + 128) >> 8;\\\\n v5 = p[3 + row] << 4;\\\\n v6 = p[5 + row] << 4;\\\\n\\\\n // stage 3\\\\n t = (v0 - v1 + 1) >> 1;\\\\n v0 = (v0 + v1 + 1) >> 1;\\\\n v1 = t;\\\\n t = ((v2 * dctSin6) + (v3 * dctCos6) + 128) >> 8;\\\\n v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 128) >> 8;\\\\n v3 = t;\\\\n t = (v4 - v6 + 1) >> 1;\\\\n v4 = (v4 + v6 + 1) >> 1;\\\\n v6 = t;\\\\n t = (v7 + v5 + 1) >> 1;\\\\n v5 = (v7 - v5 + 1) >> 1;\\\\n v7 = t;\\\\n\\\\n // stage 2\\\\n t = (v0 - v3 + 1) >> 1;\\\\n v0 = (v0 + v3 + 1) >> 1;\\\\n v3 = t;\\\\n t = (v1 - v2 + 1) >> 1;\\\\n v1 = (v1 + v2 + 1) >> 1;\\\\n v2 = t;\\\\n t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;\\\\n v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;\\\\n v7 = t;\\\\n t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;\\\\n v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;\\\\n v6 = t;\\\\n\\\\n // stage 1\\\\n p[0 + row] = v0 + v7;\\\\n p[7 + row] = v0 - v7;\\\\n p[1 + row] = v1 + v6;\\\\n p[6 + row] = v1 - v6;\\\\n p[2 + row] = v2 + v5;\\\\n p[5 + row] = v2 - v5;\\\\n p[3 + row] = v3 + v4;\\\\n p[4 + row] = v3 - v4;\\\\n }\\\\n\\\\n // inverse DCT on columns\\\\n for (i = 0; i < 8; ++i) {\\\\n const col = i;\\\\n\\\\n // check for all-zero AC coefficients\\\\n if (p[(1 * 8) + col] === 0 && p[(2 * 8) + col] === 0 && p[(3 * 8) + col] === 0\\\\n && p[(4 * 8) + col] === 0 && p[(5 * 8) + col] === 0 && p[(6 * 8) + col] === 0\\\\n && p[(7 * 8) + col] === 0) {\\\\n t = ((dctSqrt2 * dataIn[i + 0]) + 8192) >> 14;\\\\n p[(0 * 8) + col] = t;\\\\n p[(1 * 8) + col] = t;\\\\n p[(2 * 8) + col] = t;\\\\n p[(3 * 8) + col] = t;\\\\n p[(4 * 8) + col] = t;\\\\n p[(5 * 8) + col] = t;\\\\n p[(6 * 8) + col] = t;\\\\n p[(7 * 8) + col] = t;\\\\n continue; // eslint-disable-line no-continue\\\\n }\\\\n\\\\n // stage 4\\\\n v0 = ((dctSqrt2 * p[(0 * 8) + col]) + 2048) >> 12;\\\\n v1 = ((dctSqrt2 * p[(4 * 8) + col]) + 2048) >> 12;\\\\n v2 = p[(2 * 8) + col];\\\\n v3 = p[(6 * 8) + col];\\\\n v4 = ((dctSqrt1d2 * (p[(1 * 8) + col] - p[(7 * 8) + col])) + 2048) >> 12;\\\\n v7 = ((dctSqrt1d2 * (p[(1 * 8) + col] + p[(7 * 8) + col])) + 2048) >> 12;\\\\n v5 = p[(3 * 8) + col];\\\\n v6 = p[(5 * 8) + col];\\\\n\\\\n // stage 3\\\\n t = (v0 - v1 + 1) >> 1;\\\\n v0 = (v0 + v1 + 1) >> 1;\\\\n v1 = t;\\\\n t = ((v2 * dctSin6) + (v3 * dctCos6) + 2048) >> 
12;\\\\n v2 = ((v2 * dctCos6) - (v3 * dctSin6) + 2048) >> 12;\\\\n v3 = t;\\\\n t = (v4 - v6 + 1) >> 1;\\\\n v4 = (v4 + v6 + 1) >> 1;\\\\n v6 = t;\\\\n t = (v7 + v5 + 1) >> 1;\\\\n v5 = (v7 - v5 + 1) >> 1;\\\\n v7 = t;\\\\n\\\\n // stage 2\\\\n t = (v0 - v3 + 1) >> 1;\\\\n v0 = (v0 + v3 + 1) >> 1;\\\\n v3 = t;\\\\n t = (v1 - v2 + 1) >> 1;\\\\n v1 = (v1 + v2 + 1) >> 1;\\\\n v2 = t;\\\\n t = ((v4 * dctSin3) + (v7 * dctCos3) + 2048) >> 12;\\\\n v4 = ((v4 * dctCos3) - (v7 * dctSin3) + 2048) >> 12;\\\\n v7 = t;\\\\n t = ((v5 * dctSin1) + (v6 * dctCos1) + 2048) >> 12;\\\\n v5 = ((v5 * dctCos1) - (v6 * dctSin1) + 2048) >> 12;\\\\n v6 = t;\\\\n\\\\n // stage 1\\\\n p[(0 * 8) + col] = v0 + v7;\\\\n p[(7 * 8) + col] = v0 - v7;\\\\n p[(1 * 8) + col] = v1 + v6;\\\\n p[(6 * 8) + col] = v1 - v6;\\\\n p[(2 * 8) + col] = v2 + v5;\\\\n p[(5 * 8) + col] = v2 - v5;\\\\n p[(3 * 8) + col] = v3 + v4;\\\\n p[(4 * 8) + col] = v3 - v4;\\\\n }\\\\n\\\\n // convert to 8-bit integers\\\\n for (i = 0; i < 64; ++i) {\\\\n const sample = 128 + ((p[i] + 8) >> 4);\\\\n if (sample < 0) {\\\\n dataOut[i] = 0;\\\\n } else if (sample > 0XFF) {\\\\n dataOut[i] = 0xFF;\\\\n } else {\\\\n dataOut[i] = sample;\\\\n }\\\\n }\\\\n }\\\\n\\\\n for (let blockRow = 0; blockRow < blocksPerColumn; blockRow++) {\\\\n const scanLine = blockRow << 3;\\\\n for (let i = 0; i < 8; i++) {\\\\n lines.push(new Uint8Array(samplesPerLine));\\\\n }\\\\n for (let blockCol = 0; blockCol < blocksPerLine; blockCol++) {\\\\n quantizeAndInverse(component.blocks[blockRow][blockCol], r, R);\\\\n\\\\n let offset = 0;\\\\n const sample = blockCol << 3;\\\\n for (let j = 0; j < 8; j++) {\\\\n const line = lines[scanLine + j];\\\\n for (let i = 0; i < 8; i++) {\\\\n line[sample + i] = r[offset++];\\\\n }\\\\n }\\\\n }\\\\n }\\\\n return lines;\\\\n}\\\\n\\\\nclass JpegStreamReader {\\\\n constructor() {\\\\n this.jfif = null;\\\\n this.adobe = null;\\\\n\\\\n this.quantizationTables = [];\\\\n this.huffmanTablesAC = [];\\\\n this.huffmanTablesDC = [];\\\\n this.resetFrames();\\\\n }\\\\n\\\\n resetFrames() {\\\\n this.frames = [];\\\\n }\\\\n\\\\n parse(data) {\\\\n let offset = 0;\\\\n // const { length } = data;\\\\n function readUint16() {\\\\n const value = (data[offset] << 8) | data[offset + 1];\\\\n offset += 2;\\\\n return value;\\\\n }\\\\n function readDataBlock() {\\\\n const length = readUint16();\\\\n const array = data.subarray(offset, offset + length - 2);\\\\n offset += array.length;\\\\n return array;\\\\n }\\\\n function prepareComponents(frame) {\\\\n let maxH = 0;\\\\n let maxV = 0;\\\\n let component;\\\\n let componentId;\\\\n for (componentId in frame.components) {\\\\n if (frame.components.hasOwnProperty(componentId)) {\\\\n component = frame.components[componentId];\\\\n if (maxH < component.h) {\\\\n maxH = component.h;\\\\n }\\\\n if (maxV < component.v) {\\\\n maxV = component.v;\\\\n }\\\\n }\\\\n }\\\\n const mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / maxH);\\\\n const mcusPerColumn = Math.ceil(frame.scanLines / 8 / maxV);\\\\n for (componentId in frame.components) {\\\\n if (frame.components.hasOwnProperty(componentId)) {\\\\n component = frame.components[componentId];\\\\n const blocksPerLine = Math.ceil(Math.ceil(frame.samplesPerLine / 8) * component.h / maxH);\\\\n const blocksPerColumn = Math.ceil(Math.ceil(frame.scanLines / 8) * component.v / maxV);\\\\n const blocksPerLineForMcu = mcusPerLine * component.h;\\\\n const blocksPerColumnForMcu = mcusPerColumn * component.v;\\\\n const blocks = [];\\\\n for (let i = 0; i 
< blocksPerColumnForMcu; i++) {\\\\n const row = [];\\\\n for (let j = 0; j < blocksPerLineForMcu; j++) {\\\\n row.push(new Int32Array(64));\\\\n }\\\\n blocks.push(row);\\\\n }\\\\n component.blocksPerLine = blocksPerLine;\\\\n component.blocksPerColumn = blocksPerColumn;\\\\n component.blocks = blocks;\\\\n }\\\\n }\\\\n frame.maxH = maxH;\\\\n frame.maxV = maxV;\\\\n frame.mcusPerLine = mcusPerLine;\\\\n frame.mcusPerColumn = mcusPerColumn;\\\\n }\\\\n\\\\n let fileMarker = readUint16();\\\\n if (fileMarker !== 0xFFD8) { // SOI (Start of Image)\\\\n throw new Error('SOI not found');\\\\n }\\\\n\\\\n fileMarker = readUint16();\\\\n while (fileMarker !== 0xFFD9) { // EOI (End of image)\\\\n switch (fileMarker) {\\\\n case 0xFF00: break;\\\\n case 0xFFE0: // APP0 (Application Specific)\\\\n case 0xFFE1: // APP1\\\\n case 0xFFE2: // APP2\\\\n case 0xFFE3: // APP3\\\\n case 0xFFE4: // APP4\\\\n case 0xFFE5: // APP5\\\\n case 0xFFE6: // APP6\\\\n case 0xFFE7: // APP7\\\\n case 0xFFE8: // APP8\\\\n case 0xFFE9: // APP9\\\\n case 0xFFEA: // APP10\\\\n case 0xFFEB: // APP11\\\\n case 0xFFEC: // APP12\\\\n case 0xFFED: // APP13\\\\n case 0xFFEE: // APP14\\\\n case 0xFFEF: // APP15\\\\n case 0xFFFE: { // COM (Comment)\\\\n const appData = readDataBlock();\\\\n\\\\n if (fileMarker === 0xFFE0) {\\\\n if (appData[0] === 0x4A && appData[1] === 0x46 && appData[2] === 0x49\\\\n && appData[3] === 0x46 && appData[4] === 0) { // 'JFIF\\\\\\\\x00'\\\\n this.jfif = {\\\\n version: { major: appData[5], minor: appData[6] },\\\\n densityUnits: appData[7],\\\\n xDensity: (appData[8] << 8) | appData[9],\\\\n yDensity: (appData[10] << 8) | appData[11],\\\\n thumbWidth: appData[12],\\\\n thumbHeight: appData[13],\\\\n thumbData: appData.subarray(14, 14 + (3 * appData[12] * appData[13])),\\\\n };\\\\n }\\\\n }\\\\n // TODO APP1 - Exif\\\\n if (fileMarker === 0xFFEE) {\\\\n if (appData[0] === 0x41 && appData[1] === 0x64 && appData[2] === 0x6F\\\\n && appData[3] === 0x62 && appData[4] === 0x65 && appData[5] === 0) { // 'Adobe\\\\\\\\x00'\\\\n this.adobe = {\\\\n version: appData[6],\\\\n flags0: (appData[7] << 8) | appData[8],\\\\n flags1: (appData[9] << 8) | appData[10],\\\\n transformCode: appData[11],\\\\n };\\\\n }\\\\n }\\\\n break;\\\\n }\\\\n\\\\n case 0xFFDB: { // DQT (Define Quantization Tables)\\\\n const quantizationTablesLength = readUint16();\\\\n const quantizationTablesEnd = quantizationTablesLength + offset - 2;\\\\n while (offset < quantizationTablesEnd) {\\\\n const quantizationTableSpec = data[offset++];\\\\n const tableData = new Int32Array(64);\\\\n if ((quantizationTableSpec >> 4) === 0) { // 8 bit values\\\\n for (let j = 0; j < 64; j++) {\\\\n const z = dctZigZag[j];\\\\n tableData[z] = data[offset++];\\\\n }\\\\n } else if ((quantizationTableSpec >> 4) === 1) { // 16 bit\\\\n for (let j = 0; j < 64; j++) {\\\\n const z = dctZigZag[j];\\\\n tableData[z] = readUint16();\\\\n }\\\\n } else {\\\\n throw new Error('DQT: invalid table spec');\\\\n }\\\\n this.quantizationTables[quantizationTableSpec & 15] = tableData;\\\\n }\\\\n break;\\\\n }\\\\n\\\\n case 0xFFC0: // SOF0 (Start of Frame, Baseline DCT)\\\\n case 0xFFC1: // SOF1 (Start of Frame, Extended DCT)\\\\n case 0xFFC2: { // SOF2 (Start of Frame, Progressive DCT)\\\\n readUint16(); // skip data length\\\\n const frame = {\\\\n extended: (fileMarker === 0xFFC1),\\\\n progressive: (fileMarker === 0xFFC2),\\\\n precision: data[offset++],\\\\n scanLines: readUint16(),\\\\n samplesPerLine: readUint16(),\\\\n components: {},\\\\n componentsOrder: 
[],\\\\n };\\\\n\\\\n const componentsCount = data[offset++];\\\\n let componentId;\\\\n // let maxH = 0;\\\\n // let maxV = 0;\\\\n for (let i = 0; i < componentsCount; i++) {\\\\n componentId = data[offset];\\\\n const h = data[offset + 1] >> 4;\\\\n const v = data[offset + 1] & 15;\\\\n const qId = data[offset + 2];\\\\n frame.componentsOrder.push(componentId);\\\\n frame.components[componentId] = {\\\\n h,\\\\n v,\\\\n quantizationIdx: qId,\\\\n };\\\\n offset += 3;\\\\n }\\\\n prepareComponents(frame);\\\\n this.frames.push(frame);\\\\n break;\\\\n }\\\\n\\\\n case 0xFFC4: { // DHT (Define Huffman Tables)\\\\n const huffmanLength = readUint16();\\\\n for (let i = 2; i < huffmanLength;) {\\\\n const huffmanTableSpec = data[offset++];\\\\n const codeLengths = new Uint8Array(16);\\\\n let codeLengthSum = 0;\\\\n for (let j = 0; j < 16; j++, offset++) {\\\\n codeLengths[j] = data[offset];\\\\n codeLengthSum += codeLengths[j];\\\\n }\\\\n const huffmanValues = new Uint8Array(codeLengthSum);\\\\n for (let j = 0; j < codeLengthSum; j++, offset++) {\\\\n huffmanValues[j] = data[offset];\\\\n }\\\\n i += 17 + codeLengthSum;\\\\n\\\\n if ((huffmanTableSpec >> 4) === 0) {\\\\n this.huffmanTablesDC[huffmanTableSpec & 15] = buildHuffmanTable(\\\\n codeLengths, huffmanValues,\\\\n );\\\\n } else {\\\\n this.huffmanTablesAC[huffmanTableSpec & 15] = buildHuffmanTable(\\\\n codeLengths, huffmanValues,\\\\n );\\\\n }\\\\n }\\\\n break;\\\\n }\\\\n\\\\n case 0xFFDD: // DRI (Define Restart Interval)\\\\n readUint16(); // skip data length\\\\n this.resetInterval = readUint16();\\\\n break;\\\\n\\\\n case 0xFFDA: { // SOS (Start of Scan)\\\\n readUint16(); // skip length\\\\n const selectorsCount = data[offset++];\\\\n const components = [];\\\\n const frame = this.frames[0];\\\\n for (let i = 0; i < selectorsCount; i++) {\\\\n const component = frame.components[data[offset++]];\\\\n const tableSpec = data[offset++];\\\\n component.huffmanTableDC = this.huffmanTablesDC[tableSpec >> 4];\\\\n component.huffmanTableAC = this.huffmanTablesAC[tableSpec & 15];\\\\n components.push(component);\\\\n }\\\\n const spectralStart = data[offset++];\\\\n const spectralEnd = data[offset++];\\\\n const successiveApproximation = data[offset++];\\\\n const processed = decodeScan(data, offset,\\\\n frame, components, this.resetInterval,\\\\n spectralStart, spectralEnd,\\\\n successiveApproximation >> 4, successiveApproximation & 15);\\\\n offset += processed;\\\\n break;\\\\n }\\\\n\\\\n case 0xFFFF: // Fill bytes\\\\n if (data[offset] !== 0xFF) { // Avoid skipping a valid marker.\\\\n offset--;\\\\n }\\\\n break;\\\\n\\\\n default:\\\\n if (data[offset - 3] === 0xFF\\\\n && data[offset - 2] >= 0xC0 && data[offset - 2] <= 0xFE) {\\\\n // could be incorrect encoding -- last 0xFF byte of the previous\\\\n // block was eaten by the encoder\\\\n offset -= 3;\\\\n break;\\\\n }\\\\n throw new Error(`unknown JPEG marker ${fileMarker.toString(16)}`);\\\\n }\\\\n fileMarker = readUint16();\\\\n }\\\\n }\\\\n\\\\n getResult() {\\\\n const { frames } = this;\\\\n if (this.frames.length === 0) {\\\\n throw new Error('no frames were decoded');\\\\n } else if (this.frames.length > 1) {\\\\n console.warn('more than one frame is not supported');\\\\n }\\\\n\\\\n // set each frame's components quantization table\\\\n for (let i = 0; i < this.frames.length; i++) {\\\\n const cp = this.frames[i].components;\\\\n for (const j of Object.keys(cp)) {\\\\n cp[j].quantizationTable = this.quantizationTables[cp[j].quantizationIdx];\\\\n delete 
cp[j].quantizationIdx;\\\\n }\\\\n }\\\\n\\\\n const frame = frames[0];\\\\n const { components, componentsOrder } = frame;\\\\n const outComponents = [];\\\\n const width = frame.samplesPerLine;\\\\n const height = frame.scanLines;\\\\n\\\\n for (let i = 0; i < componentsOrder.length; i++) {\\\\n const component = components[componentsOrder[i]];\\\\n outComponents.push({\\\\n lines: buildComponentData(frame, component),\\\\n scaleX: component.h / frame.maxH,\\\\n scaleY: component.v / frame.maxV,\\\\n });\\\\n }\\\\n\\\\n const out = new Uint8Array(width * height * outComponents.length);\\\\n let oi = 0;\\\\n for (let y = 0; y < height; ++y) {\\\\n for (let x = 0; x < width; ++x) {\\\\n for (let i = 0; i < outComponents.length; ++i) {\\\\n const component = outComponents[i];\\\\n out[oi] = component.lines[0 | y * component.scaleY][0 | x * component.scaleX];\\\\n ++oi;\\\\n }\\\\n }\\\\n }\\\\n return out;\\\\n }\\\\n}\\\\n\\\\nclass JpegDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"] {\\\\n constructor(fileDirectory) {\\\\n super();\\\\n this.reader = new JpegStreamReader();\\\\n if (fileDirectory.JPEGTables) {\\\\n this.reader.parse(fileDirectory.JPEGTables);\\\\n }\\\\n }\\\\n\\\\n decodeBlock(buffer) {\\\\n this.reader.resetFrames();\\\\n this.reader.parse(new Uint8Array(buffer));\\\\n return this.reader.getResult().buffer;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/jpeg.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/lzw.js\\\":\\n/*!*****************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/lzw.js ***!\\n \\\\*****************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return LZWDecoder; });\\\\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./basedecoder */ \\\\\\\"./node_modules/geotiff/src/compression/basedecoder.js\\\\\\\");\\\\n\\\\n\\\\n\\\\nconst MIN_BITS = 9;\\\\nconst CLEAR_CODE = 256; // clear code\\\\nconst EOI_CODE = 257; // end of information\\\\nconst MAX_BYTELENGTH = 12;\\\\n\\\\nfunction getByte(array, position, length) {\\\\n const d = position % 8;\\\\n const a = Math.floor(position / 8);\\\\n const de = 8 - d;\\\\n const ef = (position + length) - ((a + 1) * 8);\\\\n let fg = (8 * (a + 2)) - (position + length);\\\\n const dg = ((a + 2) * 8) - position;\\\\n fg = Math.max(0, fg);\\\\n if (a >= array.length) {\\\\n console.warn('ran off the end of the buffer before finding EOI_CODE (end on input code)');\\\\n return EOI_CODE;\\\\n }\\\\n let chunk1 = array[a] & ((2 ** (8 - d)) - 1);\\\\n chunk1 <<= (length - de);\\\\n let chunks = chunk1;\\\\n if (a + 1 < array.length) {\\\\n let chunk2 = array[a + 1] >>> fg;\\\\n chunk2 <<= Math.max(0, (length - dg));\\\\n chunks += chunk2;\\\\n }\\\\n if (ef > 8 && a + 2 < array.length) {\\\\n const hi = ((a + 3) * 8) - (position + length);\\\\n const chunk3 = array[a + 2] >>> hi;\\\\n chunks += chunk3;\\\\n }\\\\n return chunks;\\\\n}\\\\n\\\\nfunction appendReversed(dest, source) {\\\\n for (let i = source.length - 1; i >= 0; i--) {\\\\n dest.push(source[i]);\\\\n }\\\\n return dest;\\\\n}\\\\n\\\\nfunction decompress(input) {\\\\n const dictionaryIndex = new Uint16Array(4093);\\\\n const dictionaryChar = new Uint8Array(4093);\\\\n for (let i = 0; i <= 257; i++) {\\\\n dictionaryIndex[i] = 4096;\\\\n dictionaryChar[i] = i;\\\\n }\\\\n let dictionaryLength = 258;\\\\n let byteLength = MIN_BITS;\\\\n let position = 0;\\\\n\\\\n function initDictionary() {\\\\n dictionaryLength = 258;\\\\n byteLength = MIN_BITS;\\\\n }\\\\n function getNext(array) {\\\\n const byte = getByte(array, position, byteLength);\\\\n position += byteLength;\\\\n return byte;\\\\n }\\\\n function addToDictionary(i, c) {\\\\n dictionaryChar[dictionaryLength] = c;\\\\n dictionaryIndex[dictionaryLength] = i;\\\\n dictionaryLength++;\\\\n return dictionaryLength - 1;\\\\n }\\\\n function getDictionaryReversed(n) {\\\\n const rev = [];\\\\n for (let i = n; i !== 4096; i = dictionaryIndex[i]) {\\\\n rev.push(dictionaryChar[i]);\\\\n }\\\\n return rev;\\\\n }\\\\n\\\\n const result = [];\\\\n initDictionary();\\\\n const array = new Uint8Array(input);\\\\n let code = getNext(array);\\\\n let oldCode;\\\\n while (code !== EOI_CODE) {\\\\n if (code === CLEAR_CODE) {\\\\n initDictionary();\\\\n code = getNext(array);\\\\n while (code === CLEAR_CODE) {\\\\n code = getNext(array);\\\\n }\\\\n\\\\n if (code === EOI_CODE) {\\\\n break;\\\\n } else if (code > CLEAR_CODE) {\\\\n throw new Error(`corrupted code at scanline ${code}`);\\\\n } else {\\\\n const val = getDictionaryReversed(code);\\\\n appendReversed(result, val);\\\\n oldCode = code;\\\\n }\\\\n } else if (code < dictionaryLength) {\\\\n const val = getDictionaryReversed(code);\\\\n appendReversed(result, val);\\\\n addToDictionary(oldCode, val[val.length - 1]);\\\\n oldCode = code;\\\\n } else {\\\\n const oldVal = getDictionaryReversed(oldCode);\\\\n if (!oldVal) {\\\\n throw new Error(`Bogus entry. 
Not in dictionary, ${oldCode} / ${dictionaryLength}, position: ${position}`);\\\\n }\\\\n appendReversed(result, oldVal);\\\\n result.push(oldVal[oldVal.length - 1]);\\\\n addToDictionary(oldCode, oldVal[oldVal.length - 1]);\\\\n oldCode = code;\\\\n }\\\\n\\\\n if (dictionaryLength + 1 >= (2 ** byteLength)) {\\\\n if (byteLength === MAX_BYTELENGTH) {\\\\n oldCode = undefined;\\\\n } else {\\\\n byteLength++;\\\\n }\\\\n }\\\\n code = getNext(array);\\\\n }\\\\n return new Uint8Array(result);\\\\n}\\\\n\\\\nclass LZWDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"] {\\\\n decodeBlock(buffer) {\\\\n return decompress(buffer, false).buffer;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/lzw.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/packbits.js\\\":\\n/*!**********************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/packbits.js ***!\\n \\\\**********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return PackbitsDecoder; });\\\\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./basedecoder */ \\\\\\\"./node_modules/geotiff/src/compression/basedecoder.js\\\\\\\");\\\\n\\\\n\\\\n\\\\nclass PackbitsDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"] {\\\\n decodeBlock(buffer) {\\\\n const dataView = new DataView(buffer);\\\\n const out = [];\\\\n\\\\n for (let i = 0; i < buffer.byteLength; ++i) {\\\\n let header = dataView.getInt8(i);\\\\n if (header < 0) {\\\\n const next = dataView.getUint8(i + 1);\\\\n header = -header;\\\\n for (let j = 0; j <= header; ++j) {\\\\n out.push(next);\\\\n }\\\\n i += 1;\\\\n } else {\\\\n for (let j = 0; j <= header; ++j) {\\\\n out.push(dataView.getUint8(i + j + 1));\\\\n }\\\\n i += header + 1;\\\\n }\\\\n }\\\\n return new Uint8Array(out).buffer;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/packbits.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/compression/raw.js\\\":\\n/*!*****************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/compression/raw.js ***!\\n \\\\*****************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return RawDecoder; });\\\\n/* harmony import */ var _basedecoder__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./basedecoder */ \\\\\\\"./node_modules/geotiff/src/compression/basedecoder.js\\\\\\\");\\\\n\\\\n\\\\n\\\\nclass RawDecoder extends _basedecoder__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"] {\\\\n decodeBlock(buffer) {\\\\n return buffer;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/compression/raw.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/dataslice.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/dataslice.js ***!\\n \\\\***********************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return DataSlice; });\\\\nclass DataSlice {\\\\n constructor(arrayBuffer, sliceOffset, littleEndian, bigTiff) {\\\\n this._dataView = new DataView(arrayBuffer);\\\\n this._sliceOffset = sliceOffset;\\\\n this._littleEndian = littleEndian;\\\\n this._bigTiff = bigTiff;\\\\n }\\\\n\\\\n get sliceOffset() {\\\\n return this._sliceOffset;\\\\n }\\\\n\\\\n get sliceTop() {\\\\n return this._sliceOffset + this.buffer.byteLength;\\\\n }\\\\n\\\\n get littleEndian() {\\\\n return this._littleEndian;\\\\n }\\\\n\\\\n get bigTiff() {\\\\n return this._bigTiff;\\\\n }\\\\n\\\\n get buffer() {\\\\n return this._dataView.buffer;\\\\n }\\\\n\\\\n covers(offset, length) {\\\\n return this.sliceOffset <= offset && this.sliceTop >= offset + length;\\\\n }\\\\n\\\\n readUint8(offset) {\\\\n return this._dataView.getUint8(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readInt8(offset) {\\\\n return this._dataView.getInt8(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readUint16(offset) {\\\\n return this._dataView.getUint16(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readInt16(offset) {\\\\n return this._dataView.getInt16(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readUint32(offset) {\\\\n return this._dataView.getUint32(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readInt32(offset) {\\\\n return this._dataView.getInt32(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readFloat32(offset) {\\\\n return this._dataView.getFloat32(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readFloat64(offset) {\\\\n return this._dataView.getFloat64(\\\\n offset - this._sliceOffset, this._littleEndian,\\\\n );\\\\n }\\\\n\\\\n readUint64(offset) {\\\\n const left = this.readUint32(offset);\\\\n const right = this.readUint32(offset + 4);\\\\n let combined;\\\\n if (this._littleEndian) {\\\\n combined = left + 2 ** 32 * right;\\\\n if (!Number.isSafeInteger(combined)) {\\\\n throw new Error(\\\\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`,\\\\n );\\\\n }\\\\n return combined;\\\\n }\\\\n combined = 2 ** 32 * left + right;\\\\n if (!Number.isSafeInteger(combined)) {\\\\n throw new Error(\\\\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. 
Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`,\\\\n );\\\\n }\\\\n\\\\n return combined;\\\\n }\\\\n\\\\n // adapted from https://stackoverflow.com/a/55338384/8060591\\\\n readInt64(offset) {\\\\n let value = 0;\\\\n const isNegative =\\\\n (this._dataView.getUint8(offset + (this._littleEndian ? 7 : 0)) & 0x80) >\\\\n 0;\\\\n let carrying = true;\\\\n for (let i = 0; i < 8; i++) {\\\\n let byte = this._dataView.getUint8(\\\\n offset + (this._littleEndian ? i : 7 - i)\\\\n );\\\\n if (isNegative) {\\\\n if (carrying) {\\\\n if (byte !== 0x00) {\\\\n byte = ~(byte - 1) & 0xff;\\\\n carrying = false;\\\\n }\\\\n } else {\\\\n byte = ~byte & 0xff;\\\\n }\\\\n }\\\\n value += byte * 256 ** i;\\\\n }\\\\n if (isNegative) {\\\\n value = -value;\\\\n }\\\\n return value\\\\n }\\\\n\\\\n readOffset(offset) {\\\\n if (this._bigTiff) {\\\\n return this.readUint64(offset);\\\\n }\\\\n return this.readUint32(offset);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/dataslice.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/dataview64.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/dataview64.js ***!\\n \\\\************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return DataView64; });\\\\nclass DataView64 {\\\\n constructor(arrayBuffer) {\\\\n this._dataView = new DataView(arrayBuffer);\\\\n }\\\\n\\\\n get buffer() {\\\\n return this._dataView.buffer;\\\\n }\\\\n\\\\n getUint64(offset, littleEndian) {\\\\n const left = this.getUint32(offset, littleEndian);\\\\n const right = this.getUint32(offset + 4, littleEndian);\\\\n let combined;\\\\n if (littleEndian) {\\\\n combined = left + 2 ** 32 * right;\\\\n if (!Number.isSafeInteger(combined)) {\\\\n throw new Error(\\\\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`\\\\n );\\\\n }\\\\n return combined;\\\\n }\\\\n combined = 2 ** 32 * left + right;\\\\n if (!Number.isSafeInteger(combined)) {\\\\n throw new Error(\\\\n `${combined} exceeds MAX_SAFE_INTEGER. Precision may be lost. Please report if you get this message to https://github.com/geotiffjs/geotiff.js/issues`\\\\n );\\\\n }\\\\n\\\\n return combined;\\\\n }\\\\n\\\\n // adapted from https://stackoverflow.com/a/55338384/8060591\\\\n getInt64(offset, littleEndian) {\\\\n let value = 0;\\\\n const isNegative =\\\\n (this._dataView.getUint8(offset + (littleEndian ? 7 : 0)) & 0x80) > 0;\\\\n let carrying = true;\\\\n for (let i = 0; i < 8; i++) {\\\\n let byte = this._dataView.getUint8(offset + (littleEndian ? 
i : 7 - i));\\\\n if (isNegative) {\\\\n if (carrying) {\\\\n if (byte !== 0x00) {\\\\n byte = ~(byte - 1) & 0xff;\\\\n carrying = false;\\\\n }\\\\n } else {\\\\n byte = ~byte & 0xff;\\\\n }\\\\n }\\\\n value += byte * 256 ** i;\\\\n }\\\\n if (isNegative) {\\\\n value = -value;\\\\n }\\\\n return value;\\\\n }\\\\n\\\\n getUint8(offset, littleEndian) {\\\\n return this._dataView.getUint8(offset, littleEndian);\\\\n }\\\\n\\\\n getInt8(offset, littleEndian) {\\\\n return this._dataView.getInt8(offset, littleEndian);\\\\n }\\\\n\\\\n getUint16(offset, littleEndian) {\\\\n return this._dataView.getUint16(offset, littleEndian);\\\\n }\\\\n\\\\n getInt16(offset, littleEndian) {\\\\n return this._dataView.getInt16(offset, littleEndian);\\\\n }\\\\n\\\\n getUint32(offset, littleEndian) {\\\\n return this._dataView.getUint32(offset, littleEndian);\\\\n }\\\\n\\\\n getInt32(offset, littleEndian) {\\\\n return this._dataView.getInt32(offset, littleEndian);\\\\n }\\\\n\\\\n getFloat32(offset, littleEndian) {\\\\n return this._dataView.getFloat32(offset, littleEndian);\\\\n }\\\\n\\\\n getFloat64(offset, littleEndian) {\\\\n return this._dataView.getFloat64(offset, littleEndian);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/dataview64.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/geotiff.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/geotiff.js ***!\\n \\\\*********************************************/\\n/*! exports provided: globals, rgb, getDecoder, setLogger, GeoTIFF, default, MultiGeoTIFF, fromUrl, fromArrayBuffer, fromFile, fromBlob, fromUrls, writeArrayBuffer, Pool */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"GeoTIFF\\\\\\\", function() { return GeoTIFF; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"MultiGeoTIFF\\\\\\\", function() { return MultiGeoTIFF; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromUrl\\\\\\\", function() { return fromUrl; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromArrayBuffer\\\\\\\", function() { return fromArrayBuffer; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromFile\\\\\\\", function() { return fromFile; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromBlob\\\\\\\", function() { return fromBlob; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromUrls\\\\\\\", function() { return fromUrls; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"writeArrayBuffer\\\\\\\", function() { return writeArrayBuffer; });\\\\n/* harmony import */ var _geotiffimage__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./geotiffimage */ \\\\\\\"./node_modules/geotiff/src/geotiffimage.js\\\\\\\");\\\\n/* harmony import */ var _dataview64__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./dataview64 */ \\\\\\\"./node_modules/geotiff/src/dataview64.js\\\\\\\");\\\\n/* harmony import */ var _dataslice__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
./dataslice */ \\\\\\\"./node_modules/geotiff/src/dataslice.js\\\\\\\");\\\\n/* harmony import */ var _pool__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./pool */ \\\\\\\"./node_modules/geotiff/src/pool.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Pool\\\\\\\", function() { return _pool__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _source__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./source */ \\\\\\\"./node_modules/geotiff/src/source.js\\\\\\\");\\\\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./globals */ \\\\\\\"./node_modules/geotiff/src/globals.js\\\\\\\");\\\\n/* harmony import */ var _geotiffwriter__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./geotiffwriter */ \\\\\\\"./node_modules/geotiff/src/geotiffwriter.js\\\\\\\");\\\\n/* harmony reexport (module object) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"globals\\\\\\\", function() { return _globals__WEBPACK_IMPORTED_MODULE_5__; });\\\\n/* harmony import */ var _rgb__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./rgb */ \\\\\\\"./node_modules/geotiff/src/rgb.js\\\\\\\");\\\\n/* harmony reexport (module object) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"rgb\\\\\\\", function() { return _rgb__WEBPACK_IMPORTED_MODULE_7__; });\\\\n/* harmony import */ var _compression__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./compression */ \\\\\\\"./node_modules/geotiff/src/compression/index.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getDecoder\\\\\\\", function() { return _compression__WEBPACK_IMPORTED_MODULE_8__[\\\\\\\"getDecoder\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _logging__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! 
./logging */ \\\\\\\"./node_modules/geotiff/src/logging.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"setLogger\\\\\\\", function() { return _logging__WEBPACK_IMPORTED_MODULE_9__[\\\\\\\"setLogger\\\\\\\"]; });\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nfunction getFieldTypeLength(fieldType) {\\\\n switch (fieldType) {\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].BYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SBYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].UNDEFINED:\\\\n return 1;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SHORT: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SSHORT:\\\\n return 2;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].LONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SLONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].FLOAT: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].IFD:\\\\n return 4;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].RATIONAL: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SRATIONAL: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].DOUBLE:\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].LONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SLONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].IFD8:\\\\n return 8;\\\\n default:\\\\n throw new RangeError(`Invalid field type: ${fieldType}`);\\\\n }\\\\n}\\\\n\\\\nfunction parseGeoKeyDirectory(fileDirectory) {\\\\n const rawGeoKeyDirectory = fileDirectory.GeoKeyDirectory;\\\\n if (!rawGeoKeyDirectory) {\\\\n return null;\\\\n }\\\\n\\\\n const geoKeyDirectory = {};\\\\n for (let i = 4; i <= rawGeoKeyDirectory[3] * 4; i += 4) {\\\\n const key = _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"geoKeyNames\\\\\\\"][rawGeoKeyDirectory[i]];\\\\n const location = (rawGeoKeyDirectory[i + 1])\\\\n ? 
(_globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTagNames\\\\\\\"][rawGeoKeyDirectory[i + 1]]) : null;\\\\n const count = rawGeoKeyDirectory[i + 2];\\\\n const offset = rawGeoKeyDirectory[i + 3];\\\\n\\\\n let value = null;\\\\n if (!location) {\\\\n value = offset;\\\\n } else {\\\\n value = fileDirectory[location];\\\\n if (typeof value === 'undefined' || value === null) {\\\\n throw new Error(`Could not get value of geoKey '${key}'.`);\\\\n } else if (typeof value === 'string') {\\\\n value = value.substring(offset, offset + count - 1);\\\\n } else if (value.subarray) {\\\\n value = value.subarray(offset, offset + count);\\\\n if (count === 1) {\\\\n value = value[0];\\\\n }\\\\n }\\\\n }\\\\n geoKeyDirectory[key] = value;\\\\n }\\\\n return geoKeyDirectory;\\\\n}\\\\n\\\\nfunction getValues(dataSlice, fieldType, count, offset) {\\\\n let values = null;\\\\n let readMethod = null;\\\\n const fieldTypeLength = getFieldTypeLength(fieldType);\\\\n\\\\n switch (fieldType) {\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].BYTE: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].UNDEFINED:\\\\n values = new Uint8Array(count); readMethod = dataSlice.readUint8;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SBYTE:\\\\n values = new Int8Array(count); readMethod = dataSlice.readInt8;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SHORT:\\\\n values = new Uint16Array(count); readMethod = dataSlice.readUint16;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SSHORT:\\\\n values = new Int16Array(count); readMethod = dataSlice.readInt16;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].LONG: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].IFD:\\\\n values = new Uint32Array(count); readMethod = dataSlice.readUint32;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SLONG:\\\\n values = new Int32Array(count); readMethod = dataSlice.readInt32;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].LONG8: case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].IFD8:\\\\n values = new Array(count); readMethod = dataSlice.readUint64;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SLONG8:\\\\n values = new Array(count); readMethod = dataSlice.readInt64;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].RATIONAL:\\\\n values = new Uint32Array(count * 2); readMethod = dataSlice.readUint32;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SRATIONAL:\\\\n values = new Int32Array(count * 2); readMethod = dataSlice.readInt32;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].FLOAT:\\\\n values = new Float32Array(count); readMethod = dataSlice.readFloat32;\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].DOUBLE:\\\\n values = new Float64Array(count); readMethod = dataSlice.readFloat64;\\\\n break;\\\\n default:\\\\n throw new RangeError(`Invalid field type: ${fieldType}`);\\\\n }\\\\n\\\\n // normal fields\\\\n if (!(fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].RATIONAL || fieldType === 
_globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SRATIONAL)) {\\\\n for (let i = 0; i < count; ++i) {\\\\n values[i] = readMethod.call(\\\\n dataSlice, offset + (i * fieldTypeLength),\\\\n );\\\\n }\\\\n } else { // RATIONAL or SRATIONAL\\\\n for (let i = 0; i < count; i += 2) {\\\\n values[i] = readMethod.call(\\\\n dataSlice, offset + (i * fieldTypeLength),\\\\n );\\\\n values[i + 1] = readMethod.call(\\\\n dataSlice, offset + ((i * fieldTypeLength) + 4),\\\\n );\\\\n }\\\\n }\\\\n\\\\n if (fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII) {\\\\n return String.fromCharCode.apply(null, values);\\\\n }\\\\n return values;\\\\n}\\\\n\\\\n/**\\\\n * Data class to store the parsed file directory, geo key directory and\\\\n * offset to the next IFD\\\\n */\\\\nclass ImageFileDirectory {\\\\n constructor(fileDirectory, geoKeyDirectory, nextIFDByteOffset) {\\\\n this.fileDirectory = fileDirectory;\\\\n this.geoKeyDirectory = geoKeyDirectory;\\\\n this.nextIFDByteOffset = nextIFDByteOffset;\\\\n }\\\\n}\\\\n\\\\n/**\\\\n * Error class for cases when an IFD index was requested, that does not exist\\\\n * in the file.\\\\n */\\\\nclass GeoTIFFImageIndexError extends Error {\\\\n constructor(index) {\\\\n super(`No image at index ${index}`);\\\\n this.index = index;\\\\n }\\\\n}\\\\n\\\\n\\\\nclass GeoTIFFBase {\\\\n /**\\\\n * (experimental) Reads raster data from the best fitting image. This function uses\\\\n * the image with the lowest resolution that is still a higher resolution than the\\\\n * requested resolution.\\\\n * When specified, the `bbox` option is translated to the `window` option and the\\\\n * `resX` and `resY` to `width` and `height` respectively.\\\\n * Then, the [readRasters]{@link GeoTIFFImage#readRasters} method of the selected\\\\n * image is called and the result returned.\\\\n * @see GeoTIFFImage.readRasters\\\\n * @param {Object} [options={}] optional parameters\\\\n * @param {Array} [options.window=whole image] the subset to read data from.\\\\n * @param {Array} [options.bbox=whole image] the subset to read data from in\\\\n * geographical coordinates.\\\\n * @param {Array} [options.samples=all samples] the selection of samples to read from.\\\\n * @param {Boolean} [options.interleave=false] whether the data shall be read\\\\n * in one single array or separate\\\\n * arrays.\\\\n * @param {Number} [options.pool=null] The optional decoder pool to use.\\\\n * @param {Number} [options.width] The desired width of the output. When the width is not the\\\\n * same as the images, resampling will be performed.\\\\n * @param {Number} [options.height] The desired height of the output. When the width is not the\\\\n * same as the images, resampling will be performed.\\\\n * @param {String} [options.resampleMethod='nearest'] The desired resampling method.\\\\n * @param {Number|Number[]} [options.fillValue] The value to use for parts of the image\\\\n * outside of the images extent. 
When multiple\\\\n * samples are requested, an array of fill values\\\\n * can be passed.\\\\n * @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise\\\\n */\\\\n async readRasters(options = {}) {\\\\n const { window: imageWindow, width, height } = options;\\\\n let { resX, resY, bbox } = options;\\\\n\\\\n const firstImage = await this.getImage();\\\\n let usedImage = firstImage;\\\\n const imageCount = await this.getImageCount();\\\\n const imgBBox = firstImage.getBoundingBox();\\\\n\\\\n if (imageWindow && bbox) {\\\\n throw new Error('Both \\\\\\\"bbox\\\\\\\" and \\\\\\\"window\\\\\\\" passed.');\\\\n }\\\\n\\\\n // if width/height is passed, transform it to resolution\\\\n if (width || height) {\\\\n // if we have an image window (pixel coordinates), transform it to a BBox\\\\n // using the origin/resolution of the first image.\\\\n if (imageWindow) {\\\\n const [oX, oY] = firstImage.getOrigin();\\\\n const [rX, rY] = firstImage.getResolution();\\\\n\\\\n bbox = [\\\\n oX + (imageWindow[0] * rX),\\\\n oY + (imageWindow[1] * rY),\\\\n oX + (imageWindow[2] * rX),\\\\n oY + (imageWindow[3] * rY),\\\\n ];\\\\n }\\\\n\\\\n // if we have a bbox (or calculated one)\\\\n\\\\n const usedBBox = bbox || imgBBox;\\\\n\\\\n if (width) {\\\\n if (resX) {\\\\n throw new Error('Both width and resX passed');\\\\n }\\\\n resX = (usedBBox[2] - usedBBox[0]) / width;\\\\n }\\\\n if (height) {\\\\n if (resY) {\\\\n throw new Error('Both width and resY passed');\\\\n }\\\\n resY = (usedBBox[3] - usedBBox[1]) / height;\\\\n }\\\\n }\\\\n\\\\n // if resolution is set or calculated, try to get the image with the worst acceptable resolution\\\\n if (resX || resY) {\\\\n const allImages = [];\\\\n for (let i = 0; i < imageCount; ++i) {\\\\n const image = await this.getImage(i);\\\\n const { SubfileType: subfileType, NewSubfileType: newSubfileType } = image.fileDirectory;\\\\n if (i === 0 || subfileType === 2 || newSubfileType & 1) {\\\\n allImages.push(image);\\\\n }\\\\n }\\\\n\\\\n allImages.sort((a, b) => a.getWidth() - b.getWidth());\\\\n for (let i = 0; i < allImages.length; ++i) {\\\\n const image = allImages[i];\\\\n const imgResX = (imgBBox[2] - imgBBox[0]) / image.getWidth();\\\\n const imgResY = (imgBBox[3] - imgBBox[1]) / image.getHeight();\\\\n\\\\n usedImage = image;\\\\n if ((resX && resX > imgResX) || (resY && resY > imgResY)) {\\\\n break;\\\\n }\\\\n }\\\\n }\\\\n\\\\n let wnd = imageWindow;\\\\n if (bbox) {\\\\n const [oX, oY] = firstImage.getOrigin();\\\\n const [imageResX, imageResY] = usedImage.getResolution(firstImage);\\\\n\\\\n wnd = [\\\\n Math.round((bbox[0] - oX) / imageResX),\\\\n Math.round((bbox[1] - oY) / imageResY),\\\\n Math.round((bbox[2] - oX) / imageResX),\\\\n Math.round((bbox[3] - oY) / imageResY),\\\\n ];\\\\n wnd = [\\\\n Math.min(wnd[0], wnd[2]),\\\\n Math.min(wnd[1], wnd[3]),\\\\n Math.max(wnd[0], wnd[2]),\\\\n Math.max(wnd[1], wnd[3]),\\\\n ];\\\\n }\\\\n\\\\n return usedImage.readRasters({ ...options, window: wnd });\\\\n }\\\\n}\\\\n\\\\n\\\\n/**\\\\n * The abstraction for a whole GeoTIFF file.\\\\n * @augments GeoTIFFBase\\\\n */\\\\nclass GeoTIFF extends GeoTIFFBase {\\\\n /**\\\\n * @constructor\\\\n * @param {Source} source The datasource to read from.\\\\n * @param {Boolean} littleEndian Whether the image uses little endian.\\\\n * @param {Boolean} bigTiff Whether the image uses bigTIFF conventions.\\\\n * @param {Number} firstIFDOffset The numeric byte-offset from the start of the image\\\\n * to the first IFD.\\\\n * @param {Object} 
[options] further options.\\\\n * @param {Boolean} [options.cache=false] whether or not decoded tiles shall be cached.\\\\n */\\\\n constructor(source, littleEndian, bigTiff, firstIFDOffset, options = {}) {\\\\n super();\\\\n this.source = source;\\\\n this.littleEndian = littleEndian;\\\\n this.bigTiff = bigTiff;\\\\n this.firstIFDOffset = firstIFDOffset;\\\\n this.cache = options.cache || false;\\\\n this.ifdRequests = [];\\\\n this.ghostValues = null;\\\\n }\\\\n\\\\n async getSlice(offset, size) {\\\\n const fallbackSize = this.bigTiff ? 4048 : 1024;\\\\n return new _dataslice__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"](\\\\n await this.source.fetch(\\\\n offset, typeof size !== 'undefined' ? size : fallbackSize,\\\\n ), offset, this.littleEndian, this.bigTiff,\\\\n );\\\\n }\\\\n\\\\n /**\\\\n * Instructs to parse an image file directory at the given file offset.\\\\n * As there is no way to ensure that a location is indeed the start of an IFD,\\\\n * this function must be called with caution (e.g only using the IFD offsets from\\\\n * the headers or other IFDs).\\\\n * @param {number} offset the offset to parse the IFD at\\\\n * @returns {ImageFileDirectory} the parsed IFD\\\\n */\\\\n async parseFileDirectoryAt(offset) {\\\\n const entrySize = this.bigTiff ? 20 : 12;\\\\n const offsetSize = this.bigTiff ? 8 : 2;\\\\n\\\\n let dataSlice = await this.getSlice(offset);\\\\n const numDirEntries = this.bigTiff ?\\\\n dataSlice.readUint64(offset) :\\\\n dataSlice.readUint16(offset);\\\\n\\\\n // if the slice does not cover the whole IFD, request a bigger slice, where the\\\\n // whole IFD fits: num of entries + n x tag length + offset to next IFD\\\\n const byteSize = (numDirEntries * entrySize) + (this.bigTiff ? 16 : 6);\\\\n if (!dataSlice.covers(offset, byteSize)) {\\\\n dataSlice = await this.getSlice(offset, byteSize);\\\\n }\\\\n\\\\n const fileDirectory = {};\\\\n\\\\n // loop over the IFD and create a file directory object\\\\n let i = offset + (this.bigTiff ? 8 : 2);\\\\n for (let entryCount = 0; entryCount < numDirEntries; i += entrySize, ++entryCount) {\\\\n const fieldTag = dataSlice.readUint16(i);\\\\n const fieldType = dataSlice.readUint16(i + 2);\\\\n const typeCount = this.bigTiff ?\\\\n dataSlice.readUint64(i + 4) :\\\\n dataSlice.readUint32(i + 4);\\\\n\\\\n let fieldValues;\\\\n let value;\\\\n const fieldTypeLength = getFieldTypeLength(fieldType);\\\\n const valueOffset = i + (this.bigTiff ? 12 : 8);\\\\n\\\\n // check whether the value is directly encoded in the tag or refers to a\\\\n // different external byte range\\\\n if (fieldTypeLength * typeCount <= (this.bigTiff ? 
8 : 4)) {\\\\n fieldValues = getValues(dataSlice, fieldType, typeCount, valueOffset);\\\\n } else {\\\\n // resolve the reference to the actual byte range\\\\n const actualOffset = dataSlice.readOffset(valueOffset);\\\\n const length = getFieldTypeLength(fieldType) * typeCount;\\\\n\\\\n // check, whether we actually cover the referenced byte range; if not,\\\\n // request a new slice of bytes to read from it\\\\n if (dataSlice.covers(actualOffset, length)) {\\\\n fieldValues = getValues(dataSlice, fieldType, typeCount, actualOffset);\\\\n } else {\\\\n const fieldDataSlice = await this.getSlice(actualOffset, length);\\\\n fieldValues = getValues(fieldDataSlice, fieldType, typeCount, actualOffset);\\\\n }\\\\n }\\\\n\\\\n // unpack single values from the array\\\\n if (typeCount === 1 && _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"arrayFields\\\\\\\"].indexOf(fieldTag) === -1 &&\\\\n !(fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].RATIONAL || fieldType === _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].SRATIONAL)) {\\\\n value = fieldValues[0];\\\\n } else {\\\\n value = fieldValues;\\\\n }\\\\n\\\\n // write the tags value to the file directly\\\\n fileDirectory[_globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTagNames\\\\\\\"][fieldTag]] = value;\\\\n }\\\\n const geoKeyDirectory = parseGeoKeyDirectory(fileDirectory);\\\\n const nextIFDByteOffset = dataSlice.readOffset(\\\\n offset + offsetSize + (entrySize * numDirEntries),\\\\n );\\\\n\\\\n return new ImageFileDirectory(\\\\n fileDirectory,\\\\n geoKeyDirectory,\\\\n nextIFDByteOffset,\\\\n );\\\\n }\\\\n\\\\n async requestIFD(index) {\\\\n // see if we already have that IFD index requested.\\\\n if (this.ifdRequests[index]) {\\\\n // attach to an already requested IFD\\\\n return this.ifdRequests[index];\\\\n } else if (index === 0) {\\\\n // special case for index 0\\\\n this.ifdRequests[index] = this.parseFileDirectoryAt(this.firstIFDOffset);\\\\n return this.ifdRequests[index];\\\\n } else if (!this.ifdRequests[index - 1]) {\\\\n // if the previous IFD was not yet loaded, load that one first\\\\n // this is the recursive call.\\\\n try {\\\\n this.ifdRequests[index - 1] = this.requestIFD(index - 1);\\\\n } catch (e) {\\\\n // if the previous one already was an index error, rethrow\\\\n // with the current index\\\\n if (e instanceof GeoTIFFImageIndexError) {\\\\n throw new GeoTIFFImageIndexError(index);\\\\n }\\\\n // rethrow anything else\\\\n throw e;\\\\n }\\\\n }\\\\n // if the previous IFD was loaded, we can finally fetch the one we are interested in.\\\\n // we need to wrap this in an IIFE, otherwise this.ifdRequests[index] would be delayed\\\\n this.ifdRequests[index] = (async () => {\\\\n const previousIfd = await this.ifdRequests[index - 1];\\\\n if (previousIfd.nextIFDByteOffset === 0) {\\\\n throw new GeoTIFFImageIndexError(index);\\\\n }\\\\n return this.parseFileDirectoryAt(previousIfd.nextIFDByteOffset);\\\\n })();\\\\n return this.ifdRequests[index];\\\\n }\\\\n\\\\n /**\\\\n * Get the n-th internal subfile of an image. 
By default, the first is returned.\\\\n *\\\\n * @param {Number} [index=0] the index of the image to return.\\\\n * @returns {GeoTIFFImage} the image at the given index\\\\n */\\\\n async getImage(index = 0) {\\\\n const ifd = await this.requestIFD(index);\\\\n return new _geotiffimage__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"](\\\\n ifd.fileDirectory, ifd.geoKeyDirectory,\\\\n this.dataView, this.littleEndian, this.cache, this.source,\\\\n );\\\\n }\\\\n\\\\n /**\\\\n * Returns the count of the internal subfiles.\\\\n *\\\\n * @returns {Number} the number of internal subfile images\\\\n */\\\\n async getImageCount() {\\\\n let index = 0;\\\\n // loop until we run out of IFDs\\\\n let hasNext = true;\\\\n while (hasNext) {\\\\n try {\\\\n await this.requestIFD(index);\\\\n ++index;\\\\n } catch (e) {\\\\n if (e instanceof GeoTIFFImageIndexError) {\\\\n hasNext = false;\\\\n } else {\\\\n throw e;\\\\n }\\\\n }\\\\n }\\\\n return index;\\\\n }\\\\n\\\\n /**\\\\n * Get the values of the COG ghost area as a parsed map.\\\\n * See https://gdal.org/drivers/raster/cog.html#header-ghost-area for reference\\\\n * @returns {Object} the parsed ghost area or null, if no such area was found\\\\n */\\\\n async getGhostValues() {\\\\n const offset = this.bigTiff ? 16 : 8;\\\\n if (this.ghostValues) {\\\\n return this.ghostValues;\\\\n }\\\\n const detectionString = 'GDAL_STRUCTURAL_METADATA_SIZE=';\\\\n const heuristicAreaSize = detectionString.length + 100;\\\\n let slice = await this.getSlice(offset, heuristicAreaSize);\\\\n if (detectionString === getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII, detectionString.length, offset)) {\\\\n const valuesString = getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII, heuristicAreaSize, offset);\\\\n const firstLine = valuesString.split('\\\\\\\\n')[0];\\\\n const metadataSize = Number(firstLine.split('=')[1].split(' ')[0]) + firstLine.length;\\\\n if (metadataSize > heuristicAreaSize) {\\\\n slice = await this.getSlice(offset, metadataSize);\\\\n }\\\\n const fullString = getValues(slice, _globals__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"fieldTypes\\\\\\\"].ASCII, metadataSize, offset);\\\\n this.ghostValues = {};\\\\n fullString\\\\n .split('\\\\\\\\n')\\\\n .filter(line => line.length > 0)\\\\n .map(line => line.split('='))\\\\n .forEach(([key, value]) => {\\\\n this.ghostValues[key] = value;\\\\n });\\\\n }\\\\n return this.ghostValues;\\\\n }\\\\n\\\\n /**\\\\n * Parse a (Geo)TIFF file from the given source.\\\\n *\\\\n * @param {source~Source} source The source of data to parse from.\\\\n * @param {object} options Additional options.\\\\n */\\\\n static async fromSource(source, options) {\\\\n const headerData = await source.fetch(0, 1024);\\\\n const dataView = new _dataview64__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"](headerData);\\\\n\\\\n const BOM = dataView.getUint16(0, 0);\\\\n let littleEndian;\\\\n if (BOM === 0x4949) {\\\\n littleEndian = true;\\\\n } else if (BOM === 0x4D4D) {\\\\n littleEndian = false;\\\\n } else {\\\\n throw new TypeError('Invalid byte order value.');\\\\n }\\\\n\\\\n const magicNumber = dataView.getUint16(2, littleEndian);\\\\n let bigTiff;\\\\n if (magicNumber === 42) {\\\\n bigTiff = false;\\\\n } else if (magicNumber === 43) {\\\\n bigTiff = true;\\\\n const offsetByteSize = dataView.getUint16(4, littleEndian);\\\\n if (offsetByteSize !== 8) {\\\\n throw new Error('Unsupported offset byte-size.');\\\\n }\\\\n } else {\\\\n 
throw new TypeError('Invalid magic number.');\\\\n }\\\\n\\\\n const firstIFDOffset = bigTiff\\\\n ? dataView.getUint64(8, littleEndian)\\\\n : dataView.getUint32(4, littleEndian);\\\\n return new GeoTIFF(source, littleEndian, bigTiff, firstIFDOffset, options);\\\\n }\\\\n\\\\n /**\\\\n * Closes the underlying file buffer\\\\n * N.B. After the GeoTIFF has been completely processed it needs\\\\n * to be closed but only if it has been constructed from a file.\\\\n */\\\\n close() {\\\\n if (typeof this.source.close === 'function') {\\\\n return this.source.close();\\\\n }\\\\n return false;\\\\n }\\\\n}\\\\n\\\\n\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (GeoTIFF);\\\\n\\\\n/**\\\\n * Wrapper for GeoTIFF files that have external overviews.\\\\n * @augments GeoTIFFBase\\\\n */\\\\nclass MultiGeoTIFF extends GeoTIFFBase {\\\\n /**\\\\n * Construct a new MultiGeoTIFF from a main and several overview files.\\\\n * @param {GeoTIFF} mainFile The main GeoTIFF file.\\\\n * @param {GeoTIFF[]} overviewFiles An array of overview files.\\\\n */\\\\n constructor(mainFile, overviewFiles) {\\\\n super();\\\\n this.mainFile = mainFile;\\\\n this.overviewFiles = overviewFiles;\\\\n this.imageFiles = [mainFile].concat(overviewFiles);\\\\n\\\\n this.fileDirectoriesPerFile = null;\\\\n this.fileDirectoriesPerFileParsing = null;\\\\n this.imageCount = null;\\\\n }\\\\n\\\\n async parseFileDirectoriesPerFile() {\\\\n const requests = [this.mainFile.parseFileDirectoryAt(this.mainFile.firstIFDOffset)]\\\\n .concat(this.overviewFiles.map((file) => file.parseFileDirectoryAt(file.firstIFDOffset)));\\\\n\\\\n this.fileDirectoriesPerFile = await Promise.all(requests);\\\\n return this.fileDirectoriesPerFile;\\\\n }\\\\n\\\\n /**\\\\n * Get the n-th internal subfile of an image. 
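/*
 * Sketch of the header check performed by GeoTIFF.fromSource above: the first two
 * bytes select the byte order ('II' = 0x4949 for little endian, 'MM' = 0x4D4D for
 * big endian) and the following uint16 is 42 for classic TIFF or 43 for BigTIFF.
 * `buffer` is assumed to be an ArrayBuffer holding at least the file header.
 */
function sketchDetectTiffFlavour(buffer) {
  const view = new DataView(buffer);
  const bom = view.getUint16(0, false);
  let littleEndian;
  if (bom === 0x4949) {
    littleEndian = true;
  } else if (bom === 0x4D4D) {
    littleEndian = false;
  } else {
    throw new TypeError('Invalid byte order value.');
  }
  const magic = view.getUint16(2, littleEndian);
  if (magic !== 42 && magic !== 43) {
    throw new TypeError('Invalid magic number.');
  }
  return { littleEndian, bigTiff: magic === 43 };
}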
By default, the first is returned.\\\\n *\\\\n * @param {Number} [index=0] the index of the image to return.\\\\n * @returns {GeoTIFFImage} the image at the given index\\\\n */\\\\n async getImage(index = 0) {\\\\n await this.getImageCount();\\\\n await this.parseFileDirectoriesPerFile();\\\\n let visited = 0;\\\\n let relativeIndex = 0;\\\\n for (let i = 0; i < this.imageFiles.length; i++) {\\\\n const imageFile = this.imageFiles[i];\\\\n for (let ii = 0; ii < this.imageCounts[i]; ii++) {\\\\n if (index === visited) {\\\\n const ifd = await imageFile.requestIFD(relativeIndex);\\\\n return new _geotiffimage__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"](\\\\n ifd.fileDirectory, imageFile.geoKeyDirectory,\\\\n imageFile.dataView, imageFile.littleEndian, imageFile.cache, imageFile.source,\\\\n );\\\\n }\\\\n visited++;\\\\n relativeIndex++;\\\\n }\\\\n relativeIndex = 0;\\\\n }\\\\n\\\\n throw new RangeError('Invalid image index');\\\\n }\\\\n\\\\n /**\\\\n * Returns the count of the internal subfiles.\\\\n *\\\\n * @returns {Number} the number of internal subfile images\\\\n */\\\\n async getImageCount() {\\\\n if (this.imageCount !== null) {\\\\n return this.imageCount;\\\\n }\\\\n const requests = [this.mainFile.getImageCount()]\\\\n .concat(this.overviewFiles.map((file) => file.getImageCount()));\\\\n this.imageCounts = await Promise.all(requests);\\\\n this.imageCount = this.imageCounts.reduce((count, ifds) => count + ifds, 0);\\\\n return this.imageCount;\\\\n }\\\\n}\\\\n\\\\n\\\\n\\\\n/**\\\\n * Creates a new GeoTIFF from a remote URL.\\\\n * @param {string} url The URL to access the image from\\\\n * @param {object} [options] Additional options to pass to the source.\\\\n * See {@link makeRemoteSource} for details.\\\\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\\\\n */\\\\nasync function fromUrl(url, options = {}) {\\\\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeRemoteSource\\\\\\\"])(url, options));\\\\n}\\\\n\\\\n/**\\\\n * Construct a new GeoTIFF from an\\\\n * [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.\\\\n * @param {ArrayBuffer} arrayBuffer The data to read the file from.\\\\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\\\\n */\\\\nasync function fromArrayBuffer(arrayBuffer) {\\\\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeBufferSource\\\\\\\"])(arrayBuffer));\\\\n}\\\\n\\\\n/**\\\\n * Construct a GeoTIFF from a local file path. This uses the node\\\\n * [filesystem API]{@link https://nodejs.org/api/fs.html} and is\\\\n * not available on browsers.\\\\n *\\\\n * N.B. 
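/*
 * Usage sketch for the fromUrl/fromArrayBuffer helpers defined in this module,
 * assuming they are consumed as named exports of the upstream geotiff package;
 * the import specifier 'geotiff' and the `url` argument are placeholders.
 */
import { fromUrl } from 'geotiff';

async function sketchListImages(url) {
  const tiff = await fromUrl(url);            // remote source, fetched in ranges
  const count = await tiff.getImageCount();   // walks the IFD chain until it ends
  const first = await tiff.getImage();        // index 0 by default
  console.log(`subfiles: ${count}, size: ${first.getWidth()}x${first.getHeight()}`);
  return tiff;
}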
After the GeoTIFF has been completely processed it needs\\\\n * to be closed but only if it has been constructed from a file.\\\\n * @param {string} path The file path to read from.\\\\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\\\\n */\\\\nasync function fromFile(path) {\\\\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeFileSource\\\\\\\"])(path));\\\\n}\\\\n\\\\n/**\\\\n * Construct a GeoTIFF from an HTML\\\\n * [Blob]{@link https://developer.mozilla.org/en-US/docs/Web/API/Blob} or\\\\n * [File]{@link https://developer.mozilla.org/en-US/docs/Web/API/File}\\\\n * object.\\\\n * @param {Blob|File} blob The Blob or File object to read from.\\\\n * @returns {Promise.<GeoTIFF>} The resulting GeoTIFF file.\\\\n */\\\\nasync function fromBlob(blob) {\\\\n return GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeFileReaderSource\\\\\\\"])(blob));\\\\n}\\\\n\\\\n/**\\\\n * Construct a MultiGeoTIFF from the given URLs.\\\\n * @param {string} mainUrl The URL for the main file.\\\\n * @param {string[]} overviewUrls An array of URLs for the overview images.\\\\n * @param {object} [options] Additional options to pass to the source.\\\\n * See [makeRemoteSource]{@link module:source.makeRemoteSource}\\\\n * for details.\\\\n * @returns {Promise.<MultiGeoTIFF>} The resulting MultiGeoTIFF file.\\\\n */\\\\nasync function fromUrls(mainUrl, overviewUrls = [], options = {}) {\\\\n const mainFile = await GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeRemoteSource\\\\\\\"])(mainUrl, options));\\\\n const overviewFiles = await Promise.all(\\\\n overviewUrls.map((url) => GeoTIFF.fromSource(Object(_source__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"makeRemoteSource\\\\\\\"])(url, options))),\\\\n );\\\\n\\\\n return new MultiGeoTIFF(mainFile, overviewFiles);\\\\n}\\\\n\\\\n/**\\\\n * Main creating function for GeoTIFF files.\\\\n * @param {(Array)} array of pixel values\\\\n * @returns {metadata} metadata\\\\n */\\\\nasync function writeArrayBuffer(values, metadata) {\\\\n return Object(_geotiffwriter__WEBPACK_IMPORTED_MODULE_6__[\\\\\\\"writeGeotiff\\\\\\\"])(values, metadata);\\\\n}\\\\n\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiff.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/geotiffimage.js\\\":\\n/*!**************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/geotiffimage.js ***!\\n \\\\**************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var txml__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! txml */ \\\\\\\"./node_modules/txml/tXml.js\\\\\\\");\\\\n/* harmony import */ var txml__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(txml__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./globals */ \\\\\\\"./node_modules/geotiff/src/globals.js\\\\\\\");\\\\n/* harmony import */ var _rgb__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./rgb */ \\\\\\\"./node_modules/geotiff/src/rgb.js\\\\\\\");\\\\n/* harmony import */ var _compression__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
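/*
 * Usage sketch for fromUrls and fromFile above. fromUrls builds a MultiGeoTIFF
 * from a main file plus external overview files; fromFile uses the Node fs API
 * and therefore needs an explicit close() once processing is finished. The
 * import specifier, URLs and path are placeholders.
 */
import { fromUrls, fromFile } from 'geotiff';

async function sketchOpenWithOverviews(mainUrl, overviewUrls) {
  const multi = await fromUrls(mainUrl, overviewUrls);
  // images are indexed across the main file first, then the overview files
  return multi.getImage(0);
}

async function sketchOpenLocalFile(path) {
  const tiff = await fromFile(path);
  try {
    const image = await tiff.getImage();
    return image.getWidth();
  } finally {
    tiff.close(); // only required for file-backed sources
  }
}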
./compression */ \\\\\\\"./node_modules/geotiff/src/compression/index.js\\\\\\\");\\\\n/* harmony import */ var _resample__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./resample */ \\\\\\\"./node_modules/geotiff/src/resample.js\\\\\\\");\\\\n/* eslint max-len: [\\\\\\\"error\\\\\\\", { \\\\\\\"code\\\\\\\": 120 }] */\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nfunction sum(array, start, end) {\\\\n let s = 0;\\\\n for (let i = start; i < end; ++i) {\\\\n s += array[i];\\\\n }\\\\n return s;\\\\n}\\\\n\\\\nfunction arrayForType(format, bitsPerSample, size) {\\\\n switch (format) {\\\\n case 1: // unsigned integer data\\\\n switch (bitsPerSample) {\\\\n case 8:\\\\n return new Uint8Array(size);\\\\n case 16:\\\\n return new Uint16Array(size);\\\\n case 32:\\\\n return new Uint32Array(size);\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n case 2: // twos complement signed integer data\\\\n switch (bitsPerSample) {\\\\n case 8:\\\\n return new Int8Array(size);\\\\n case 16:\\\\n return new Int16Array(size);\\\\n case 32:\\\\n return new Int32Array(size);\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n case 3: // floating point data\\\\n switch (bitsPerSample) {\\\\n case 32:\\\\n return new Float32Array(size);\\\\n case 64:\\\\n return new Float64Array(size);\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n default:\\\\n break;\\\\n }\\\\n throw Error('Unsupported data format/bitsPerSample');\\\\n}\\\\n\\\\n/**\\\\n * GeoTIFF sub-file image.\\\\n */\\\\nclass GeoTIFFImage {\\\\n /**\\\\n * @constructor\\\\n * @param {Object} fileDirectory The parsed file directory\\\\n * @param {Object} geoKeys The parsed geo-keys\\\\n * @param {DataView} dataView The DataView for the underlying file.\\\\n * @param {Boolean} littleEndian Whether the file is encoded in little or big endian\\\\n * @param {Boolean} cache Whether or not decoded tiles shall be cached\\\\n * @param {Source} source The datasource to read from\\\\n */\\\\n constructor(fileDirectory, geoKeys, dataView, littleEndian, cache, source) {\\\\n this.fileDirectory = fileDirectory;\\\\n this.geoKeys = geoKeys;\\\\n this.dataView = dataView;\\\\n this.littleEndian = littleEndian;\\\\n this.tiles = cache ? {} : null;\\\\n this.isTiled = !fileDirectory.StripOffsets;\\\\n const planarConfiguration = fileDirectory.PlanarConfiguration;\\\\n this.planarConfiguration = (typeof planarConfiguration === 'undefined') ? 
1 : planarConfiguration;\\\\n if (this.planarConfiguration !== 1 && this.planarConfiguration !== 2) {\\\\n throw new Error('Invalid planar configuration.');\\\\n }\\\\n\\\\n this.source = source;\\\\n }\\\\n\\\\n /**\\\\n * Returns the associated parsed file directory.\\\\n * @returns {Object} the parsed file directory\\\\n */\\\\n getFileDirectory() {\\\\n return this.fileDirectory;\\\\n }\\\\n\\\\n /**\\\\n * Returns the associated parsed geo keys.\\\\n * @returns {Object} the parsed geo keys\\\\n */\\\\n getGeoKeys() {\\\\n return this.geoKeys;\\\\n }\\\\n\\\\n /**\\\\n * Returns the width of the image.\\\\n * @returns {Number} the width of the image\\\\n */\\\\n getWidth() {\\\\n return this.fileDirectory.ImageWidth;\\\\n }\\\\n\\\\n /**\\\\n * Returns the height of the image.\\\\n * @returns {Number} the height of the image\\\\n */\\\\n getHeight() {\\\\n return this.fileDirectory.ImageLength;\\\\n }\\\\n\\\\n /**\\\\n * Returns the number of samples per pixel.\\\\n * @returns {Number} the number of samples per pixel\\\\n */\\\\n getSamplesPerPixel() {\\\\n return this.fileDirectory.SamplesPerPixel;\\\\n }\\\\n\\\\n /**\\\\n * Returns the width of each tile.\\\\n * @returns {Number} the width of each tile\\\\n */\\\\n getTileWidth() {\\\\n return this.isTiled ? this.fileDirectory.TileWidth : this.getWidth();\\\\n }\\\\n\\\\n /**\\\\n * Returns the height of each tile.\\\\n * @returns {Number} the height of each tile\\\\n */\\\\n getTileHeight() {\\\\n if (this.isTiled) {\\\\n return this.fileDirectory.TileLength;\\\\n }\\\\n if (typeof this.fileDirectory.RowsPerStrip !== 'undefined') {\\\\n return Math.min(this.fileDirectory.RowsPerStrip, this.getHeight());\\\\n }\\\\n return this.getHeight();\\\\n }\\\\n\\\\n /**\\\\n * Calculates the number of bytes for each pixel across all samples. Only full\\\\n * bytes are supported, an exception is thrown when this is not the case.\\\\n * @returns {Number} the bytes per pixel\\\\n */\\\\n getBytesPerPixel() {\\\\n let bitsPerSample = 0;\\\\n for (let i = 0; i < this.fileDirectory.BitsPerSample.length; ++i) {\\\\n const bits = this.fileDirectory.BitsPerSample[i];\\\\n if ((bits % 8) !== 0) {\\\\n throw new Error(`Sample bit-width of ${bits} is not supported.`);\\\\n } else if (bits !== this.fileDirectory.BitsPerSample[0]) {\\\\n throw new Error('Differing size of samples in a pixel are not supported.');\\\\n }\\\\n bitsPerSample += bits;\\\\n }\\\\n return bitsPerSample / 8;\\\\n }\\\\n\\\\n getSampleByteSize(i) {\\\\n if (i >= this.fileDirectory.BitsPerSample.length) {\\\\n throw new RangeError(`Sample index ${i} is out of range.`);\\\\n }\\\\n const bits = this.fileDirectory.BitsPerSample[i];\\\\n if ((bits % 8) !== 0) {\\\\n throw new Error(`Sample bit-width of ${bits} is not supported.`);\\\\n }\\\\n return (bits / 8);\\\\n }\\\\n\\\\n getReaderForSample(sampleIndex) {\\\\n const format = this.fileDirectory.SampleFormat\\\\n ? 
this.fileDirectory.SampleFormat[sampleIndex] : 1;\\\\n const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];\\\\n switch (format) {\\\\n case 1: // unsigned integer data\\\\n switch (bitsPerSample) {\\\\n case 8:\\\\n return DataView.prototype.getUint8;\\\\n case 16:\\\\n return DataView.prototype.getUint16;\\\\n case 32:\\\\n return DataView.prototype.getUint32;\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n case 2: // twos complement signed integer data\\\\n switch (bitsPerSample) {\\\\n case 8:\\\\n return DataView.prototype.getInt8;\\\\n case 16:\\\\n return DataView.prototype.getInt16;\\\\n case 32:\\\\n return DataView.prototype.getInt32;\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n case 3:\\\\n switch (bitsPerSample) {\\\\n case 32:\\\\n return DataView.prototype.getFloat32;\\\\n case 64:\\\\n return DataView.prototype.getFloat64;\\\\n default:\\\\n break;\\\\n }\\\\n break;\\\\n default:\\\\n break;\\\\n }\\\\n throw Error('Unsupported data format/bitsPerSample');\\\\n }\\\\n\\\\n getArrayForSample(sampleIndex, size) {\\\\n const format = this.fileDirectory.SampleFormat\\\\n ? this.fileDirectory.SampleFormat[sampleIndex] : 1;\\\\n const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];\\\\n return arrayForType(format, bitsPerSample, size);\\\\n }\\\\n\\\\n /**\\\\n * Returns the decoded strip or tile.\\\\n * @param {Number} x the strip or tile x-offset\\\\n * @param {Number} y the tile y-offset (0 for stripped images)\\\\n * @param {Number} sample the sample to get for separated samples\\\\n * @param {Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool\\\\n * @returns {Promise.<ArrayBuffer>}\\\\n */\\\\n async getTileOrStrip(x, y, sample, poolOrDecoder) {\\\\n const numTilesPerRow = Math.ceil(this.getWidth() / this.getTileWidth());\\\\n const numTilesPerCol = Math.ceil(this.getHeight() / this.getTileHeight());\\\\n let index;\\\\n const { tiles } = this;\\\\n if (this.planarConfiguration === 1) {\\\\n index = (y * numTilesPerRow) + x;\\\\n } else if (this.planarConfiguration === 2) {\\\\n index = (sample * numTilesPerRow * numTilesPerCol) + (y * numTilesPerRow) + x;\\\\n }\\\\n\\\\n let offset;\\\\n let byteCount;\\\\n if (this.isTiled) {\\\\n offset = this.fileDirectory.TileOffsets[index];\\\\n byteCount = this.fileDirectory.TileByteCounts[index];\\\\n } else {\\\\n offset = this.fileDirectory.StripOffsets[index];\\\\n byteCount = this.fileDirectory.StripByteCounts[index];\\\\n }\\\\n const slice = await this.source.fetch(offset, byteCount);\\\\n\\\\n // either use the provided pool or decoder to decode the data\\\\n let request;\\\\n if (tiles === null) {\\\\n request = poolOrDecoder.decode(this.fileDirectory, slice);\\\\n } else if (!tiles[index]) {\\\\n request = poolOrDecoder.decode(this.fileDirectory, slice);\\\\n tiles[index] = request;\\\\n }\\\\n return { x, y, sample, data: await request };\\\\n }\\\\n\\\\n /**\\\\n * Internal read function.\\\\n * @private\\\\n * @param {Array} imageWindow The image window in pixel coordinates\\\\n * @param {Array} samples The selected samples (0-based indices)\\\\n * @param {TypedArray[]|TypedArray} valueArrays The array(s) to write into\\\\n * @param {Boolean} interleave Whether or not to write in an interleaved manner\\\\n * @param {Pool} pool The decoder pool\\\\n * @returns {Promise<TypedArray[]>|Promise<TypedArray>}\\\\n */\\\\n async _readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod) {\\\\n const tileWidth = 
this.getTileWidth();\\\\n const tileHeight = this.getTileHeight();\\\\n\\\\n const minXTile = Math.max(Math.floor(imageWindow[0] / tileWidth), 0);\\\\n const maxXTile = Math.min(\\\\n Math.ceil(imageWindow[2] / tileWidth),\\\\n Math.ceil(this.getWidth() / this.getTileWidth()),\\\\n );\\\\n const minYTile = Math.max(Math.floor(imageWindow[1] / tileHeight), 0);\\\\n const maxYTile = Math.min(\\\\n Math.ceil(imageWindow[3] / tileHeight),\\\\n Math.ceil(this.getHeight() / this.getTileHeight()),\\\\n );\\\\n const windowWidth = imageWindow[2] - imageWindow[0];\\\\n\\\\n let bytesPerPixel = this.getBytesPerPixel();\\\\n\\\\n const srcSampleOffsets = [];\\\\n const sampleReaders = [];\\\\n for (let i = 0; i < samples.length; ++i) {\\\\n if (this.planarConfiguration === 1) {\\\\n srcSampleOffsets.push(sum(this.fileDirectory.BitsPerSample, 0, samples[i]) / 8);\\\\n } else {\\\\n srcSampleOffsets.push(0);\\\\n }\\\\n sampleReaders.push(this.getReaderForSample(samples[i]));\\\\n }\\\\n\\\\n const promises = [];\\\\n const { littleEndian } = this;\\\\n\\\\n for (let yTile = minYTile; yTile < maxYTile; ++yTile) {\\\\n for (let xTile = minXTile; xTile < maxXTile; ++xTile) {\\\\n for (let sampleIndex = 0; sampleIndex < samples.length; ++sampleIndex) {\\\\n const si = sampleIndex;\\\\n const sample = samples[sampleIndex];\\\\n if (this.planarConfiguration === 2) {\\\\n bytesPerPixel = this.getSampleByteSize(sample);\\\\n }\\\\n const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder);\\\\n promises.push(promise);\\\\n promise.then((tile) => {\\\\n const buffer = tile.data;\\\\n const dataView = new DataView(buffer);\\\\n const firstLine = tile.y * tileHeight;\\\\n const firstCol = tile.x * tileWidth;\\\\n const lastLine = (tile.y + 1) * tileHeight;\\\\n const lastCol = (tile.x + 1) * tileWidth;\\\\n const reader = sampleReaders[si];\\\\n\\\\n const ymax = Math.min(tileHeight, tileHeight - (lastLine - imageWindow[3]));\\\\n const xmax = Math.min(tileWidth, tileWidth - (lastCol - imageWindow[2]));\\\\n\\\\n for (let y = Math.max(0, imageWindow[1] - firstLine); y < ymax; ++y) {\\\\n for (let x = Math.max(0, imageWindow[0] - firstCol); x < xmax; ++x) {\\\\n const pixelOffset = ((y * tileWidth) + x) * bytesPerPixel;\\\\n const value = reader.call(\\\\n dataView, pixelOffset + srcSampleOffsets[si], littleEndian,\\\\n );\\\\n let windowCoordinate;\\\\n if (interleave) {\\\\n windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth * samples.length)\\\\n + ((x + firstCol - imageWindow[0]) * samples.length)\\\\n + si;\\\\n valueArrays[windowCoordinate] = value;\\\\n } else {\\\\n windowCoordinate = (\\\\n (y + firstLine - imageWindow[1]) * windowWidth\\\\n ) + x + firstCol - imageWindow[0];\\\\n valueArrays[si][windowCoordinate] = value;\\\\n }\\\\n }\\\\n }\\\\n });\\\\n }\\\\n }\\\\n }\\\\n await Promise.all(promises);\\\\n\\\\n if ((width && (imageWindow[2] - imageWindow[0]) !== width)\\\\n || (height && (imageWindow[3] - imageWindow[1]) !== height)) {\\\\n let resampled;\\\\n if (interleave) {\\\\n resampled = Object(_resample__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"resampleInterleaved\\\\\\\"])(\\\\n valueArrays,\\\\n imageWindow[2] - imageWindow[0],\\\\n imageWindow[3] - imageWindow[1],\\\\n width, height,\\\\n samples.length,\\\\n resampleMethod,\\\\n );\\\\n } else {\\\\n resampled = Object(_resample__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"resample\\\\\\\"])(\\\\n valueArrays,\\\\n imageWindow[2] - imageWindow[0],\\\\n imageWindow[3] - imageWindow[1],\\\\n width, height,\\\\n 
resampleMethod,\\\\n );\\\\n }\\\\n resampled.width = width;\\\\n resampled.height = height;\\\\n return resampled;\\\\n }\\\\n\\\\n valueArrays.width = width || imageWindow[2] - imageWindow[0];\\\\n valueArrays.height = height || imageWindow[3] - imageWindow[1];\\\\n\\\\n return valueArrays;\\\\n }\\\\n\\\\n /**\\\\n * Reads raster data from the image. This function reads all selected samples\\\\n * into separate arrays of the correct type for that sample or into a single\\\\n * combined array when `interleave` is set. When provided, only a subset\\\\n * of the raster is read for each sample.\\\\n *\\\\n * @param {Object} [options={}] optional parameters\\\\n * @param {Array} [options.window=whole image] the subset to read data from.\\\\n * @param {Array} [options.samples=all samples] the selection of samples to read from.\\\\n * @param {Boolean} [options.interleave=false] whether the data shall be read\\\\n * in one single array or separate\\\\n * arrays.\\\\n * @param {Number} [options.pool=null] The optional decoder pool to use.\\\\n * @param {number} [options.width] The desired width of the output. When the width is\\\\n * not the same as the images, resampling will be\\\\n * performed.\\\\n * @param {number} [options.height] The desired height of the output. When the width\\\\n * is not the same as the images, resampling will\\\\n * be performed.\\\\n * @param {string} [options.resampleMethod='nearest'] The desired resampling method.\\\\n * @param {number|number[]} [options.fillValue] The value to use for parts of the image\\\\n * outside of the images extent. When\\\\n * multiple samples are requested, an\\\\n * array of fill values can be passed.\\\\n * @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise\\\\n */\\\\n async readRasters({\\\\n window: wnd, samples = [], interleave, pool = null,\\\\n width, height, resampleMethod, fillValue,\\\\n } = {}) {\\\\n const imageWindow = wnd || [0, 0, this.getWidth(), this.getHeight()];\\\\n\\\\n // check parameters\\\\n if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {\\\\n throw new Error('Invalid subsets');\\\\n }\\\\n\\\\n const imageWindowWidth = imageWindow[2] - imageWindow[0];\\\\n const imageWindowHeight = imageWindow[3] - imageWindow[1];\\\\n const numPixels = imageWindowWidth * imageWindowHeight;\\\\n\\\\n if (!samples || !samples.length) {\\\\n for (let i = 0; i < this.fileDirectory.SamplesPerPixel; ++i) {\\\\n samples.push(i);\\\\n }\\\\n } else {\\\\n for (let i = 0; i < samples.length; ++i) {\\\\n if (samples[i] >= this.fileDirectory.SamplesPerPixel) {\\\\n return Promise.reject(new RangeError(`Invalid sample index '${samples[i]}'.`));\\\\n }\\\\n }\\\\n }\\\\n let valueArrays;\\\\n if (interleave) {\\\\n const format = this.fileDirectory.SampleFormat\\\\n ? 
Math.max.apply(null, this.fileDirectory.SampleFormat) : 1;\\\\n const bitsPerSample = Math.max.apply(null, this.fileDirectory.BitsPerSample);\\\\n valueArrays = arrayForType(format, bitsPerSample, numPixels * samples.length);\\\\n if (fillValue) {\\\\n valueArrays.fill(fillValue);\\\\n }\\\\n } else {\\\\n valueArrays = [];\\\\n for (let i = 0; i < samples.length; ++i) {\\\\n const valueArray = this.getArrayForSample(samples[i], numPixels);\\\\n if (Array.isArray(fillValue) && i < fillValue.length) {\\\\n valueArray.fill(fillValue[i]);\\\\n } else if (fillValue && !Array.isArray(fillValue)) {\\\\n valueArray.fill(fillValue);\\\\n }\\\\n valueArrays.push(valueArray);\\\\n }\\\\n }\\\\n\\\\n const poolOrDecoder = pool || Object(_compression__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"getDecoder\\\\\\\"])(this.fileDirectory);\\\\n\\\\n const result = await this._readRaster(\\\\n imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod,\\\\n );\\\\n return result;\\\\n }\\\\n\\\\n /**\\\\n * Reads raster data from the image as RGB. The result is always an\\\\n * interleaved typed array.\\\\n * Colorspaces other than RGB will be transformed to RGB, color maps expanded.\\\\n * When no other method is applicable, the first sample is used to produce a\\\\n * greayscale image.\\\\n * When provided, only a subset of the raster is read for each sample.\\\\n *\\\\n * @param {Object} [options] optional parameters\\\\n * @param {Array} [options.window=whole image] the subset to read data from.\\\\n * @param {Number} [pool=null] The optional decoder pool to use.\\\\n * @param {number} [width] The desired width of the output. When the width is no the\\\\n * same as the images, resampling will be performed.\\\\n * @param {number} [height] The desired height of the output. 
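/*
 * Usage sketch for readRasters above, assuming an image with at least three
 * samples: read a pixel window either as separate typed arrays or as one
 * interleaved array, padding areas outside the image with a fill value. The
 * window coordinates and output size are placeholders.
 */
async function sketchReadWindow(image) {
  // one typed array per requested sample
  const [r, g, b] = await image.readRasters({
    window: [0, 0, 256, 256],
    samples: [0, 1, 2],
    fillValue: 0,
  });

  // single interleaved array, resampled to 128x128 pixels
  const interleaved = await image.readRasters({
    window: [0, 0, 256, 256],
    samples: [0, 1, 2],
    interleave: true,
    width: 128,
    height: 128,
    resampleMethod: 'nearest',
  });
  return { r, g, b, interleaved };
}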
When the width is no the\\\\n * same as the images, resampling will be performed.\\\\n * @param {string} [resampleMethod='nearest'] The desired resampling method.\\\\n * @param {bool} [enableAlpha=false] Enable reading alpha channel if present.\\\\n * @returns {Promise.<TypedArray|TypedArray[]>} the RGB array as a Promise\\\\n */\\\\n async readRGB({ window, pool = null, width, height, resampleMethod, enableAlpha = false } = {}) {\\\\n const imageWindow = window || [0, 0, this.getWidth(), this.getHeight()];\\\\n\\\\n // check parameters\\\\n if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {\\\\n throw new Error('Invalid subsets');\\\\n }\\\\n\\\\n const pi = this.fileDirectory.PhotometricInterpretation;\\\\n\\\\n if (pi === _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].RGB) {\\\\n let s = [0, 1, 2];\\\\n if ((!(this.fileDirectory.ExtraSamples === _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"ExtraSamplesValues\\\\\\\"].Unspecified)) && enableAlpha) {\\\\n s = [];\\\\n for (let i = 0; i < this.fileDirectory.BitsPerSample.length; i += 1) {\\\\n s.push(i);\\\\n }\\\\n }\\\\n return this.readRasters({\\\\n window,\\\\n interleave: true,\\\\n samples: s,\\\\n pool,\\\\n width,\\\\n height,\\\\n });\\\\n }\\\\n\\\\n let samples;\\\\n switch (pi) {\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].WhiteIsZero:\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].BlackIsZero:\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].Palette:\\\\n samples = [0];\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].CMYK:\\\\n samples = [0, 1, 2, 3];\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].YCbCr:\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].CIELab:\\\\n samples = [0, 1, 2];\\\\n break;\\\\n default:\\\\n throw new Error('Invalid or unsupported photometric interpretation.');\\\\n }\\\\n\\\\n const subOptions = {\\\\n window: imageWindow,\\\\n interleave: true,\\\\n samples,\\\\n pool,\\\\n width,\\\\n height,\\\\n resampleMethod,\\\\n };\\\\n const { fileDirectory } = this;\\\\n const raster = await this.readRasters(subOptions);\\\\n\\\\n const max = 2 ** this.fileDirectory.BitsPerSample[0];\\\\n let data;\\\\n switch (pi) {\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].WhiteIsZero:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromWhiteIsZero\\\\\\\"])(raster, max);\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].BlackIsZero:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromBlackIsZero\\\\\\\"])(raster, max);\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].Palette:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromPalette\\\\\\\"])(raster, fileDirectory.ColorMap);\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].CMYK:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromCMYK\\\\\\\"])(raster);\\\\n break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].YCbCr:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromYCbCr\\\\\\\"])(raster);\\\\n 
break;\\\\n case _globals__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"photometricInterpretations\\\\\\\"].CIELab:\\\\n data = Object(_rgb__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"fromCIELab\\\\\\\"])(raster);\\\\n break;\\\\n default:\\\\n throw new Error('Unsupported photometric interpretation.');\\\\n }\\\\n data.width = raster.width;\\\\n data.height = raster.height;\\\\n return data;\\\\n }\\\\n\\\\n /**\\\\n * Returns an array of tiepoints.\\\\n * @returns {Object[]}\\\\n */\\\\n getTiePoints() {\\\\n if (!this.fileDirectory.ModelTiepoint) {\\\\n return [];\\\\n }\\\\n\\\\n const tiePoints = [];\\\\n for (let i = 0; i < this.fileDirectory.ModelTiepoint.length; i += 6) {\\\\n tiePoints.push({\\\\n i: this.fileDirectory.ModelTiepoint[i],\\\\n j: this.fileDirectory.ModelTiepoint[i + 1],\\\\n k: this.fileDirectory.ModelTiepoint[i + 2],\\\\n x: this.fileDirectory.ModelTiepoint[i + 3],\\\\n y: this.fileDirectory.ModelTiepoint[i + 4],\\\\n z: this.fileDirectory.ModelTiepoint[i + 5],\\\\n });\\\\n }\\\\n return tiePoints;\\\\n }\\\\n\\\\n /**\\\\n * Returns the parsed GDAL metadata items.\\\\n *\\\\n * If sample is passed to null, dataset-level metadata will be returned.\\\\n * Otherwise only metadata specific to the provided sample will be returned.\\\\n *\\\\n * @param {Number} [sample=null] The sample index.\\\\n * @returns {Object}\\\\n */\\\\n getGDALMetadata(sample = null) {\\\\n const metadata = {};\\\\n if (!this.fileDirectory.GDAL_METADATA) {\\\\n return null;\\\\n }\\\\n const string = this.fileDirectory.GDAL_METADATA;\\\\n const xmlDom = txml__WEBPACK_IMPORTED_MODULE_0___default()(string.substring(0, string.length - 1));\\\\n\\\\n if (!xmlDom[0].tagName) {\\\\n throw new Error('Failed to parse GDAL metadata XML.');\\\\n }\\\\n\\\\n const root = xmlDom[0];\\\\n if (root.tagName !== 'GDALMetadata') {\\\\n throw new Error('Unexpected GDAL metadata XML tag.');\\\\n }\\\\n\\\\n let items = root.children\\\\n .filter((child) => child.tagName === 'Item');\\\\n\\\\n if (sample) {\\\\n items = items.filter((item) => Number(item.attributes.sample) === sample);\\\\n }\\\\n\\\\n for (let i = 0; i < items.length; ++i) {\\\\n const item = items[i];\\\\n metadata[item.attributes.name] = item.children[0];\\\\n }\\\\n return metadata;\\\\n }\\\\n\\\\n /**\\\\n * Returns the GDAL nodata value\\\\n * @returns {Number} or null\\\\n */\\\\n getGDALNoData() {\\\\n if (!this.fileDirectory.GDAL_NODATA) {\\\\n return null;\\\\n }\\\\n const string = this.fileDirectory.GDAL_NODATA;\\\\n return Number(string.substring(0, string.length - 1));\\\\n }\\\\n\\\\n /**\\\\n * Returns the image origin as a XYZ-vector. When the image has no affine\\\\n * transformation, then an exception is thrown.\\\\n * @returns {Array} The origin as a vector\\\\n */\\\\n getOrigin() {\\\\n const tiePoints = this.fileDirectory.ModelTiepoint;\\\\n const modelTransformation = this.fileDirectory.ModelTransformation;\\\\n if (tiePoints && tiePoints.length === 6) {\\\\n return [\\\\n tiePoints[3],\\\\n tiePoints[4],\\\\n tiePoints[5],\\\\n ];\\\\n }\\\\n if (modelTransformation) {\\\\n return [\\\\n modelTransformation[3],\\\\n modelTransformation[7],\\\\n modelTransformation[11],\\\\n ];\\\\n }\\\\n throw new Error('The image does not have an affine transformation.');\\\\n }\\\\n\\\\n /**\\\\n * Returns the image resolution as a XYZ-vector. 
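/*
 * Usage sketch for readRGB and the GDAL helpers above: readRGB always yields an
 * interleaved array and expands palettes / converts CMYK, YCbCr and CIELab to
 * RGB, while getGDALNoData returns the nodata value written by GDAL, or null if
 * the tag is absent. The preview size is a placeholder.
 */
async function sketchReadRgbPreview(image) {
  const rgb = await image.readRGB({
    window: [0, 0, image.getWidth(), image.getHeight()],
    width: 256,
    height: 256,
    enableAlpha: false,
  });
  const noData = image.getGDALNoData();
  return { rgb, noData };
}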
When the image has no affine\\\\n * transformation, then an exception is thrown.\\\\n * @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from\\\\n * in cases when the current image does not have the\\\\n * required tags on its own.\\\\n * @returns {Array} The resolution as a vector\\\\n */\\\\n getResolution(referenceImage = null) {\\\\n const modelPixelScale = this.fileDirectory.ModelPixelScale;\\\\n const modelTransformation = this.fileDirectory.ModelTransformation;\\\\n\\\\n if (modelPixelScale) {\\\\n return [\\\\n modelPixelScale[0],\\\\n -modelPixelScale[1],\\\\n modelPixelScale[2],\\\\n ];\\\\n }\\\\n if (modelTransformation) {\\\\n return [\\\\n modelTransformation[0],\\\\n modelTransformation[5],\\\\n modelTransformation[10],\\\\n ];\\\\n }\\\\n\\\\n if (referenceImage) {\\\\n const [refResX, refResY, refResZ] = referenceImage.getResolution();\\\\n return [\\\\n refResX * referenceImage.getWidth() / this.getWidth(),\\\\n refResY * referenceImage.getHeight() / this.getHeight(),\\\\n refResZ * referenceImage.getWidth() / this.getWidth(),\\\\n ];\\\\n }\\\\n\\\\n throw new Error('The image does not have an affine transformation.');\\\\n }\\\\n\\\\n /**\\\\n * Returns whether or not the pixels of the image depict an area (or point).\\\\n * @returns {Boolean} Whether the pixels are a point\\\\n */\\\\n pixelIsArea() {\\\\n return this.geoKeys.GTRasterTypeGeoKey === 1;\\\\n }\\\\n\\\\n /**\\\\n * Returns the image bounding box as an array of 4 values: min-x, min-y,\\\\n * max-x and max-y. When the image has no affine transformation, then an\\\\n * exception is thrown.\\\\n * @returns {Array} The bounding box\\\\n */\\\\n getBoundingBox() {\\\\n const origin = this.getOrigin();\\\\n const resolution = this.getResolution();\\\\n\\\\n const x1 = origin[0];\\\\n const y1 = origin[1];\\\\n\\\\n const x2 = x1 + (resolution[0] * this.getWidth());\\\\n const y2 = y1 + (resolution[1] * this.getHeight());\\\\n\\\\n return [\\\\n Math.min(x1, x2),\\\\n Math.min(y1, y2),\\\\n Math.max(x1, x2),\\\\n Math.max(y1, y2),\\\\n ];\\\\n }\\\\n}\\\\n\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (GeoTIFFImage);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiffimage.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/geotiffwriter.js\\\":\\n/*!***************************************************!*\\\\\\n !*** ./node_modules/geotiff/src/geotiffwriter.js ***!\\n \\\\***************************************************/\\n/*! exports provided: writeGeotiff */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"writeGeotiff\\\\\\\", function() { return writeGeotiff; });\\\\n/* harmony import */ var _globals__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./globals */ \\\\\\\"./node_modules/geotiff/src/globals.js\\\\\\\");\\\\n/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
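/*
 * Usage sketch for the georeferencing helpers above: the origin and per-pixel
 * resolution come from the ModelTiepoint / ModelPixelScale / ModelTransformation
 * tags, and getBoundingBox combines them into [minX, minY, maxX, maxY]. The
 * manual corner computation simply mirrors getBoundingBox for illustration.
 */
function sketchGeoreference(image) {
  const [originX, originY] = image.getOrigin();
  const [resX, resY] = image.getResolution();
  const bbox = image.getBoundingBox();
  // equivalent corner computation; resY is typically negative (north-up rasters)
  const maxX = originX + resX * image.getWidth();
  const minY = originY + resY * image.getHeight();
  return { originX, originY, resX, resY, bbox, maxX, minY };
}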
./utils */ \\\\\\\"./node_modules/geotiff/src/utils.js\\\\\\\");\\\\n/*\\\\n Some parts of this file are based on UTIF.js,\\\\n which was released under the MIT License.\\\\n You can view that here:\\\\n https://github.com/photopea/UTIF.js/blob/master/LICENSE\\\\n*/\\\\n\\\\n\\\\n\\\\nconst tagName2Code = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"invert\\\\\\\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"fieldTagNames\\\\\\\"]);\\\\nconst geoKeyName2Code = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"invert\\\\\\\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"geoKeyNames\\\\\\\"]);\\\\nconst name2code = {};\\\\nObject(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"assign\\\\\\\"])(name2code, tagName2Code);\\\\nObject(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"assign\\\\\\\"])(name2code, geoKeyName2Code);\\\\nconst typeName2byte = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"invert\\\\\\\"])(_globals__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"fieldTypeNames\\\\\\\"]);\\\\n\\\\n// config variables\\\\nconst numBytesInIfd = 1000;\\\\n\\\\nconst _binBE = {\\\\n nextZero: (data, o) => {\\\\n let oincr = o;\\\\n while (data[oincr] !== 0) {\\\\n oincr++;\\\\n }\\\\n return oincr;\\\\n },\\\\n readUshort: (buff, p) => {\\\\n return (buff[p] << 8) | buff[p + 1];\\\\n },\\\\n readShort: (buff, p) => {\\\\n const a = _binBE.ui8;\\\\n a[0] = buff[p + 1];\\\\n a[1] = buff[p + 0];\\\\n return _binBE.i16[0];\\\\n },\\\\n readInt: (buff, p) => {\\\\n const a = _binBE.ui8;\\\\n a[0] = buff[p + 3];\\\\n a[1] = buff[p + 2];\\\\n a[2] = buff[p + 1];\\\\n a[3] = buff[p + 0];\\\\n return _binBE.i32[0];\\\\n },\\\\n readUint: (buff, p) => {\\\\n const a = _binBE.ui8;\\\\n a[0] = buff[p + 3];\\\\n a[1] = buff[p + 2];\\\\n a[2] = buff[p + 1];\\\\n a[3] = buff[p + 0];\\\\n return _binBE.ui32[0];\\\\n },\\\\n readASCII: (buff, p, l) => {\\\\n return l.map((i) => String.fromCharCode(buff[p + i])).join('');\\\\n },\\\\n readFloat: (buff, p) => {\\\\n const a = _binBE.ui8;\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(4, (i) => {\\\\n a[i] = buff[p + 3 - i];\\\\n });\\\\n return _binBE.fl32[0];\\\\n },\\\\n readDouble: (buff, p) => {\\\\n const a = _binBE.ui8;\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(8, (i) => {\\\\n a[i] = buff[p + 7 - i];\\\\n });\\\\n return _binBE.fl64[0];\\\\n },\\\\n writeUshort: (buff, p, n) => {\\\\n buff[p] = (n >> 8) & 255;\\\\n buff[p + 1] = n & 255;\\\\n },\\\\n writeUint: (buff, p, n) => {\\\\n buff[p] = (n >> 24) & 255;\\\\n buff[p + 1] = (n >> 16) & 255;\\\\n buff[p + 2] = (n >> 8) & 255;\\\\n buff[p + 3] = (n >> 0) & 255;\\\\n },\\\\n writeASCII: (buff, p, s) => {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(s.length, (i) => {\\\\n buff[p + i] = s.charCodeAt(i);\\\\n });\\\\n },\\\\n ui8: new Uint8Array(8),\\\\n};\\\\n\\\\n_binBE.fl64 = new Float64Array(_binBE.ui8.buffer);\\\\n\\\\n_binBE.writeDouble = (buff, p, n) => {\\\\n _binBE.fl64[0] = n;\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(8, (i) => {\\\\n buff[p + i] = _binBE.ui8[7 - i];\\\\n });\\\\n};\\\\n\\\\n\\\\nconst _writeIFD = (bin, data, _offset, ifd) => {\\\\n let offset = _offset;\\\\n\\\\n const keys = Object.keys(ifd).filter((key) => {\\\\n return key !== undefined && key !== null && key !== 'undefined';\\\\n });\\\\n\\\\n bin.writeUshort(data, offset, keys.length);\\\\n offset += 2;\\\\n\\\\n let eoff = offset + (12 * keys.length) + 4;\\\\n\\\\n for (const key of keys) {\\\\n let tag = 
null;\\\\n if (typeof key === 'number') {\\\\n tag = key;\\\\n } else if (typeof key === 'string') {\\\\n tag = parseInt(key, 10);\\\\n }\\\\n\\\\n const typeName = _globals__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"fieldTagTypes\\\\\\\"][tag];\\\\n const typeNum = typeName2byte[typeName];\\\\n\\\\n if (typeName == null || typeName === undefined || typeof typeName === 'undefined') {\\\\n throw new Error(`unknown type of tag: ${tag}`);\\\\n }\\\\n\\\\n let val = ifd[key];\\\\n\\\\n if (typeof val === 'undefined') {\\\\n throw new Error(`failed to get value for key ${key}`);\\\\n }\\\\n\\\\n // ASCIIZ format with trailing 0 character\\\\n // http://www.fileformat.info/format/tiff/corion.htm\\\\n // https://stackoverflow.com/questions/7783044/whats-the-difference-between-asciiz-vs-ascii\\\\n if (typeName === 'ASCII' && typeof val === 'string' && Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"endsWith\\\\\\\"])(val, '\\\\\\\\u0000') === false) {\\\\n val += '\\\\\\\\u0000';\\\\n }\\\\n\\\\n const num = val.length;\\\\n\\\\n bin.writeUshort(data, offset, tag);\\\\n offset += 2;\\\\n\\\\n bin.writeUshort(data, offset, typeNum);\\\\n offset += 2;\\\\n\\\\n bin.writeUint(data, offset, num);\\\\n offset += 4;\\\\n\\\\n let dlen = [-1, 1, 1, 2, 4, 8, 0, 0, 0, 0, 0, 0, 8][typeNum] * num;\\\\n let toff = offset;\\\\n\\\\n if (dlen > 4) {\\\\n bin.writeUint(data, offset, eoff);\\\\n toff = eoff;\\\\n }\\\\n\\\\n if (typeName === 'ASCII') {\\\\n bin.writeASCII(data, toff, val);\\\\n } else if (typeName === 'SHORT') {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(num, (i) => {\\\\n bin.writeUshort(data, toff + (2 * i), val[i]);\\\\n });\\\\n } else if (typeName === 'LONG') {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(num, (i) => {\\\\n bin.writeUint(data, toff + (4 * i), val[i]);\\\\n });\\\\n } else if (typeName === 'RATIONAL') {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(num, (i) => {\\\\n bin.writeUint(data, toff + (8 * i), Math.round(val[i] * 10000));\\\\n bin.writeUint(data, toff + (8 * i) + 4, 10000);\\\\n });\\\\n } else if (typeName === 'DOUBLE') {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(num, (i) => {\\\\n bin.writeDouble(data, toff + (8 * i), val[i]);\\\\n });\\\\n }\\\\n\\\\n if (dlen > 4) {\\\\n dlen += (dlen & 1);\\\\n eoff += dlen;\\\\n }\\\\n\\\\n offset += 4;\\\\n }\\\\n\\\\n return [offset, eoff];\\\\n};\\\\n\\\\nconst encodeIfds = (ifds) => {\\\\n const data = new Uint8Array(numBytesInIfd);\\\\n let offset = 4;\\\\n const bin = _binBE;\\\\n\\\\n // set big-endian byte-order\\\\n // https://en.wikipedia.org/wiki/TIFF#Byte_order\\\\n data[0] = 77;\\\\n data[1] = 77;\\\\n\\\\n // set format-version number\\\\n // https://en.wikipedia.org/wiki/TIFF#Byte_order\\\\n data[3] = 42;\\\\n\\\\n let ifdo = 8;\\\\n\\\\n bin.writeUint(data, offset, ifdo);\\\\n\\\\n offset += 4;\\\\n\\\\n ifds.forEach((ifd, i) => {\\\\n const noffs = _writeIFD(bin, data, ifdo, ifd);\\\\n ifdo = noffs[1];\\\\n if (i < ifds.length - 1) {\\\\n bin.writeUint(data, noffs[0], ifdo);\\\\n }\\\\n });\\\\n\\\\n if (data.slice) {\\\\n return data.slice(0, ifdo).buffer;\\\\n }\\\\n\\\\n // node hasn't implemented slice on Uint8Array yet\\\\n const result = new Uint8Array(ifdo);\\\\n for (let i = 0; i < ifdo; i++) {\\\\n result[i] = data[i];\\\\n }\\\\n return result.buffer;\\\\n};\\\\n\\\\nconst encodeImage = (values, width, height, metadata) => {\\\\n if (height === undefined || height === null) {\\\\n throw new 
Error(`you passed into encodeImage a width of type ${height}`);\\\\n }\\\\n\\\\n if (width === undefined || width === null) {\\\\n throw new Error(`you passed into encodeImage a width of type ${width}`);\\\\n }\\\\n\\\\n const ifd = {\\\\n 256: [width], // ImageWidth\\\\n 257: [height], // ImageLength\\\\n 273: [numBytesInIfd], // strips offset\\\\n 278: [height], // RowsPerStrip\\\\n 305: 'geotiff.js', // no array for ASCII(Z)\\\\n };\\\\n\\\\n if (metadata) {\\\\n for (const i in metadata) {\\\\n if (metadata.hasOwnProperty(i)) {\\\\n ifd[i] = metadata[i];\\\\n }\\\\n }\\\\n }\\\\n\\\\n const prfx = new Uint8Array(encodeIfds([ifd]));\\\\n\\\\n const img = new Uint8Array(values);\\\\n\\\\n const samplesPerPixel = ifd[277];\\\\n\\\\n const data = new Uint8Array(numBytesInIfd + (width * height * samplesPerPixel));\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(prfx.length, (i) => {\\\\n data[i] = prfx[i];\\\\n });\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"forEach\\\\\\\"])(img, (value, i) => {\\\\n data[numBytesInIfd + i] = value;\\\\n });\\\\n\\\\n return data.buffer;\\\\n};\\\\n\\\\nconst convertToTids = (input) => {\\\\n const result = {};\\\\n for (const key in input) {\\\\n if (key !== 'StripOffsets') {\\\\n if (!name2code[key]) {\\\\n console.error(key, 'not in name2code:', Object.keys(name2code));\\\\n }\\\\n result[name2code[key]] = input[key];\\\\n }\\\\n }\\\\n return result;\\\\n};\\\\n\\\\nconst toArray = (input) => {\\\\n if (Array.isArray(input)) {\\\\n return input;\\\\n }\\\\n return [input];\\\\n};\\\\n\\\\nconst metadataDefaults = [\\\\n ['Compression', 1], // no compression\\\\n ['PlanarConfiguration', 1],\\\\n ['XPosition', 0],\\\\n ['YPosition', 0],\\\\n ['ResolutionUnit', 1], // Code 1 for actual pixel count or 2 for pixels per inch.\\\\n ['ExtraSamples', 0], // should this be an array??\\\\n ['GeoAsciiParams', 'WGS 84\\\\\\\\u0000'],\\\\n ['ModelTiepoint', [0, 0, 0, -180, 90, 0]], // raster fits whole globe\\\\n ['GTModelTypeGeoKey', 2],\\\\n ['GTRasterTypeGeoKey', 1],\\\\n ['GeographicTypeGeoKey', 4326],\\\\n ['GeogCitationGeoKey', 'WGS 84'],\\\\n];\\\\n\\\\nfunction writeGeotiff(data, metadata) {\\\\n const isFlattened = typeof data[0] === 'number';\\\\n\\\\n let height;\\\\n let numBands;\\\\n let width;\\\\n let flattenedValues;\\\\n\\\\n if (isFlattened) {\\\\n height = metadata.height || metadata.ImageLength;\\\\n width = metadata.width || metadata.ImageWidth;\\\\n numBands = data.length / (height * width);\\\\n flattenedValues = data;\\\\n } else {\\\\n numBands = data.length;\\\\n height = data[0].length;\\\\n width = data[0][0].length;\\\\n flattenedValues = [];\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(height, (rowIndex) => {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(width, (columnIndex) => {\\\\n Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(numBands, (bandIndex) => {\\\\n flattenedValues.push(data[bandIndex][rowIndex][columnIndex]);\\\\n });\\\\n });\\\\n });\\\\n }\\\\n\\\\n metadata.ImageLength = height;\\\\n delete metadata.height;\\\\n metadata.ImageWidth = width;\\\\n delete metadata.width;\\\\n\\\\n // consult https://www.loc.gov/preservation/digital/formats/content/tiff_tags.shtml\\\\n\\\\n if (!metadata.BitsPerSample) {\\\\n metadata.BitsPerSample = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(numBands, () => 8);\\\\n }\\\\n\\\\n metadataDefaults.forEach((tag) => {\\\\n const key = tag[0];\\\\n if 
(!metadata[key]) {\\\\n const value = tag[1];\\\\n metadata[key] = value;\\\\n }\\\\n });\\\\n\\\\n // The color space of the image data.\\\\n // 1=black is zero and 2=RGB.\\\\n if (!metadata.PhotometricInterpretation) {\\\\n metadata.PhotometricInterpretation = metadata.BitsPerSample.length === 3 ? 2 : 1;\\\\n }\\\\n\\\\n // The number of components per pixel.\\\\n if (!metadata.SamplesPerPixel) {\\\\n metadata.SamplesPerPixel = [numBands];\\\\n }\\\\n\\\\n if (!metadata.StripByteCounts) {\\\\n // we are only writing one strip\\\\n metadata.StripByteCounts = [numBands * height * width];\\\\n }\\\\n\\\\n if (!metadata.ModelPixelScale) {\\\\n // assumes raster takes up exactly the whole globe\\\\n metadata.ModelPixelScale = [360 / width, 180 / height, 0];\\\\n }\\\\n\\\\n if (!metadata.SampleFormat) {\\\\n metadata.SampleFormat = Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"times\\\\\\\"])(numBands, () => 1);\\\\n }\\\\n\\\\n\\\\n const geoKeys = Object.keys(metadata)\\\\n .filter((key) => Object(_utils__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"endsWith\\\\\\\"])(key, 'GeoKey'))\\\\n .sort((a, b) => name2code[a] - name2code[b]);\\\\n\\\\n if (!metadata.GeoKeyDirectory) {\\\\n const NumberOfKeys = geoKeys.length;\\\\n\\\\n const GeoKeyDirectory = [1, 1, 0, NumberOfKeys];\\\\n geoKeys.forEach((geoKey) => {\\\\n const KeyID = Number(name2code[geoKey]);\\\\n GeoKeyDirectory.push(KeyID);\\\\n\\\\n let Count;\\\\n let TIFFTagLocation;\\\\n let valueOffset;\\\\n if (_globals__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"fieldTagTypes\\\\\\\"][KeyID] === 'SHORT') {\\\\n Count = 1;\\\\n TIFFTagLocation = 0;\\\\n valueOffset = metadata[geoKey];\\\\n } else if (geoKey === 'GeogCitationGeoKey') {\\\\n Count = metadata.GeoAsciiParams.length;\\\\n TIFFTagLocation = Number(name2code.GeoAsciiParams);\\\\n valueOffset = 0;\\\\n } else {\\\\n console.log(`[geotiff.js] couldn't get TIFFTagLocation for ${geoKey}`);\\\\n }\\\\n GeoKeyDirectory.push(TIFFTagLocation);\\\\n GeoKeyDirectory.push(Count);\\\\n GeoKeyDirectory.push(valueOffset);\\\\n });\\\\n metadata.GeoKeyDirectory = GeoKeyDirectory;\\\\n }\\\\n\\\\n // delete GeoKeys from metadata, because stored in GeoKeyDirectory tag\\\\n for (const geoKey in geoKeys) {\\\\n if (geoKeys.hasOwnProperty(geoKey)) {\\\\n delete metadata[geoKey];\\\\n }\\\\n }\\\\n\\\\n [\\\\n 'Compression',\\\\n 'ExtraSamples',\\\\n 'GeographicTypeGeoKey',\\\\n 'GTModelTypeGeoKey',\\\\n 'GTRasterTypeGeoKey',\\\\n 'ImageLength', // synonym of ImageHeight\\\\n 'ImageWidth',\\\\n 'PhotometricInterpretation',\\\\n 'PlanarConfiguration',\\\\n 'ResolutionUnit',\\\\n 'SamplesPerPixel',\\\\n 'XPosition',\\\\n 'YPosition',\\\\n ].forEach((name) => {\\\\n if (metadata[name]) {\\\\n metadata[name] = toArray(metadata[name]);\\\\n }\\\\n });\\\\n\\\\n\\\\n const encodedMetadata = convertToTids(metadata);\\\\n\\\\n const outputImage = encodeImage(flattenedValues, width, height, encodedMetadata);\\\\n\\\\n return outputImage;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/geotiffwriter.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/globals.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/globals.js ***!\\n \\\\*********************************************/\\n/*! 
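/*
 * Usage sketch for writeGeotiff / the writeArrayBuffer export above: values may
 * be a flat array (with height/width given in the metadata) or nested as
 * [band][row][column]; unspecified tags fall back to metadataDefaults, which
 * assume a WGS 84 raster covering the whole globe. The import specifier and the
 * tiny 2x2 raster are placeholders; the call resolves to an ArrayBuffer holding
 * the encoded TIFF.
 */
import { writeArrayBuffer } from 'geotiff';

async function sketchWriteTinyGeotiff() {
  const values = [
    [
      [1, 2],
      [3, 4],
    ],
  ]; // one band, 2x2 pixels
  const metadata = {
    height: 2,
    width: 2,
    ModelTiepoint: [0, 0, 0, -180, 90, 0], // upper-left pixel pinned to (-180, 90)
    ModelPixelScale: [180, 90, 0],         // degrees per pixel for a 2x2 global raster
  };
  return writeArrayBuffer(values, metadata);
}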
exports provided: fieldTagNames, fieldTags, fieldTagTypes, arrayFields, fieldTypeNames, fieldTypes, photometricInterpretations, ExtraSamplesValues, geoKeyNames, geoKeys */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fieldTagNames\\\\\\\", function() { return fieldTagNames; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fieldTags\\\\\\\", function() { return fieldTags; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fieldTagTypes\\\\\\\", function() { return fieldTagTypes; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"arrayFields\\\\\\\", function() { return arrayFields; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fieldTypeNames\\\\\\\", function() { return fieldTypeNames; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fieldTypes\\\\\\\", function() { return fieldTypes; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"photometricInterpretations\\\\\\\", function() { return photometricInterpretations; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"ExtraSamplesValues\\\\\\\", function() { return ExtraSamplesValues; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"geoKeyNames\\\\\\\", function() { return geoKeyNames; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"geoKeys\\\\\\\", function() { return geoKeys; });\\\\nconst fieldTagNames = {\\\\n // TIFF Baseline\\\\n 0x013B: 'Artist',\\\\n 0x0102: 'BitsPerSample',\\\\n 0x0109: 'CellLength',\\\\n 0x0108: 'CellWidth',\\\\n 0x0140: 'ColorMap',\\\\n 0x0103: 'Compression',\\\\n 0x8298: 'Copyright',\\\\n 0x0132: 'DateTime',\\\\n 0x0152: 'ExtraSamples',\\\\n 0x010A: 'FillOrder',\\\\n 0x0121: 'FreeByteCounts',\\\\n 0x0120: 'FreeOffsets',\\\\n 0x0123: 'GrayResponseCurve',\\\\n 0x0122: 'GrayResponseUnit',\\\\n 0x013C: 'HostComputer',\\\\n 0x010E: 'ImageDescription',\\\\n 0x0101: 'ImageLength',\\\\n 0x0100: 'ImageWidth',\\\\n 0x010F: 'Make',\\\\n 0x0119: 'MaxSampleValue',\\\\n 0x0118: 'MinSampleValue',\\\\n 0x0110: 'Model',\\\\n 0x00FE: 'NewSubfileType',\\\\n 0x0112: 'Orientation',\\\\n 0x0106: 'PhotometricInterpretation',\\\\n 0x011C: 'PlanarConfiguration',\\\\n 0x0128: 'ResolutionUnit',\\\\n 0x0116: 'RowsPerStrip',\\\\n 0x0115: 'SamplesPerPixel',\\\\n 0x0131: 'Software',\\\\n 0x0117: 'StripByteCounts',\\\\n 0x0111: 'StripOffsets',\\\\n 0x00FF: 'SubfileType',\\\\n 0x0107: 'Threshholding',\\\\n 0x011A: 'XResolution',\\\\n 0x011B: 'YResolution',\\\\n\\\\n // TIFF Extended\\\\n 0x0146: 'BadFaxLines',\\\\n 0x0147: 'CleanFaxData',\\\\n 0x0157: 'ClipPath',\\\\n 0x0148: 'ConsecutiveBadFaxLines',\\\\n 0x01B1: 'Decode',\\\\n 0x01B2: 'DefaultImageColor',\\\\n 0x010D: 'DocumentName',\\\\n 0x0150: 'DotRange',\\\\n 0x0141: 'HalftoneHints',\\\\n 0x015A: 'Indexed',\\\\n 0x015B: 'JPEGTables',\\\\n 0x011D: 'PageName',\\\\n 0x0129: 'PageNumber',\\\\n 0x013D: 'Predictor',\\\\n 0x013F: 'PrimaryChromaticities',\\\\n 0x0214: 'ReferenceBlackWhite',\\\\n 0x0153: 'SampleFormat',\\\\n 0x0154: 'SMinSampleValue',\\\\n 0x0155: 'SMaxSampleValue',\\\\n 0x022F: 'StripRowCounts',\\\\n 0x014A: 'SubIFDs',\\\\n 0x0124: 
'T4Options',\\\\n 0x0125: 'T6Options',\\\\n 0x0145: 'TileByteCounts',\\\\n 0x0143: 'TileLength',\\\\n 0x0144: 'TileOffsets',\\\\n 0x0142: 'TileWidth',\\\\n 0x012D: 'TransferFunction',\\\\n 0x013E: 'WhitePoint',\\\\n 0x0158: 'XClipPathUnits',\\\\n 0x011E: 'XPosition',\\\\n 0x0211: 'YCbCrCoefficients',\\\\n 0x0213: 'YCbCrPositioning',\\\\n 0x0212: 'YCbCrSubSampling',\\\\n 0x0159: 'YClipPathUnits',\\\\n 0x011F: 'YPosition',\\\\n\\\\n // EXIF\\\\n 0x9202: 'ApertureValue',\\\\n 0xA001: 'ColorSpace',\\\\n 0x9004: 'DateTimeDigitized',\\\\n 0x9003: 'DateTimeOriginal',\\\\n 0x8769: 'Exif IFD',\\\\n 0x9000: 'ExifVersion',\\\\n 0x829A: 'ExposureTime',\\\\n 0xA300: 'FileSource',\\\\n 0x9209: 'Flash',\\\\n 0xA000: 'FlashpixVersion',\\\\n 0x829D: 'FNumber',\\\\n 0xA420: 'ImageUniqueID',\\\\n 0x9208: 'LightSource',\\\\n 0x927C: 'MakerNote',\\\\n 0x9201: 'ShutterSpeedValue',\\\\n 0x9286: 'UserComment',\\\\n\\\\n // IPTC\\\\n 0x83BB: 'IPTC',\\\\n\\\\n // ICC\\\\n 0x8773: 'ICC Profile',\\\\n\\\\n // XMP\\\\n 0x02BC: 'XMP',\\\\n\\\\n // GDAL\\\\n 0xA480: 'GDAL_METADATA',\\\\n 0xA481: 'GDAL_NODATA',\\\\n\\\\n // Photoshop\\\\n 0x8649: 'Photoshop',\\\\n\\\\n // GeoTiff\\\\n 0x830E: 'ModelPixelScale',\\\\n 0x8482: 'ModelTiepoint',\\\\n 0x85D8: 'ModelTransformation',\\\\n 0x87AF: 'GeoKeyDirectory',\\\\n 0x87B0: 'GeoDoubleParams',\\\\n 0x87B1: 'GeoAsciiParams',\\\\n};\\\\n\\\\nconst fieldTags = {};\\\\nfor (const key in fieldTagNames) {\\\\n if (fieldTagNames.hasOwnProperty(key)) {\\\\n fieldTags[fieldTagNames[key]] = parseInt(key, 10);\\\\n }\\\\n}\\\\n\\\\nconst fieldTagTypes = {\\\\n 256: 'SHORT',\\\\n 257: 'SHORT',\\\\n 258: 'SHORT',\\\\n 259: 'SHORT',\\\\n 262: 'SHORT',\\\\n 273: 'LONG',\\\\n 274: 'SHORT',\\\\n 277: 'SHORT',\\\\n 278: 'LONG',\\\\n 279: 'LONG',\\\\n 282: 'RATIONAL',\\\\n 283: 'RATIONAL',\\\\n 284: 'SHORT',\\\\n 286: 'SHORT',\\\\n 287: 'RATIONAL',\\\\n 296: 'SHORT',\\\\n 305: 'ASCII',\\\\n 306: 'ASCII',\\\\n 338: 'SHORT',\\\\n 339: 'SHORT',\\\\n 513: 'LONG',\\\\n 514: 'LONG',\\\\n 1024: 'SHORT',\\\\n 1025: 'SHORT',\\\\n 2048: 'SHORT',\\\\n 2049: 'ASCII',\\\\n 33550: 'DOUBLE',\\\\n 33922: 'DOUBLE',\\\\n 34665: 'LONG',\\\\n 34735: 'SHORT',\\\\n 34737: 'ASCII',\\\\n 42113: 'ASCII',\\\\n};\\\\n\\\\nconst arrayFields = [\\\\n fieldTags.BitsPerSample,\\\\n fieldTags.ExtraSamples,\\\\n fieldTags.SampleFormat,\\\\n fieldTags.StripByteCounts,\\\\n fieldTags.StripOffsets,\\\\n fieldTags.StripRowCounts,\\\\n fieldTags.TileByteCounts,\\\\n fieldTags.TileOffsets,\\\\n];\\\\n\\\\nconst fieldTypeNames = {\\\\n 0x0001: 'BYTE',\\\\n 0x0002: 'ASCII',\\\\n 0x0003: 'SHORT',\\\\n 0x0004: 'LONG',\\\\n 0x0005: 'RATIONAL',\\\\n 0x0006: 'SBYTE',\\\\n 0x0007: 'UNDEFINED',\\\\n 0x0008: 'SSHORT',\\\\n 0x0009: 'SLONG',\\\\n 0x000A: 'SRATIONAL',\\\\n 0x000B: 'FLOAT',\\\\n 0x000C: 'DOUBLE',\\\\n // IFD offset, suggested by https://owl.phy.queensu.ca/~phil/exiftool/standards.html\\\\n 0x000D: 'IFD',\\\\n // introduced by BigTIFF\\\\n 0x0010: 'LONG8',\\\\n 0x0011: 'SLONG8',\\\\n 0x0012: 'IFD8',\\\\n};\\\\n\\\\nconst fieldTypes = {};\\\\nfor (const key in fieldTypeNames) {\\\\n if (fieldTypeNames.hasOwnProperty(key)) {\\\\n fieldTypes[fieldTypeNames[key]] = parseInt(key, 10);\\\\n }\\\\n}\\\\n\\\\nconst photometricInterpretations = {\\\\n WhiteIsZero: 0,\\\\n BlackIsZero: 1,\\\\n RGB: 2,\\\\n Palette: 3,\\\\n TransparencyMask: 4,\\\\n CMYK: 5,\\\\n YCbCr: 6,\\\\n\\\\n CIELab: 8,\\\\n ICCLab: 9,\\\\n};\\\\n\\\\nconst ExtraSamplesValues = {\\\\n Unspecified: 0,\\\\n Assocalpha: 1,\\\\n Unassalpha: 2,\\\\n};\\\\n\\\\n\\\\nconst 
geoKeyNames = {\\\\n 1024: 'GTModelTypeGeoKey',\\\\n 1025: 'GTRasterTypeGeoKey',\\\\n 1026: 'GTCitationGeoKey',\\\\n 2048: 'GeographicTypeGeoKey',\\\\n 2049: 'GeogCitationGeoKey',\\\\n 2050: 'GeogGeodeticDatumGeoKey',\\\\n 2051: 'GeogPrimeMeridianGeoKey',\\\\n 2052: 'GeogLinearUnitsGeoKey',\\\\n 2053: 'GeogLinearUnitSizeGeoKey',\\\\n 2054: 'GeogAngularUnitsGeoKey',\\\\n 2055: 'GeogAngularUnitSizeGeoKey',\\\\n 2056: 'GeogEllipsoidGeoKey',\\\\n 2057: 'GeogSemiMajorAxisGeoKey',\\\\n 2058: 'GeogSemiMinorAxisGeoKey',\\\\n 2059: 'GeogInvFlatteningGeoKey',\\\\n 2060: 'GeogAzimuthUnitsGeoKey',\\\\n 2061: 'GeogPrimeMeridianLongGeoKey',\\\\n 2062: 'GeogTOWGS84GeoKey',\\\\n 3072: 'ProjectedCSTypeGeoKey',\\\\n 3073: 'PCSCitationGeoKey',\\\\n 3074: 'ProjectionGeoKey',\\\\n 3075: 'ProjCoordTransGeoKey',\\\\n 3076: 'ProjLinearUnitsGeoKey',\\\\n 3077: 'ProjLinearUnitSizeGeoKey',\\\\n 3078: 'ProjStdParallel1GeoKey',\\\\n 3079: 'ProjStdParallel2GeoKey',\\\\n 3080: 'ProjNatOriginLongGeoKey',\\\\n 3081: 'ProjNatOriginLatGeoKey',\\\\n 3082: 'ProjFalseEastingGeoKey',\\\\n 3083: 'ProjFalseNorthingGeoKey',\\\\n 3084: 'ProjFalseOriginLongGeoKey',\\\\n 3085: 'ProjFalseOriginLatGeoKey',\\\\n 3086: 'ProjFalseOriginEastingGeoKey',\\\\n 3087: 'ProjFalseOriginNorthingGeoKey',\\\\n 3088: 'ProjCenterLongGeoKey',\\\\n 3089: 'ProjCenterLatGeoKey',\\\\n 3090: 'ProjCenterEastingGeoKey',\\\\n 3091: 'ProjCenterNorthingGeoKey',\\\\n 3092: 'ProjScaleAtNatOriginGeoKey',\\\\n 3093: 'ProjScaleAtCenterGeoKey',\\\\n 3094: 'ProjAzimuthAngleGeoKey',\\\\n 3095: 'ProjStraightVertPoleLongGeoKey',\\\\n 3096: 'ProjRectifiedGridAngleGeoKey',\\\\n 4096: 'VerticalCSTypeGeoKey',\\\\n 4097: 'VerticalCitationGeoKey',\\\\n 4098: 'VerticalDatumGeoKey',\\\\n 4099: 'VerticalUnitsGeoKey',\\\\n};\\\\n\\\\nconst geoKeys = {};\\\\nfor (const key in geoKeyNames) {\\\\n if (geoKeyNames.hasOwnProperty(key)) {\\\\n geoKeys[geoKeyNames[key]] = parseInt(key, 10);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/globals.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/logging.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/logging.js ***!\\n \\\\*********************************************/\\n/*! 
exports provided: setLogger, log, info, warn, error, time, timeEnd */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"setLogger\\\\\\\", function() { return setLogger; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"log\\\\\\\", function() { return log; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"info\\\\\\\", function() { return info; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"warn\\\\\\\", function() { return warn; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"error\\\\\\\", function() { return error; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"time\\\\\\\", function() { return time; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"timeEnd\\\\\\\", function() { return timeEnd; });\\\\n\\\\n/**\\\\n * A no-op logger\\\\n */\\\\nclass DummyLogger {\\\\n log() {}\\\\n\\\\n info() {}\\\\n\\\\n warn() {}\\\\n\\\\n error() {}\\\\n\\\\n time() {}\\\\n\\\\n timeEnd() {}\\\\n}\\\\n\\\\nlet LOGGER = new DummyLogger();\\\\n\\\\n/**\\\\n *\\\\n * @param {object} logger the new logger. e.g `console`\\\\n */\\\\nfunction setLogger(logger = new DummyLogger()) {\\\\n LOGGER = logger;\\\\n}\\\\n\\\\nfunction log(...args) {\\\\n return LOGGER.log(...args);\\\\n}\\\\n\\\\nfunction info(...args) {\\\\n return LOGGER.info(...args);\\\\n}\\\\n\\\\nfunction warn(...args) {\\\\n return LOGGER.warn(...args);\\\\n}\\\\n\\\\nfunction error(...args) {\\\\n return LOGGER.error(...args);\\\\n}\\\\n\\\\nfunction time(...args) {\\\\n return LOGGER.time(...args);\\\\n}\\\\n\\\\nfunction timeEnd(...args) {\\\\n return LOGGER.timeEnd(...args);\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/logging.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/pool.js\\\":\\n/*!******************************************!*\\\\\\n !*** ./node_modules/geotiff/src/pool.js ***!\\n \\\\******************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* WEBPACK VAR INJECTION */(function(__webpack__worker__1) {/* harmony import */ var threads__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! threads */ \\\\\\\"./node_modules/threads/dist-esm/index.js\\\\\\\");\\\\n\\\\n\\\\nconst defaultPoolSize = typeof navigator !== 'undefined' ? navigator.hardwareConcurrency : null;\\\\n\\\\n/**\\\\n * @module pool\\\\n */\\\\n\\\\n/**\\\\n * Pool for workers to decode chunks of the images.\\\\n */\\\\nclass Pool {\\\\n /**\\\\n * @constructor\\\\n * @param {Number} size The size of the pool. Defaults to the number of CPUs\\\\n * available. 
When this parameter is `null` or 0, then the\\\\n * decoding will be done in the main thread.\\\\n */\\\\n constructor(size = defaultPoolSize) {\\\\n const worker = new threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Worker\\\\\\\"](__webpack__worker__1);\\\\n this.pool = Object(threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Pool\\\\\\\"])(() => Object(threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"spawn\\\\\\\"])(worker), size);\\\\n }\\\\n\\\\n /**\\\\n * Decode the given block of bytes with the set compression method.\\\\n * @param {ArrayBuffer} buffer the array buffer of bytes to decode.\\\\n * @returns {Promise.<ArrayBuffer>} the decoded result as a `Promise`\\\\n */\\\\n async decode(fileDirectory, buffer) {\\\\n return new Promise((resolve, reject) => {\\\\n this.pool.queue(async (decode) => {\\\\n try {\\\\n const data = await decode(fileDirectory, buffer);\\\\n resolve(data);\\\\n } catch (err) {\\\\n reject(err);\\\\n }\\\\n });\\\\n });\\\\n }\\\\n\\\\n destroy() {\\\\n this.pool.terminate(true);\\\\n }\\\\n}\\\\n\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (Pool);\\\\n\\\\n/* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./node_modules/threads-plugin/dist/loader.js?{\\\\\\\"name\\\\\\\":\\\\\\\"1\\\\\\\"}!./decoder.worker.js */ \\\\\\\"./node_modules/threads-plugin/dist/loader.js?{\\\\\\\\\\\\\\\"name\\\\\\\\\\\\\\\":\\\\\\\\\\\\\\\"1\\\\\\\\\\\\\\\"}!./node_modules/geotiff/src/decoder.worker.js\\\\\\\")))\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/pool.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/predictor.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/predictor.js ***!\\n \\\\***********************************************/\\n/*! 
exports provided: applyPredictor */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"applyPredictor\\\\\\\", function() { return applyPredictor; });\\\\n\\\\nfunction decodeRowAcc(row, stride) {\\\\n let length = row.length - stride;\\\\n let offset = 0;\\\\n do {\\\\n for (let i = stride; i > 0; i--) {\\\\n row[offset + stride] += row[offset];\\\\n offset++;\\\\n }\\\\n\\\\n length -= stride;\\\\n } while (length > 0);\\\\n}\\\\n\\\\nfunction decodeRowFloatingPoint(row, stride, bytesPerSample) {\\\\n let index = 0;\\\\n let count = row.length;\\\\n const wc = count / bytesPerSample;\\\\n\\\\n while (count > stride) {\\\\n for (let i = stride; i > 0; --i) {\\\\n row[index + stride] += row[index];\\\\n ++index;\\\\n }\\\\n count -= stride;\\\\n }\\\\n\\\\n const copy = row.slice();\\\\n for (let i = 0; i < wc; ++i) {\\\\n for (let b = 0; b < bytesPerSample; ++b) {\\\\n row[(bytesPerSample * i) + b] = copy[((bytesPerSample - b - 1) * wc) + i];\\\\n }\\\\n }\\\\n}\\\\n\\\\nfunction applyPredictor(block, predictor, width, height, bitsPerSample,\\\\n planarConfiguration) {\\\\n if (!predictor || predictor === 1) {\\\\n return block;\\\\n }\\\\n\\\\n for (let i = 0; i < bitsPerSample.length; ++i) {\\\\n if (bitsPerSample[i] % 8 !== 0) {\\\\n throw new Error('When decoding with predictor, only multiple of 8 bits are supported.');\\\\n }\\\\n if (bitsPerSample[i] !== bitsPerSample[0]) {\\\\n throw new Error('When decoding with predictor, all samples must have the same size.');\\\\n }\\\\n }\\\\n\\\\n const bytesPerSample = bitsPerSample[0] / 8;\\\\n const stride = planarConfiguration === 2 ? 1 : bitsPerSample.length;\\\\n\\\\n for (let i = 0; i < height; ++i) {\\\\n // Last strip will be truncated if height % stripHeight != 0\\\\n if (i * stride * width * bytesPerSample >= block.byteLength) {\\\\n break;\\\\n }\\\\n let row;\\\\n if (predictor === 2) { // horizontal prediction\\\\n switch (bitsPerSample[0]) {\\\\n case 8:\\\\n row = new Uint8Array(\\\\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,\\\\n );\\\\n break;\\\\n case 16:\\\\n row = new Uint16Array(\\\\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 2,\\\\n );\\\\n break;\\\\n case 32:\\\\n row = new Uint32Array(\\\\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample / 4,\\\\n );\\\\n break;\\\\n default:\\\\n throw new Error(`Predictor 2 not allowed with ${bitsPerSample[0]} bits per sample.`);\\\\n }\\\\n decodeRowAcc(row, stride, bytesPerSample);\\\\n } else if (predictor === 3) { // horizontal floating point\\\\n row = new Uint8Array(\\\\n block, i * stride * width * bytesPerSample, stride * width * bytesPerSample,\\\\n );\\\\n decodeRowFloatingPoint(row, stride, bytesPerSample);\\\\n }\\\\n }\\\\n return block;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/predictor.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/resample.js\\\":\\n/*!**********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/resample.js ***!\\n \\\\**********************************************/\\n/*! 
exports provided: resampleNearest, resampleBilinear, resample, resampleNearestInterleaved, resampleBilinearInterleaved, resampleInterleaved */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resampleNearest\\\\\\\", function() { return resampleNearest; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resampleBilinear\\\\\\\", function() { return resampleBilinear; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resample\\\\\\\", function() { return resample; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resampleNearestInterleaved\\\\\\\", function() { return resampleNearestInterleaved; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resampleBilinearInterleaved\\\\\\\", function() { return resampleBilinearInterleaved; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"resampleInterleaved\\\\\\\", function() { return resampleInterleaved; });\\\\n/**\\\\n * @module resample\\\\n */\\\\n\\\\nfunction copyNewSize(array, width, height, samplesPerPixel = 1) {\\\\n return new (Object.getPrototypeOf(array).constructor)(width * height * samplesPerPixel);\\\\n}\\\\n\\\\n/**\\\\n * Resample the input arrays using nearest neighbor value selection.\\\\n * @param {TypedArray[]} valueArrays The input arrays to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @returns {TypedArray[]} The resampled rasters\\\\n */\\\\nfunction resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight) {\\\\n const relX = inWidth / outWidth;\\\\n const relY = inHeight / outHeight;\\\\n return valueArrays.map((array) => {\\\\n const newArray = copyNewSize(array, outWidth, outHeight);\\\\n for (let y = 0; y < outHeight; ++y) {\\\\n const cy = Math.min(Math.round(relY * y), inHeight - 1);\\\\n for (let x = 0; x < outWidth; ++x) {\\\\n const cx = Math.min(Math.round(relX * x), inWidth - 1);\\\\n const value = array[(cy * inWidth) + cx];\\\\n newArray[(y * outWidth) + x] = value;\\\\n }\\\\n }\\\\n return newArray;\\\\n });\\\\n}\\\\n\\\\n// simple linear interpolation, code from:\\\\n// https://en.wikipedia.org/wiki/Linear_interpolation#Programming_language_support\\\\nfunction lerp(v0, v1, t) {\\\\n return ((1 - t) * v0) + (t * v1);\\\\n}\\\\n\\\\n/**\\\\n * Resample the input arrays using bilinear interpolation.\\\\n * @param {TypedArray[]} valueArrays The input arrays to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @returns {TypedArray[]} The resampled rasters\\\\n */\\\\nfunction resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight) {\\\\n const relX = inWidth / outWidth;\\\\n const relY = inHeight / outHeight;\\\\n\\\\n return valueArrays.map((array) => {\\\\n const newArray = copyNewSize(array, outWidth, outHeight);\\\\n for (let y = 0; y 
< outHeight; ++y) {\\\\n const rawY = relY * y;\\\\n\\\\n const yl = Math.floor(rawY);\\\\n const yh = Math.min(Math.ceil(rawY), (inHeight - 1));\\\\n\\\\n for (let x = 0; x < outWidth; ++x) {\\\\n const rawX = relX * x;\\\\n const tx = rawX % 1;\\\\n\\\\n const xl = Math.floor(rawX);\\\\n const xh = Math.min(Math.ceil(rawX), (inWidth - 1));\\\\n\\\\n const ll = array[(yl * inWidth) + xl];\\\\n const hl = array[(yl * inWidth) + xh];\\\\n const lh = array[(yh * inWidth) + xl];\\\\n const hh = array[(yh * inWidth) + xh];\\\\n\\\\n const value = lerp(\\\\n lerp(ll, hl, tx),\\\\n lerp(lh, hh, tx),\\\\n rawY % 1,\\\\n );\\\\n newArray[(y * outWidth) + x] = value;\\\\n }\\\\n }\\\\n return newArray;\\\\n });\\\\n}\\\\n\\\\n/**\\\\n * Resample the input arrays using the selected resampling method.\\\\n * @param {TypedArray[]} valueArrays The input arrays to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @param {string} [method = 'nearest'] The desired resampling method\\\\n * @returns {TypedArray[]} The resampled rasters\\\\n */\\\\nfunction resample(valueArrays, inWidth, inHeight, outWidth, outHeight, method = 'nearest') {\\\\n switch (method.toLowerCase()) {\\\\n case 'nearest':\\\\n return resampleNearest(valueArrays, inWidth, inHeight, outWidth, outHeight);\\\\n case 'bilinear':\\\\n case 'linear':\\\\n return resampleBilinear(valueArrays, inWidth, inHeight, outWidth, outHeight);\\\\n default:\\\\n throw new Error(`Unsupported resampling method: '${method}'`);\\\\n }\\\\n}\\\\n\\\\n/**\\\\n * Resample the pixel interleaved input array using nearest neighbor value selection.\\\\n * @param {TypedArray} valueArrays The input arrays to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @param {number} samples The number of samples per pixel for pixel\\\\n * interleaved data\\\\n * @returns {TypedArray} The resampled raster\\\\n */\\\\nfunction resampleNearestInterleaved(\\\\n valueArray, inWidth, inHeight, outWidth, outHeight, samples) {\\\\n const relX = inWidth / outWidth;\\\\n const relY = inHeight / outHeight;\\\\n\\\\n const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);\\\\n for (let y = 0; y < outHeight; ++y) {\\\\n const cy = Math.min(Math.round(relY * y), inHeight - 1);\\\\n for (let x = 0; x < outWidth; ++x) {\\\\n const cx = Math.min(Math.round(relX * x), inWidth - 1);\\\\n for (let i = 0; i < samples; ++i) {\\\\n const value = valueArray[(cy * inWidth * samples) + (cx * samples) + i];\\\\n newArray[(y * outWidth * samples) + (x * samples) + i] = value;\\\\n }\\\\n }\\\\n }\\\\n return newArray;\\\\n}\\\\n\\\\n/**\\\\n * Resample the pixel interleaved input array using bilinear interpolation.\\\\n * @param {TypedArray} valueArrays The input arrays to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @param {number} samples The number of samples per pixel for 
pixel\\\\n * interleaved data\\\\n * @returns {TypedArray} The resampled raster\\\\n */\\\\nfunction resampleBilinearInterleaved(\\\\n valueArray, inWidth, inHeight, outWidth, outHeight, samples) {\\\\n const relX = inWidth / outWidth;\\\\n const relY = inHeight / outHeight;\\\\n const newArray = copyNewSize(valueArray, outWidth, outHeight, samples);\\\\n for (let y = 0; y < outHeight; ++y) {\\\\n const rawY = relY * y;\\\\n\\\\n const yl = Math.floor(rawY);\\\\n const yh = Math.min(Math.ceil(rawY), (inHeight - 1));\\\\n\\\\n for (let x = 0; x < outWidth; ++x) {\\\\n const rawX = relX * x;\\\\n const tx = rawX % 1;\\\\n\\\\n const xl = Math.floor(rawX);\\\\n const xh = Math.min(Math.ceil(rawX), (inWidth - 1));\\\\n\\\\n for (let i = 0; i < samples; ++i) {\\\\n const ll = valueArray[(yl * inWidth * samples) + (xl * samples) + i];\\\\n const hl = valueArray[(yl * inWidth * samples) + (xh * samples) + i];\\\\n const lh = valueArray[(yh * inWidth * samples) + (xl * samples) + i];\\\\n const hh = valueArray[(yh * inWidth * samples) + (xh * samples) + i];\\\\n\\\\n const value = lerp(\\\\n lerp(ll, hl, tx),\\\\n lerp(lh, hh, tx),\\\\n rawY % 1,\\\\n );\\\\n newArray[(y * outWidth * samples) + (x * samples) + i] = value;\\\\n }\\\\n }\\\\n }\\\\n return newArray;\\\\n}\\\\n\\\\n/**\\\\n * Resample the pixel interleaved input array using the selected resampling method.\\\\n * @param {TypedArray} valueArray The input array to resample\\\\n * @param {number} inWidth The width of the input rasters\\\\n * @param {number} inHeight The height of the input rasters\\\\n * @param {number} outWidth The desired width of the output rasters\\\\n * @param {number} outHeight The desired height of the output rasters\\\\n * @param {number} samples The number of samples per pixel for pixel\\\\n * interleaved data\\\\n * @param {string} [method = 'nearest'] The desired resampling method\\\\n * @returns {TypedArray} The resampled rasters\\\\n */\\\\nfunction resampleInterleaved(valueArray, inWidth, inHeight, outWidth, outHeight, samples, method = 'nearest') {\\\\n switch (method.toLowerCase()) {\\\\n case 'nearest':\\\\n return resampleNearestInterleaved(\\\\n valueArray, inWidth, inHeight, outWidth, outHeight, samples,\\\\n );\\\\n case 'bilinear':\\\\n case 'linear':\\\\n return resampleBilinearInterleaved(\\\\n valueArray, inWidth, inHeight, outWidth, outHeight, samples,\\\\n );\\\\n default:\\\\n throw new Error(`Unsupported resampling method: '${method}'`);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/resample.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/rgb.js\\\":\\n/*!*****************************************!*\\\\\\n !*** ./node_modules/geotiff/src/rgb.js ***!\\n \\\\*****************************************/\\n/*! 
exports provided: fromWhiteIsZero, fromBlackIsZero, fromPalette, fromCMYK, fromYCbCr, fromCIELab */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromWhiteIsZero\\\\\\\", function() { return fromWhiteIsZero; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromBlackIsZero\\\\\\\", function() { return fromBlackIsZero; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromPalette\\\\\\\", function() { return fromPalette; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromCMYK\\\\\\\", function() { return fromCMYK; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromYCbCr\\\\\\\", function() { return fromYCbCr; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"fromCIELab\\\\\\\", function() { return fromCIELab; });\\\\nfunction fromWhiteIsZero(raster, max) {\\\\n const { width, height } = raster;\\\\n const rgbRaster = new Uint8Array(width * height * 3);\\\\n let value;\\\\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\\\\n value = 256 - (raster[i] / max * 256);\\\\n rgbRaster[j] = value;\\\\n rgbRaster[j + 1] = value;\\\\n rgbRaster[j + 2] = value;\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\nfunction fromBlackIsZero(raster, max) {\\\\n const { width, height } = raster;\\\\n const rgbRaster = new Uint8Array(width * height * 3);\\\\n let value;\\\\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\\\\n value = raster[i] / max * 256;\\\\n rgbRaster[j] = value;\\\\n rgbRaster[j + 1] = value;\\\\n rgbRaster[j + 2] = value;\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\nfunction fromPalette(raster, colorMap) {\\\\n const { width, height } = raster;\\\\n const rgbRaster = new Uint8Array(width * height * 3);\\\\n const greenOffset = colorMap.length / 3;\\\\n const blueOffset = colorMap.length / 3 * 2;\\\\n for (let i = 0, j = 0; i < raster.length; ++i, j += 3) {\\\\n const mapIndex = raster[i];\\\\n rgbRaster[j] = colorMap[mapIndex] / 65536 * 256;\\\\n rgbRaster[j + 1] = colorMap[mapIndex + greenOffset] / 65536 * 256;\\\\n rgbRaster[j + 2] = colorMap[mapIndex + blueOffset] / 65536 * 256;\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\nfunction fromCMYK(cmykRaster) {\\\\n const { width, height } = cmykRaster;\\\\n const rgbRaster = new Uint8Array(width * height * 3);\\\\n for (let i = 0, j = 0; i < cmykRaster.length; i += 4, j += 3) {\\\\n const c = cmykRaster[i];\\\\n const m = cmykRaster[i + 1];\\\\n const y = cmykRaster[i + 2];\\\\n const k = cmykRaster[i + 3];\\\\n\\\\n rgbRaster[j] = 255 * ((255 - c) / 256) * ((255 - k) / 256);\\\\n rgbRaster[j + 1] = 255 * ((255 - m) / 256) * ((255 - k) / 256);\\\\n rgbRaster[j + 2] = 255 * ((255 - y) / 256) * ((255 - k) / 256);\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\nfunction fromYCbCr(yCbCrRaster) {\\\\n const { width, height } = yCbCrRaster;\\\\n const rgbRaster = new Uint8ClampedArray(width * height * 3);\\\\n for (let i = 0, j = 0; i < yCbCrRaster.length; i += 3, j += 3) {\\\\n const y = yCbCrRaster[i];\\\\n const cb = yCbCrRaster[i + 1];\\\\n const cr = yCbCrRaster[i + 2];\\\\n\\\\n rgbRaster[j] = (y + (1.40200 * (cr - 0x80)));\\\\n rgbRaster[j + 1] = (y - (0.34414 * (cb - 0x80)) - (0.71414 * (cr - 0x80)));\\\\n rgbRaster[j + 2] = (y + (1.77200 * 
(cb - 0x80)));\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\nconst Xn = 0.95047;\\\\nconst Yn = 1.00000;\\\\nconst Zn = 1.08883;\\\\n\\\\n// from https://github.com/antimatter15/rgb-lab/blob/master/color.js\\\\n\\\\nfunction fromCIELab(cieLabRaster) {\\\\n const { width, height } = cieLabRaster;\\\\n const rgbRaster = new Uint8Array(width * height * 3);\\\\n\\\\n for (let i = 0, j = 0; i < cieLabRaster.length; i += 3, j += 3) {\\\\n const L = cieLabRaster[i + 0];\\\\n const a_ = cieLabRaster[i + 1] << 24 >> 24; // conversion from uint8 to int8\\\\n const b_ = cieLabRaster[i + 2] << 24 >> 24; // same\\\\n\\\\n let y = (L + 16) / 116;\\\\n let x = (a_ / 500) + y;\\\\n let z = y - (b_ / 200);\\\\n let r;\\\\n let g;\\\\n let b;\\\\n\\\\n x = Xn * ((x * x * x > 0.008856) ? x * x * x : (x - (16 / 116)) / 7.787);\\\\n y = Yn * ((y * y * y > 0.008856) ? y * y * y : (y - (16 / 116)) / 7.787);\\\\n z = Zn * ((z * z * z > 0.008856) ? z * z * z : (z - (16 / 116)) / 7.787);\\\\n\\\\n r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);\\\\n g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);\\\\n b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);\\\\n\\\\n r = (r > 0.0031308) ? ((1.055 * (r ** (1 / 2.4))) - 0.055) : 12.92 * r;\\\\n g = (g > 0.0031308) ? ((1.055 * (g ** (1 / 2.4))) - 0.055) : 12.92 * g;\\\\n b = (b > 0.0031308) ? ((1.055 * (b ** (1 / 2.4))) - 0.055) : 12.92 * b;\\\\n\\\\n rgbRaster[j] = Math.max(0, Math.min(1, r)) * 255;\\\\n rgbRaster[j + 1] = Math.max(0, Math.min(1, g)) * 255;\\\\n rgbRaster[j + 2] = Math.max(0, Math.min(1, b)) * 255;\\\\n }\\\\n return rgbRaster;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/rgb.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/geotiff/src/source.js\\\":\\n/*!********************************************!*\\\\\\n !*** ./node_modules/geotiff/src/source.js ***!\\n \\\\********************************************/\\n/*! exports provided: makeFetchSource, makeXHRSource, makeHttpSource, makeRemoteSource, makeBufferSource, makeFileSource, makeFileReaderSource */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeFetchSource\\\\\\\", function() { return makeFetchSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeXHRSource\\\\\\\", function() { return makeXHRSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeHttpSource\\\\\\\", function() { return makeHttpSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeRemoteSource\\\\\\\", function() { return makeRemoteSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeBufferSource\\\\\\\", function() { return makeBufferSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeFileSource\\\\\\\", function() { return makeFileSource; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"makeFileReaderSource\\\\\\\", function() { return makeFileReaderSource; });\\\\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
buffer */ \\\\\\\"buffer\\\\\\\");\\\\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(buffer__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! fs */ \\\\\\\"fs\\\\\\\");\\\\n/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(fs__WEBPACK_IMPORTED_MODULE_1__);\\\\n/* harmony import */ var http__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! http */ \\\\\\\"http\\\\\\\");\\\\n/* harmony import */ var http__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(http__WEBPACK_IMPORTED_MODULE_2__);\\\\n/* harmony import */ var https__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! https */ \\\\\\\"https\\\\\\\");\\\\n/* harmony import */ var https__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(https__WEBPACK_IMPORTED_MODULE_3__);\\\\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! url */ \\\\\\\"url\\\\\\\");\\\\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(url__WEBPACK_IMPORTED_MODULE_4__);\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nfunction readRangeFromBlocks(blocks, rangeOffset, rangeLength) {\\\\n const rangeTop = rangeOffset + rangeLength;\\\\n const rangeData = new ArrayBuffer(rangeLength);\\\\n const rangeView = new Uint8Array(rangeData);\\\\n\\\\n for (const block of blocks) {\\\\n const delta = block.offset - rangeOffset;\\\\n const topDelta = block.top - rangeTop;\\\\n let blockInnerOffset = 0;\\\\n let rangeInnerOffset = 0;\\\\n let usedBlockLength;\\\\n\\\\n if (delta < 0) {\\\\n blockInnerOffset = -delta;\\\\n } else if (delta > 0) {\\\\n rangeInnerOffset = delta;\\\\n }\\\\n\\\\n if (topDelta < 0) {\\\\n usedBlockLength = block.length - blockInnerOffset;\\\\n } else {\\\\n usedBlockLength = rangeTop - block.offset - blockInnerOffset;\\\\n }\\\\n\\\\n const blockView = new Uint8Array(block.data, blockInnerOffset, usedBlockLength);\\\\n rangeView.set(blockView, rangeInnerOffset);\\\\n }\\\\n\\\\n return rangeData;\\\\n}\\\\n\\\\n/**\\\\n * Interface for Source objects.\\\\n * @interface Source\\\\n */\\\\n\\\\n/**\\\\n * @function Source#fetch\\\\n * @summary The main method to retrieve the data from the source.\\\\n * @param {number} offset The offset to read from in the source\\\\n * @param {number} length The requested number of bytes\\\\n */\\\\n\\\\n/**\\\\n * @typedef {object} Block\\\\n * @property {ArrayBuffer} data The actual data of the block.\\\\n * @property {number} offset The actual offset of the block within the file.\\\\n * @property {number} length The actual size of the block in bytes.\\\\n */\\\\n\\\\n/**\\\\n * Callback type for sources to request patches of data.\\\\n * @callback requestCallback\\\\n * @async\\\\n * @param {number} offset The offset within the file.\\\\n * @param {number} length The desired length of data to be read.\\\\n * @returns {Promise<Block>} The block of data.\\\\n */\\\\n\\\\n/**\\\\n * @module source\\\\n */\\\\n\\\\n/*\\\\n * Split a list of identifiers to form groups of coherent ones\\\\n */\\\\nfunction getCoherentBlockGroups(blockIds) {\\\\n if (blockIds.length === 0) {\\\\n return [];\\\\n }\\\\n\\\\n const groups = [];\\\\n let current = [];\\\\n groups.push(current);\\\\n\\\\n for (let i = 0; i < blockIds.length; ++i) {\\\\n if (i === 0 || blockIds[i] === blockIds[i - 1] + 1) {\\\\n 
current.push(blockIds[i]);\\\\n } else {\\\\n current = [blockIds[i]];\\\\n groups.push(current);\\\\n }\\\\n }\\\\n return groups;\\\\n}\\\\n\\\\n\\\\n/*\\\\n * Promisified wrapper around 'setTimeout' to allow 'await'\\\\n */\\\\nasync function wait(milliseconds) {\\\\n return new Promise((resolve) => setTimeout(resolve, milliseconds));\\\\n}\\\\n\\\\n/**\\\\n * BlockedSource - an abstraction of (remote) files.\\\\n * @implements Source\\\\n */\\\\nclass BlockedSource {\\\\n /**\\\\n * @param {requestCallback} retrievalFunction Callback function to request data\\\\n * @param {object} options Additional options\\\\n * @param {object} options.blockSize Size of blocks to be fetched\\\\n */\\\\n constructor(retrievalFunction, { blockSize = 65536 } = {}) {\\\\n this.retrievalFunction = retrievalFunction;\\\\n this.blockSize = blockSize;\\\\n\\\\n // currently running block requests\\\\n this.blockRequests = new Map();\\\\n\\\\n // already retrieved blocks\\\\n this.blocks = new Map();\\\\n\\\\n // block ids waiting for a batched request. Either a Set or null\\\\n this.blockIdsAwaitingRequest = null;\\\\n }\\\\n\\\\n /**\\\\n * Fetch a subset of the file.\\\\n * @param {number} offset The offset within the file to read from.\\\\n * @param {number} length The length in bytes to read from.\\\\n * @returns {ArrayBuffer} The subset of the file.\\\\n */\\\\n async fetch(offset, length, immediate = false) {\\\\n const top = offset + length;\\\\n\\\\n // calculate what blocks intersect the specified range (offset + length)\\\\n // determine what blocks are already stored or beeing requested\\\\n const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;\\\\n const allBlockIds = [];\\\\n const missingBlockIds = [];\\\\n const blockRequests = [];\\\\n\\\\n for (let current = firstBlockOffset; current < top; current += this.blockSize) {\\\\n const blockId = Math.floor(current / this.blockSize);\\\\n if (!this.blocks.has(blockId) && !this.blockRequests.has(blockId)) {\\\\n missingBlockIds.push(blockId);\\\\n }\\\\n if (this.blockRequests.has(blockId)) {\\\\n blockRequests.push(this.blockRequests.get(blockId));\\\\n }\\\\n allBlockIds.push(blockId);\\\\n }\\\\n\\\\n // determine whether there are already blocks in the queue to be requested\\\\n // if so, add the missing blocks to this list\\\\n if (!this.blockIdsAwaitingRequest) {\\\\n this.blockIdsAwaitingRequest = new Set(missingBlockIds);\\\\n } else {\\\\n for (let i = 0; i < missingBlockIds.length; ++i) {\\\\n const id = missingBlockIds[i];\\\\n this.blockIdsAwaitingRequest.add(id);\\\\n }\\\\n }\\\\n\\\\n // in immediate mode, we don't want to wait for possible additional requests coming in\\\\n if (!immediate) {\\\\n await wait();\\\\n }\\\\n\\\\n // determine if we are the thread to start the requests.\\\\n if (this.blockIdsAwaitingRequest) {\\\\n // get all coherent blocks as groups to be requested in a single request\\\\n const groups = getCoherentBlockGroups(\\\\n Array.from(this.blockIdsAwaitingRequest).sort(),\\\\n );\\\\n\\\\n // iterate over all blocks\\\\n for (const group of groups) {\\\\n // fetch a group as in a single request\\\\n const request = this.requestData(\\\\n group[0] * this.blockSize, group.length * this.blockSize,\\\\n );\\\\n\\\\n // for each block in the request, make a small 'splitter',\\\\n // i.e: wait for the request to finish, then cut out the bytes for\\\\n // that block and store it there.\\\\n // we keep that as a promise in 'blockRequests' to allow waiting on\\\\n // a single block.\\\\n for 
(let i = 0; i < group.length; ++i) {\\\\n const id = group[i];\\\\n this.blockRequests.set(id, (async () => {\\\\n const response = await request;\\\\n const o = i * this.blockSize;\\\\n const t = Math.min(o + this.blockSize, response.data.byteLength);\\\\n const data = response.data.slice(o, t);\\\\n this.blockRequests.delete(id);\\\\n this.blocks.set(id, {\\\\n data,\\\\n offset: response.offset + o,\\\\n length: data.byteLength,\\\\n top: response.offset + t,\\\\n });\\\\n })());\\\\n }\\\\n }\\\\n this.blockIdsAwaitingRequest = null;\\\\n }\\\\n\\\\n // get a list of currently running requests for the blocks still missing\\\\n const missingRequests = [];\\\\n for (const blockId of missingBlockIds) {\\\\n if (this.blockRequests.has(blockId)) {\\\\n missingRequests.push(this.blockRequests.get(blockId));\\\\n }\\\\n }\\\\n\\\\n // wait for all missing requests to finish\\\\n await Promise.all(missingRequests);\\\\n await Promise.all(blockRequests);\\\\n\\\\n // now get all blocks for the request and return a summary buffer\\\\n const blocks = allBlockIds.map((id) => this.blocks.get(id));\\\\n return readRangeFromBlocks(blocks, offset, length);\\\\n }\\\\n\\\\n async requestData(requestedOffset, requestedLength) {\\\\n const response = await this.retrievalFunction(requestedOffset, requestedLength);\\\\n if (!response.length) {\\\\n response.length = response.data.byteLength;\\\\n } else if (response.length !== response.data.byteLength) {\\\\n response.data = response.data.slice(0, response.length);\\\\n }\\\\n response.top = response.offset + response.length;\\\\n return response;\\\\n }\\\\n}\\\\n\\\\n/**\\\\n * Create a new source to read from a remote file using the\\\\n * [fetch]{@link https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API} API.\\\\n * @param {string} url The URL to send requests to.\\\\n * @param {Object} [options] Additional options.\\\\n * @param {Number} [options.blockSize] The block size to use.\\\\n * @param {object} [options.headers] Additional headers to be sent to the server.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeFetchSource(url, { headers = {}, blockSize } = {}) {\\\\n return new BlockedSource(async (offset, length) => {\\\\n const response = await fetch(url, {\\\\n headers: {\\\\n ...headers, Range: `bytes=${offset}-${offset + length - 1}`,\\\\n },\\\\n });\\\\n\\\\n // check the response was okay and if the server actually understands range requests\\\\n if (!response.ok) {\\\\n throw new Error('Error fetching data.');\\\\n } else if (response.status === 206) {\\\\n const data = response.arrayBuffer\\\\n ? await response.arrayBuffer() : (await response.buffer()).buffer;\\\\n return {\\\\n data,\\\\n offset,\\\\n length,\\\\n };\\\\n } else {\\\\n const data = response.arrayBuffer\\\\n ? 
await response.arrayBuffer() : (await response.buffer()).buffer;\\\\n return {\\\\n data,\\\\n offset: 0,\\\\n length: data.byteLength,\\\\n };\\\\n }\\\\n }, { blockSize });\\\\n}\\\\n\\\\n/**\\\\n * Create a new source to read from a remote file using the\\\\n * [XHR]{@link https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest} API.\\\\n * @param {string} url The URL to send requests to.\\\\n * @param {Object} [options] Additional options.\\\\n * @param {Number} [options.blockSize] The block size to use.\\\\n * @param {object} [options.headers] Additional headers to be sent to the server.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeXHRSource(url, { headers = {}, blockSize } = {}) {\\\\n return new BlockedSource(async (offset, length) => {\\\\n return new Promise((resolve, reject) => {\\\\n const request = new XMLHttpRequest();\\\\n request.open('GET', url);\\\\n request.responseType = 'arraybuffer';\\\\n const requestHeaders = { ...headers, Range: `bytes=${offset}-${offset + length - 1}` };\\\\n for (const [key, value] of Object.entries(requestHeaders)) {\\\\n request.setRequestHeader(key, value);\\\\n }\\\\n\\\\n request.onload = () => {\\\\n const data = request.response;\\\\n if (request.status === 206) {\\\\n resolve({\\\\n data,\\\\n offset,\\\\n length,\\\\n });\\\\n } else {\\\\n resolve({\\\\n data,\\\\n offset: 0,\\\\n length: data.byteLength,\\\\n });\\\\n }\\\\n };\\\\n request.onerror = reject;\\\\n request.send();\\\\n });\\\\n }, { blockSize });\\\\n}\\\\n\\\\n/**\\\\n * Create a new source to read from a remote file using the node\\\\n * [http]{@link https://nodejs.org/api/http.html} API.\\\\n * @param {string} url The URL to send requests to.\\\\n * @param {Object} [options] Additional options.\\\\n * @param {Number} [options.blockSize] The block size to use.\\\\n * @param {object} [options.headers] Additional headers to be sent to the server.\\\\n */\\\\nfunction makeHttpSource(url, { headers = {}, blockSize } = {}) {\\\\n return new BlockedSource(async (offset, length) => new Promise((resolve, reject) => {\\\\n const parsed = url__WEBPACK_IMPORTED_MODULE_4___default.a.parse(url);\\\\n const request = (parsed.protocol === 'http:' ? http__WEBPACK_IMPORTED_MODULE_2___default.a : https__WEBPACK_IMPORTED_MODULE_3___default.a).get(\\\\n { ...parsed,\\\\n headers: {\\\\n ...headers, Range: `bytes=${offset}-${offset + length - 1}`,\\\\n } }, (result) => {\\\\n const chunks = [];\\\\n // collect chunks\\\\n result.on('data', (chunk) => {\\\\n chunks.push(chunk);\\\\n });\\\\n\\\\n // concatenate all chunks and resolve the promise with the resulting buffer\\\\n result.on('end', () => {\\\\n const data = buffer__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Buffer\\\\\\\"].concat(chunks).buffer;\\\\n resolve({\\\\n data,\\\\n offset,\\\\n length: data.byteLength,\\\\n });\\\\n });\\\\n },\\\\n );\\\\n request.on('error', reject);\\\\n }), { blockSize });\\\\n}\\\\n\\\\n/**\\\\n * Create a new source to read from a remote file. 
Uses either XHR, fetch or nodes http API.\\\\n * @param {string} url The URL to send requests to.\\\\n * @param {Object} [options] Additional options.\\\\n * @param {Boolean} [options.forceXHR] Force the usage of XMLHttpRequest.\\\\n * @param {Number} [options.blockSize] The block size to use.\\\\n * @param {object} [options.headers] Additional headers to be sent to the server.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeRemoteSource(url, options) {\\\\n const { forceXHR } = options;\\\\n if (typeof fetch === 'function' && !forceXHR) {\\\\n return makeFetchSource(url, options);\\\\n }\\\\n if (typeof XMLHttpRequest !== 'undefined') {\\\\n return makeXHRSource(url, options);\\\\n }\\\\n if (http__WEBPACK_IMPORTED_MODULE_2___default.a.get) {\\\\n return makeHttpSource(url, options);\\\\n }\\\\n throw new Error('No remote source available');\\\\n}\\\\n\\\\n/**\\\\n * Create a new source to read from a local\\\\n * [ArrayBuffer]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer}.\\\\n * @param {ArrayBuffer} arrayBuffer The ArrayBuffer to parse the GeoTIFF from.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeBufferSource(arrayBuffer) {\\\\n return {\\\\n async fetch(offset, length) {\\\\n return arrayBuffer.slice(offset, offset + length);\\\\n },\\\\n };\\\\n}\\\\n\\\\nfunction closeAsync(fd) {\\\\n return new Promise((resolve, reject) => {\\\\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"close\\\\\\\"])(fd, err => {\\\\n if (err) {\\\\n reject(err)\\\\n } else {\\\\n resolve()\\\\n }\\\\n });\\\\n });\\\\n}\\\\n\\\\nfunction openAsync(path, flags, mode = undefined) {\\\\n return new Promise((resolve, reject) => {\\\\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"open\\\\\\\"])(path, flags, mode, (err, fd) => {\\\\n if (err) {\\\\n reject(err);\\\\n } else {\\\\n resolve(fd);\\\\n }\\\\n });\\\\n });\\\\n}\\\\n\\\\nfunction readAsync(...args) {\\\\n return new Promise((resolve, reject) => {\\\\n Object(fs__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"read\\\\\\\"])(...args, (err, bytesRead, buffer) => {\\\\n if (err) {\\\\n reject(err);\\\\n } else {\\\\n resolve({ bytesRead, buffer });\\\\n }\\\\n });\\\\n });\\\\n}\\\\n\\\\n/**\\\\n * Creates a new source using the node filesystem API.\\\\n * @param {string} path The path to the file in the local filesystem.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeFileSource(path) {\\\\n const fileOpen = openAsync(path, 'r');\\\\n\\\\n return {\\\\n async fetch(offset, length) {\\\\n const fd = await fileOpen;\\\\n const { buffer } = await readAsync(fd, buffer__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Buffer\\\\\\\"].alloc(length), 0, length, offset);\\\\n return buffer.buffer;\\\\n },\\\\n async close() {\\\\n const fd = await fileOpen;\\\\n return await closeAsync(fd);\\\\n },\\\\n };\\\\n}\\\\n\\\\n/**\\\\n * Create a new source from a given file/blob.\\\\n * @param {Blob} file The file or blob to read from.\\\\n * @returns The constructed source\\\\n */\\\\nfunction makeFileReaderSource(file) {\\\\n return {\\\\n async fetch(offset, length) {\\\\n return new Promise((resolve, reject) => {\\\\n const blob = file.slice(offset, offset + length);\\\\n const reader = new FileReader();\\\\n reader.onload = (event) => resolve(event.target.result);\\\\n reader.onerror = reject;\\\\n reader.readAsArrayBuffer(blob);\\\\n });\\\\n },\\\\n };\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/source.js?\\\");\\n\\n/***/ }),\\n\\n/***/ 
\\\"./node_modules/geotiff/src/utils.js\\\":\\n/*!*******************************************!*\\\\\\n !*** ./node_modules/geotiff/src/utils.js ***!\\n \\\\*******************************************/\\n/*! exports provided: assign, chunk, endsWith, forEach, invert, range, times, toArray, toArrayRecursively */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"assign\\\\\\\", function() { return assign; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"chunk\\\\\\\", function() { return chunk; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"endsWith\\\\\\\", function() { return endsWith; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"forEach\\\\\\\", function() { return forEach; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"invert\\\\\\\", function() { return invert; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"range\\\\\\\", function() { return range; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"times\\\\\\\", function() { return times; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"toArray\\\\\\\", function() { return toArray; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"toArrayRecursively\\\\\\\", function() { return toArrayRecursively; });\\\\nfunction assign(target, source) {\\\\n for (const key in source) {\\\\n if (source.hasOwnProperty(key)) {\\\\n target[key] = source[key];\\\\n }\\\\n }\\\\n}\\\\n\\\\nfunction chunk(iterable, length) {\\\\n const results = [];\\\\n const lengthOfIterable = iterable.length;\\\\n for (let i = 0; i < lengthOfIterable; i += length) {\\\\n const chunked = [];\\\\n for (let ci = i; ci < i + length; ci++) {\\\\n chunked.push(iterable[ci]);\\\\n }\\\\n results.push(chunked);\\\\n }\\\\n return results;\\\\n}\\\\n\\\\nfunction endsWith(string, expectedEnding) {\\\\n if (string.length < expectedEnding.length) {\\\\n return false;\\\\n }\\\\n const actualEnding = string.substr(string.length - expectedEnding.length);\\\\n return actualEnding === expectedEnding;\\\\n}\\\\n\\\\nfunction forEach(iterable, func) {\\\\n const { length } = iterable;\\\\n for (let i = 0; i < length; i++) {\\\\n func(iterable[i], i);\\\\n }\\\\n}\\\\n\\\\nfunction invert(oldObj) {\\\\n const newObj = {};\\\\n for (const key in oldObj) {\\\\n if (oldObj.hasOwnProperty(key)) {\\\\n const value = oldObj[key];\\\\n newObj[value] = key;\\\\n }\\\\n }\\\\n return newObj;\\\\n}\\\\n\\\\nfunction range(n) {\\\\n const results = [];\\\\n for (let i = 0; i < n; i++) {\\\\n results.push(i);\\\\n }\\\\n return results;\\\\n}\\\\n\\\\nfunction times(numTimes, func) {\\\\n const results = [];\\\\n for (let i = 0; i < numTimes; i++) {\\\\n results.push(func(i));\\\\n }\\\\n return results;\\\\n}\\\\n\\\\nfunction toArray(iterable) {\\\\n const results = [];\\\\n const { length } = iterable;\\\\n for (let i = 0; i < length; i++) {\\\\n results.push(iterable[i]);\\\\n }\\\\n return results;\\\\n}\\\\n\\\\nfunction toArrayRecursively(input) {\\\\n if (input.length) {\\\\n return toArray(input).map(toArrayRecursively);\\\\n }\\\\n return input;\\\\n}\\\\n\\\\n\\\\n//# 
sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/utils.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/inherits/inherits.js\\\":\\n/*!*******************************************!*\\\\\\n !*** ./node_modules/inherits/inherits.js ***!\\n \\\\*******************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"try {\\\\n var util = __webpack_require__(/*! util */ \\\\\\\"util\\\\\\\");\\\\n /* istanbul ignore next */\\\\n if (typeof util.inherits !== 'function') throw '';\\\\n module.exports = util.inherits;\\\\n} catch (e) {\\\\n /* istanbul ignore next */\\\\n module.exports = __webpack_require__(/*! ./inherits_browser.js */ \\\\\\\"./node_modules/inherits/inherits_browser.js\\\\\\\");\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/inherits/inherits.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/inherits/inherits_browser.js\\\":\\n/*!***************************************************!*\\\\\\n !*** ./node_modules/inherits/inherits_browser.js ***!\\n \\\\***************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"if (typeof Object.create === 'function') {\\\\n // implementation from standard node.js 'util' module\\\\n module.exports = function inherits(ctor, superCtor) {\\\\n if (superCtor) {\\\\n ctor.super_ = superCtor\\\\n ctor.prototype = Object.create(superCtor.prototype, {\\\\n constructor: {\\\\n value: ctor,\\\\n enumerable: false,\\\\n writable: true,\\\\n configurable: true\\\\n }\\\\n })\\\\n }\\\\n };\\\\n} else {\\\\n // old school shim for old browsers\\\\n module.exports = function inherits(ctor, superCtor) {\\\\n if (superCtor) {\\\\n ctor.super_ = superCtor\\\\n var TempCtor = function () {}\\\\n TempCtor.prototype = superCtor.prototype\\\\n ctor.prototype = new TempCtor()\\\\n ctor.prototype.constructor = ctor\\\\n }\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/inherits/inherits_browser.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/is-observable/index.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/is-observable/index.js ***!\\n \\\\*********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nmodule.exports = value => {\\\\n\\\\tif (!value) {\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\t// eslint-disable-next-line no-use-extend-native/no-use-extend-native\\\\n\\\\tif (typeof Symbol.observable === 'symbol' && typeof value[Symbol.observable] === 'function') {\\\\n\\\\t\\\\t// eslint-disable-next-line no-use-extend-native/no-use-extend-native\\\\n\\\\t\\\\treturn value === value[Symbol.observable]();\\\\n\\\\t}\\\\n\\\\n\\\\tif (typeof value['@@observable'] === 'function') {\\\\n\\\\t\\\\treturn value === value['@@observable']();\\\\n\\\\t}\\\\n\\\\n\\\\treturn false;\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/is-observable/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/isarray/index.js\\\":\\n/*!***************************************!*\\\\\\n !*** ./node_modules/isarray/index.js ***!\\n \\\\***************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"var toString = {}.toString;\\\\n\\\\nmodule.exports = Array.isArray || function (arr) {\\\\n return toString.call(arr) == '[object Array]';\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/isarray/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/_scheduler.js\\\":\\n/*!************************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/_scheduler.js ***!\\n \\\\************************************************************/\\n/*! exports provided: AsyncSerialScheduler */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"AsyncSerialScheduler\\\\\\\", function() { return AsyncSerialScheduler; });\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\nclass AsyncSerialScheduler {\\\\n constructor(observer) {\\\\n this._baseObserver = observer;\\\\n this._pendingPromises = new Set();\\\\n }\\\\n complete() {\\\\n Promise.all(this._pendingPromises)\\\\n .then(() => this._baseObserver.complete())\\\\n .catch(error => this._baseObserver.error(error));\\\\n }\\\\n error(error) {\\\\n this._baseObserver.error(error);\\\\n }\\\\n schedule(task) {\\\\n const prevPromisesCompletion = Promise.all(this._pendingPromises);\\\\n const values = [];\\\\n const next = (value) => values.push(value);\\\\n const promise = Promise.resolve()\\\\n .then(() => __awaiter(this, void 0, void 0, function* () {\\\\n yield prevPromisesCompletion;\\\\n yield task(next);\\\\n this._pendingPromises.delete(promise);\\\\n for (const value of values) {\\\\n this._baseObserver.next(value);\\\\n }\\\\n }))\\\\n .catch(error => {\\\\n this._pendingPromises.delete(promise);\\\\n this._baseObserver.error(error);\\\\n });\\\\n this._pendingPromises.add(promise);\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_scheduler.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/_symbols.js\\\":\\n/*!**********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/_symbols.js ***!\\n \\\\**********************************************************/\\n/*! 
exports provided: hasSymbols, hasSymbol, getSymbol, registerObservableSymbol */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"hasSymbols\\\\\\\", function() { return hasSymbols; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"hasSymbol\\\\\\\", function() { return hasSymbol; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getSymbol\\\\\\\", function() { return getSymbol; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"registerObservableSymbol\\\\\\\", function() { return registerObservableSymbol; });\\\\nconst hasSymbols = () => typeof Symbol === \\\\\\\"function\\\\\\\";\\\\nconst hasSymbol = (name) => hasSymbols() && Boolean(Symbol[name]);\\\\nconst getSymbol = (name) => hasSymbol(name) ? Symbol[name] : \\\\\\\"@@\\\\\\\" + name;\\\\nfunction registerObservableSymbol() {\\\\n if (hasSymbols() && !hasSymbol(\\\\\\\"observable\\\\\\\")) {\\\\n Symbol.observable = Symbol(\\\\\\\"observable\\\\\\\");\\\\n }\\\\n}\\\\nif (!hasSymbol(\\\\\\\"asyncIterator\\\\\\\")) {\\\\n Symbol.asyncIterator = Symbol.asyncIterator || Symbol.for(\\\\\\\"Symbol.asyncIterator\\\\\\\");\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_symbols.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/_util.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/_util.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: isAsyncIterator, isIterator */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isAsyncIterator\\\\\\\", function() { return isAsyncIterator; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isIterator\\\\\\\", function() { return isIterator; });\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_symbols */ \\\\\\\"./node_modules/observable-fns/dist.esm/_symbols.js\\\\\\\");\\\\n/// <reference lib=\\\\\\\"es2018\\\\\\\" />\\\\n\\\\nfunction isAsyncIterator(thing) {\\\\n return thing && Object(_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"hasSymbol\\\\\\\"])(\\\\\\\"asyncIterator\\\\\\\") && thing[Symbol.asyncIterator];\\\\n}\\\\nfunction isIterator(thing) {\\\\n return thing && Object(_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"hasSymbol\\\\\\\"])(\\\\\\\"iterator\\\\\\\") && thing[Symbol.iterator];\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/_util.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/filter.js\\\":\\n/*!********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/filter.js ***!\\n \\\\********************************************************/\\n/*! 
exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \\\\\\\"./node_modules/observable-fns/dist.esm/_scheduler.js\\\\\\\");\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\n/**\\\\n * Filters the values emitted by another observable.\\\\n * To be applied to an input observable using `pipe()`.\\\\n */\\\\nfunction filter(test) {\\\\n return (observable) => {\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"](observer => {\\\\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"AsyncSerialScheduler\\\\\\\"](observer);\\\\n const subscription = observable.subscribe({\\\\n complete() {\\\\n scheduler.complete();\\\\n },\\\\n error(error) {\\\\n scheduler.error(error);\\\\n },\\\\n next(input) {\\\\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\\\\n if (yield test(input)) {\\\\n next(input);\\\\n }\\\\n }));\\\\n }\\\\n });\\\\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"])(subscription);\\\\n });\\\\n };\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (filter);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/filter.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/flatMap.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/flatMap.js ***!\\n \\\\*********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \\\\\\\"./node_modules/observable-fns/dist.esm/_scheduler.js\\\\\\\");\\\\n/* harmony import */ var _util__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./_util */ \\\\\\\"./node_modules/observable-fns/dist.esm/_util.js\\\\\\\");\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\nvar __asyncValues = (undefined && undefined.__asyncValues) || function (o) {\\\\n if (!Symbol.asyncIterator) throw new TypeError(\\\\\\\"Symbol.asyncIterator is not defined.\\\\\\\");\\\\n var m = o[Symbol.asyncIterator], i;\\\\n return m ? m.call(o) : (o = typeof __values === \\\\\\\"function\\\\\\\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\\\\\\\"next\\\\\\\"), verb(\\\\\\\"throw\\\\\\\"), verb(\\\\\\\"return\\\\\\\"), i[Symbol.asyncIterator] = function () { return this; }, i);\\\\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\\\\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\\\\n};\\\\n\\\\n\\\\n\\\\n\\\\n/**\\\\n * Maps the values emitted by another observable. In contrast to `map()`\\\\n * the `mapper` function returns an array of values that will be emitted\\\\n * separately.\\\\n * Use `flatMap()` to map input values to zero, one or multiple output\\\\n * values. 
To be applied to an input observable using `pipe()`.\\\\n */\\\\nfunction flatMap(mapper) {\\\\n return (observable) => {\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"](observer => {\\\\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"AsyncSerialScheduler\\\\\\\"](observer);\\\\n const subscription = observable.subscribe({\\\\n complete() {\\\\n scheduler.complete();\\\\n },\\\\n error(error) {\\\\n scheduler.error(error);\\\\n },\\\\n next(input) {\\\\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\\\\n var e_1, _a;\\\\n const mapped = yield mapper(input);\\\\n if (Object(_util__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"isIterator\\\\\\\"])(mapped) || Object(_util__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"isAsyncIterator\\\\\\\"])(mapped)) {\\\\n try {\\\\n for (var mapped_1 = __asyncValues(mapped), mapped_1_1; mapped_1_1 = yield mapped_1.next(), !mapped_1_1.done;) {\\\\n const element = mapped_1_1.value;\\\\n next(element);\\\\n }\\\\n }\\\\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\\\\n finally {\\\\n try {\\\\n if (mapped_1_1 && !mapped_1_1.done && (_a = mapped_1.return)) yield _a.call(mapped_1);\\\\n }\\\\n finally { if (e_1) throw e_1.error; }\\\\n }\\\\n }\\\\n else {\\\\n mapped.map(output => next(output));\\\\n }\\\\n }));\\\\n }\\\\n });\\\\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"default\\\\\\\"])(subscription);\\\\n });\\\\n };\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (flatMap);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/flatMap.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/index.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/index.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: filter, flatMap, interval, map, merge, multicast, Observable, scan, Subject, unsubscribe */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _filter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./filter */ \\\\\\\"./node_modules/observable-fns/dist.esm/filter.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"filter\\\\\\\", function() { return _filter__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _flatMap__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./flatMap */ \\\\\\\"./node_modules/observable-fns/dist.esm/flatMap.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"flatMap\\\\\\\", function() { return _flatMap__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _interval__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./interval */ \\\\\\\"./node_modules/observable-fns/dist.esm/interval.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"interval\\\\\\\", function() { return _interval__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _map__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
./map */ \\\\\\\"./node_modules/observable-fns/dist.esm/map.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"map\\\\\\\", function() { return _map__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _merge__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./merge */ \\\\\\\"./node_modules/observable-fns/dist.esm/merge.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"merge\\\\\\\", function() { return _merge__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _multicast__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./multicast */ \\\\\\\"./node_modules/observable-fns/dist.esm/multicast.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"multicast\\\\\\\", function() { return _multicast__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Observable\\\\\\\", function() { return _observable__WEBPACK_IMPORTED_MODULE_6__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _scan__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./scan */ \\\\\\\"./node_modules/observable-fns/dist.esm/scan.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"scan\\\\\\\", function() { return _scan__WEBPACK_IMPORTED_MODULE_7__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _subject__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./subject */ \\\\\\\"./node_modules/observable-fns/dist.esm/subject.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Subject\\\\\\\", function() { return _subject__WEBPACK_IMPORTED_MODULE_8__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"unsubscribe\\\\\\\", function() { return _unsubscribe__WEBPACK_IMPORTED_MODULE_9__[\\\\\\\"default\\\\\\\"]; });\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/interval.js\\\":\\n/*!**********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/interval.js ***!\\n \\\\**********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return interval; });\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n\\\\n/**\\\\n * Creates an observable that yields a new value every `period` milliseconds.\\\\n * The first value emitted is 0, then 1, 2, etc. The first value is not emitted\\\\n * immediately, but after the first interval.\\\\n */\\\\nfunction interval(period) {\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Observable\\\\\\\"](observer => {\\\\n let counter = 0;\\\\n const handle = setInterval(() => {\\\\n observer.next(counter++);\\\\n }, period);\\\\n return () => clearInterval(handle);\\\\n });\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/interval.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/map.js\\\":\\n/*!*****************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/map.js ***!\\n \\\\*****************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \\\\\\\"./node_modules/observable-fns/dist.esm/_scheduler.js\\\\\\\");\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\n/**\\\\n * Maps the values emitted by another observable to different values.\\\\n * To be applied to an input observable using `pipe()`.\\\\n */\\\\nfunction map(mapper) {\\\\n return (observable) => {\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"](observer => {\\\\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"AsyncSerialScheduler\\\\\\\"](observer);\\\\n const subscription = observable.subscribe({\\\\n complete() {\\\\n scheduler.complete();\\\\n },\\\\n error(error) {\\\\n scheduler.error(error);\\\\n },\\\\n next(input) {\\\\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\\\\n const mapped = yield mapper(input);\\\\n next(mapped);\\\\n }));\\\\n }\\\\n });\\\\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"])(subscription);\\\\n });\\\\n };\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (map);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/map.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/merge.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/merge.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\n\\\\n\\\\nfunction merge(...observables) {\\\\n if (observables.length === 0) {\\\\n return _observable__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Observable\\\\\\\"].from([]);\\\\n }\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Observable\\\\\\\"](observer => {\\\\n let completed = 0;\\\\n const subscriptions = observables.map(input => {\\\\n return input.subscribe({\\\\n error(error) {\\\\n observer.error(error);\\\\n unsubscribeAll();\\\\n },\\\\n next(value) {\\\\n observer.next(value);\\\\n },\\\\n complete() {\\\\n if (++completed === observables.length) {\\\\n observer.complete();\\\\n unsubscribeAll();\\\\n }\\\\n }\\\\n });\\\\n });\\\\n const unsubscribeAll = () => {\\\\n subscriptions.forEach(subscription => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"])(subscription));\\\\n };\\\\n return unsubscribeAll;\\\\n });\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (merge);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/merge.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/multicast.js\\\":\\n/*!***********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/multicast.js ***!\\n \\\\***********************************************************/\\n/*! 
exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _subject__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./subject */ \\\\\\\"./node_modules/observable-fns/dist.esm/subject.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\n\\\\n\\\\n\\\\n// TODO: Subject already creates additional observables \\\\\\\"under the hood\\\\\\\",\\\\n// now we introduce even more. A true native MulticastObservable\\\\n// would be preferable.\\\\n/**\\\\n * Takes a \\\\\\\"cold\\\\\\\" observable and returns a wrapping \\\\\\\"hot\\\\\\\" observable that\\\\n * proxies the input observable's values and errors.\\\\n *\\\\n * An observable is called \\\\\\\"cold\\\\\\\" when its initialization function is run\\\\n * for each new subscriber. This is how observable-fns's `Observable`\\\\n * implementation works.\\\\n *\\\\n * A hot observable is an observable where new subscribers subscribe to\\\\n * the upcoming values of an already-initialiazed observable.\\\\n *\\\\n * The multicast observable will lazily subscribe to the source observable\\\\n * once it has its first own subscriber and will unsubscribe from the\\\\n * source observable when its last own subscriber unsubscribed.\\\\n */\\\\nfunction multicast(coldObservable) {\\\\n const subject = new _subject__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"]();\\\\n let sourceSubscription;\\\\n let subscriberCount = 0;\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"](observer => {\\\\n // Init source subscription lazily\\\\n if (!sourceSubscription) {\\\\n sourceSubscription = coldObservable.subscribe(subject);\\\\n }\\\\n // Pipe all events from `subject` into this observable\\\\n const subscription = subject.subscribe(observer);\\\\n subscriberCount++;\\\\n return () => {\\\\n subscriberCount--;\\\\n subscription.unsubscribe();\\\\n // Close source subscription once last subscriber has unsubscribed\\\\n if (subscriberCount === 0) {\\\\n Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"])(sourceSubscription);\\\\n sourceSubscription = undefined;\\\\n }\\\\n };\\\\n });\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (multicast);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/multicast.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/observable.js\\\":\\n/*!************************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/observable.js ***!\\n \\\\************************************************************/\\n/*! 
exports provided: Subscription, SubscriptionObserver, Observable, default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Subscription\\\\\\\", function() { return Subscription; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"SubscriptionObserver\\\\\\\", function() { return SubscriptionObserver; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Observable\\\\\\\", function() { return Observable; });\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./symbols */ \\\\\\\"./node_modules/observable-fns/dist.esm/symbols.js\\\\\\\");\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./_symbols */ \\\\\\\"./node_modules/observable-fns/dist.esm/_symbols.js\\\\\\\");\\\\n/**\\\\n * Based on <https://raw.githubusercontent.com/zenparsing/zen-observable/master/src/Observable.js>\\\\n * At commit: f63849a8c60af5d514efc8e9d6138d8273c49ad6\\\\n */\\\\n\\\\n\\\\nconst SymbolIterator = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"getSymbol\\\\\\\"])(\\\\\\\"iterator\\\\\\\");\\\\nconst SymbolObservable = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"getSymbol\\\\\\\"])(\\\\\\\"observable\\\\\\\");\\\\nconst SymbolSpecies = Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"getSymbol\\\\\\\"])(\\\\\\\"species\\\\\\\");\\\\n// === Abstract Operations ===\\\\nfunction getMethod(obj, key) {\\\\n const value = obj[key];\\\\n if (value == null) {\\\\n return undefined;\\\\n }\\\\n if (typeof value !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(value + \\\\\\\" is not a function\\\\\\\");\\\\n }\\\\n return value;\\\\n}\\\\nfunction getSpecies(obj) {\\\\n let ctor = obj.constructor;\\\\n if (ctor !== undefined) {\\\\n ctor = ctor[SymbolSpecies];\\\\n if (ctor === null) {\\\\n ctor = undefined;\\\\n }\\\\n }\\\\n return ctor !== undefined ? 
ctor : Observable;\\\\n}\\\\nfunction isObservable(x) {\\\\n return x instanceof Observable; // SPEC: Brand check\\\\n}\\\\nfunction hostReportError(error) {\\\\n if (hostReportError.log) {\\\\n hostReportError.log(error);\\\\n }\\\\n else {\\\\n setTimeout(() => { throw error; }, 0);\\\\n }\\\\n}\\\\nfunction enqueue(fn) {\\\\n Promise.resolve().then(() => {\\\\n try {\\\\n fn();\\\\n }\\\\n catch (e) {\\\\n hostReportError(e);\\\\n }\\\\n });\\\\n}\\\\nfunction cleanupSubscription(subscription) {\\\\n const cleanup = subscription._cleanup;\\\\n if (cleanup === undefined) {\\\\n return;\\\\n }\\\\n subscription._cleanup = undefined;\\\\n if (!cleanup) {\\\\n return;\\\\n }\\\\n try {\\\\n if (typeof cleanup === \\\\\\\"function\\\\\\\") {\\\\n cleanup();\\\\n }\\\\n else {\\\\n const unsubscribe = getMethod(cleanup, \\\\\\\"unsubscribe\\\\\\\");\\\\n if (unsubscribe) {\\\\n unsubscribe.call(cleanup);\\\\n }\\\\n }\\\\n }\\\\n catch (e) {\\\\n hostReportError(e);\\\\n }\\\\n}\\\\nfunction closeSubscription(subscription) {\\\\n subscription._observer = undefined;\\\\n subscription._queue = undefined;\\\\n subscription._state = \\\\\\\"closed\\\\\\\";\\\\n}\\\\nfunction flushSubscription(subscription) {\\\\n const queue = subscription._queue;\\\\n if (!queue) {\\\\n return;\\\\n }\\\\n subscription._queue = undefined;\\\\n subscription._state = \\\\\\\"ready\\\\\\\";\\\\n for (const item of queue) {\\\\n notifySubscription(subscription, item.type, item.value);\\\\n if (subscription._state === \\\\\\\"closed\\\\\\\") {\\\\n break;\\\\n }\\\\n }\\\\n}\\\\nfunction notifySubscription(subscription, type, value) {\\\\n subscription._state = \\\\\\\"running\\\\\\\";\\\\n const observer = subscription._observer;\\\\n try {\\\\n const m = observer ? getMethod(observer, type) : undefined;\\\\n switch (type) {\\\\n case \\\\\\\"next\\\\\\\":\\\\n if (m)\\\\n m.call(observer, value);\\\\n break;\\\\n case \\\\\\\"error\\\\\\\":\\\\n closeSubscription(subscription);\\\\n if (m)\\\\n m.call(observer, value);\\\\n else\\\\n throw value;\\\\n break;\\\\n case \\\\\\\"complete\\\\\\\":\\\\n closeSubscription(subscription);\\\\n if (m)\\\\n m.call(observer);\\\\n break;\\\\n }\\\\n }\\\\n catch (e) {\\\\n hostReportError(e);\\\\n }\\\\n if (subscription._state === \\\\\\\"closed\\\\\\\") {\\\\n cleanupSubscription(subscription);\\\\n }\\\\n else if (subscription._state === \\\\\\\"running\\\\\\\") {\\\\n subscription._state = \\\\\\\"ready\\\\\\\";\\\\n }\\\\n}\\\\nfunction onNotify(subscription, type, value) {\\\\n if (subscription._state === \\\\\\\"closed\\\\\\\") {\\\\n return;\\\\n }\\\\n if (subscription._state === \\\\\\\"buffering\\\\\\\") {\\\\n subscription._queue = subscription._queue || [];\\\\n subscription._queue.push({ type, value });\\\\n return;\\\\n }\\\\n if (subscription._state !== \\\\\\\"ready\\\\\\\") {\\\\n subscription._state = \\\\\\\"buffering\\\\\\\";\\\\n subscription._queue = [{ type, value }];\\\\n enqueue(() => flushSubscription(subscription));\\\\n return;\\\\n }\\\\n notifySubscription(subscription, type, value);\\\\n}\\\\nclass Subscription {\\\\n constructor(observer, subscriber) {\\\\n // ASSERT: observer is an object\\\\n // ASSERT: subscriber is callable\\\\n this._cleanup = undefined;\\\\n this._observer = observer;\\\\n this._queue = undefined;\\\\n this._state = \\\\\\\"initializing\\\\\\\";\\\\n const subscriptionObserver = new SubscriptionObserver(this);\\\\n try {\\\\n this._cleanup = subscriber.call(undefined, subscriptionObserver);\\\\n }\\\\n catch (e) {\\\\n 
subscriptionObserver.error(e);\\\\n }\\\\n if (this._state === \\\\\\\"initializing\\\\\\\") {\\\\n this._state = \\\\\\\"ready\\\\\\\";\\\\n }\\\\n }\\\\n get closed() {\\\\n return this._state === \\\\\\\"closed\\\\\\\";\\\\n }\\\\n unsubscribe() {\\\\n if (this._state !== \\\\\\\"closed\\\\\\\") {\\\\n closeSubscription(this);\\\\n cleanupSubscription(this);\\\\n }\\\\n }\\\\n}\\\\nclass SubscriptionObserver {\\\\n constructor(subscription) { this._subscription = subscription; }\\\\n get closed() { return this._subscription._state === \\\\\\\"closed\\\\\\\"; }\\\\n next(value) { onNotify(this._subscription, \\\\\\\"next\\\\\\\", value); }\\\\n error(value) { onNotify(this._subscription, \\\\\\\"error\\\\\\\", value); }\\\\n complete() { onNotify(this._subscription, \\\\\\\"complete\\\\\\\"); }\\\\n}\\\\n/**\\\\n * The basic Observable class. This primitive is used to wrap asynchronous\\\\n * data streams in a common standardized data type that is interoperable\\\\n * between libraries and can be composed to represent more complex processes.\\\\n */\\\\nclass Observable {\\\\n constructor(subscriber) {\\\\n if (!(this instanceof Observable)) {\\\\n throw new TypeError(\\\\\\\"Observable cannot be called as a function\\\\\\\");\\\\n }\\\\n if (typeof subscriber !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(\\\\\\\"Observable initializer must be a function\\\\\\\");\\\\n }\\\\n this._subscriber = subscriber;\\\\n }\\\\n subscribe(nextOrObserver, onError, onComplete) {\\\\n if (typeof nextOrObserver !== \\\\\\\"object\\\\\\\" || nextOrObserver === null) {\\\\n nextOrObserver = {\\\\n next: nextOrObserver,\\\\n error: onError,\\\\n complete: onComplete\\\\n };\\\\n }\\\\n return new Subscription(nextOrObserver, this._subscriber);\\\\n }\\\\n pipe(first, ...mappers) {\\\\n // tslint:disable-next-line no-this-assignment\\\\n let intermediate = this;\\\\n for (const mapper of [first, ...mappers]) {\\\\n intermediate = mapper(intermediate);\\\\n }\\\\n return intermediate;\\\\n }\\\\n tap(nextOrObserver, onError, onComplete) {\\\\n const tapObserver = typeof nextOrObserver !== \\\\\\\"object\\\\\\\" || nextOrObserver === null\\\\n ? 
{\\\\n next: nextOrObserver,\\\\n error: onError,\\\\n complete: onComplete\\\\n }\\\\n : nextOrObserver;\\\\n return new Observable(observer => {\\\\n return this.subscribe({\\\\n next(value) {\\\\n tapObserver.next && tapObserver.next(value);\\\\n observer.next(value);\\\\n },\\\\n error(error) {\\\\n tapObserver.error && tapObserver.error(error);\\\\n observer.error(error);\\\\n },\\\\n complete() {\\\\n tapObserver.complete && tapObserver.complete();\\\\n observer.complete();\\\\n },\\\\n start(subscription) {\\\\n tapObserver.start && tapObserver.start(subscription);\\\\n }\\\\n });\\\\n });\\\\n }\\\\n forEach(fn) {\\\\n return new Promise((resolve, reject) => {\\\\n if (typeof fn !== \\\\\\\"function\\\\\\\") {\\\\n reject(new TypeError(fn + \\\\\\\" is not a function\\\\\\\"));\\\\n return;\\\\n }\\\\n function done() {\\\\n subscription.unsubscribe();\\\\n resolve(undefined);\\\\n }\\\\n const subscription = this.subscribe({\\\\n next(value) {\\\\n try {\\\\n fn(value, done);\\\\n }\\\\n catch (e) {\\\\n reject(e);\\\\n subscription.unsubscribe();\\\\n }\\\\n },\\\\n error(error) {\\\\n reject(error);\\\\n },\\\\n complete() {\\\\n resolve(undefined);\\\\n }\\\\n });\\\\n });\\\\n }\\\\n map(fn) {\\\\n if (typeof fn !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(fn + \\\\\\\" is not a function\\\\\\\");\\\\n }\\\\n const C = getSpecies(this);\\\\n return new C(observer => this.subscribe({\\\\n next(value) {\\\\n let propagatedValue = value;\\\\n try {\\\\n propagatedValue = fn(value);\\\\n }\\\\n catch (e) {\\\\n return observer.error(e);\\\\n }\\\\n observer.next(propagatedValue);\\\\n },\\\\n error(e) { observer.error(e); },\\\\n complete() { observer.complete(); },\\\\n }));\\\\n }\\\\n filter(fn) {\\\\n if (typeof fn !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(fn + \\\\\\\" is not a function\\\\\\\");\\\\n }\\\\n const C = getSpecies(this);\\\\n return new C(observer => this.subscribe({\\\\n next(value) {\\\\n try {\\\\n if (!fn(value))\\\\n return;\\\\n }\\\\n catch (e) {\\\\n return observer.error(e);\\\\n }\\\\n observer.next(value);\\\\n },\\\\n error(e) { observer.error(e); },\\\\n complete() { observer.complete(); },\\\\n }));\\\\n }\\\\n reduce(fn, seed) {\\\\n if (typeof fn !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(fn + \\\\\\\" is not a function\\\\\\\");\\\\n }\\\\n const C = getSpecies(this);\\\\n const hasSeed = arguments.length > 1;\\\\n let hasValue = false;\\\\n let acc = seed;\\\\n return new C(observer => this.subscribe({\\\\n next(value) {\\\\n const first = !hasValue;\\\\n hasValue = true;\\\\n if (!first || hasSeed) {\\\\n try {\\\\n acc = fn(acc, value);\\\\n }\\\\n catch (e) {\\\\n return observer.error(e);\\\\n }\\\\n }\\\\n else {\\\\n acc = value;\\\\n }\\\\n },\\\\n error(e) { observer.error(e); },\\\\n complete() {\\\\n if (!hasValue && !hasSeed) {\\\\n return observer.error(new TypeError(\\\\\\\"Cannot reduce an empty sequence\\\\\\\"));\\\\n }\\\\n observer.next(acc);\\\\n observer.complete();\\\\n },\\\\n }));\\\\n }\\\\n concat(...sources) {\\\\n const C = getSpecies(this);\\\\n return new C(observer => {\\\\n let subscription;\\\\n let index = 0;\\\\n function startNext(next) {\\\\n subscription = next.subscribe({\\\\n next(v) { observer.next(v); },\\\\n error(e) { observer.error(e); },\\\\n complete() {\\\\n if (index === sources.length) {\\\\n subscription = undefined;\\\\n observer.complete();\\\\n }\\\\n else {\\\\n startNext(C.from(sources[index++]));\\\\n }\\\\n },\\\\n });\\\\n }\\\\n 
startNext(this);\\\\n return () => {\\\\n if (subscription) {\\\\n subscription.unsubscribe();\\\\n subscription = undefined;\\\\n }\\\\n };\\\\n });\\\\n }\\\\n flatMap(fn) {\\\\n if (typeof fn !== \\\\\\\"function\\\\\\\") {\\\\n throw new TypeError(fn + \\\\\\\" is not a function\\\\\\\");\\\\n }\\\\n const C = getSpecies(this);\\\\n return new C(observer => {\\\\n const subscriptions = [];\\\\n const outer = this.subscribe({\\\\n next(value) {\\\\n let normalizedValue;\\\\n if (fn) {\\\\n try {\\\\n normalizedValue = fn(value);\\\\n }\\\\n catch (e) {\\\\n return observer.error(e);\\\\n }\\\\n }\\\\n else {\\\\n normalizedValue = value;\\\\n }\\\\n const inner = C.from(normalizedValue).subscribe({\\\\n next(innerValue) { observer.next(innerValue); },\\\\n error(e) { observer.error(e); },\\\\n complete() {\\\\n const i = subscriptions.indexOf(inner);\\\\n if (i >= 0)\\\\n subscriptions.splice(i, 1);\\\\n completeIfDone();\\\\n },\\\\n });\\\\n subscriptions.push(inner);\\\\n },\\\\n error(e) { observer.error(e); },\\\\n complete() { completeIfDone(); },\\\\n });\\\\n function completeIfDone() {\\\\n if (outer.closed && subscriptions.length === 0) {\\\\n observer.complete();\\\\n }\\\\n }\\\\n return () => {\\\\n subscriptions.forEach(s => s.unsubscribe());\\\\n outer.unsubscribe();\\\\n };\\\\n });\\\\n }\\\\n [(Symbol.observable, SymbolObservable)]() { return this; }\\\\n static from(x) {\\\\n const C = (typeof this === \\\\\\\"function\\\\\\\" ? this : Observable);\\\\n if (x == null) {\\\\n throw new TypeError(x + \\\\\\\" is not an object\\\\\\\");\\\\n }\\\\n const observableMethod = getMethod(x, SymbolObservable);\\\\n if (observableMethod) {\\\\n const observable = observableMethod.call(x);\\\\n if (Object(observable) !== observable) {\\\\n throw new TypeError(observable + \\\\\\\" is not an object\\\\\\\");\\\\n }\\\\n if (isObservable(observable) && observable.constructor === C) {\\\\n return observable;\\\\n }\\\\n return new C(observer => observable.subscribe(observer));\\\\n }\\\\n if (Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"hasSymbol\\\\\\\"])(\\\\\\\"iterator\\\\\\\")) {\\\\n const iteratorMethod = getMethod(x, SymbolIterator);\\\\n if (iteratorMethod) {\\\\n return new C(observer => {\\\\n enqueue(() => {\\\\n if (observer.closed)\\\\n return;\\\\n for (const item of iteratorMethod.call(x)) {\\\\n observer.next(item);\\\\n if (observer.closed)\\\\n return;\\\\n }\\\\n observer.complete();\\\\n });\\\\n });\\\\n }\\\\n }\\\\n if (Array.isArray(x)) {\\\\n return new C(observer => {\\\\n enqueue(() => {\\\\n if (observer.closed)\\\\n return;\\\\n for (const item of x) {\\\\n observer.next(item);\\\\n if (observer.closed)\\\\n return;\\\\n }\\\\n observer.complete();\\\\n });\\\\n });\\\\n }\\\\n throw new TypeError(x + \\\\\\\" is not observable\\\\\\\");\\\\n }\\\\n static of(...items) {\\\\n const C = (typeof this === \\\\\\\"function\\\\\\\" ? 
this : Observable);\\\\n return new C(observer => {\\\\n enqueue(() => {\\\\n if (observer.closed)\\\\n return;\\\\n for (const item of items) {\\\\n observer.next(item);\\\\n if (observer.closed)\\\\n return;\\\\n }\\\\n observer.complete();\\\\n });\\\\n });\\\\n }\\\\n static get [SymbolSpecies]() { return this; }\\\\n}\\\\nif (Object(_symbols__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"hasSymbols\\\\\\\"])()) {\\\\n Object.defineProperty(Observable, Symbol(\\\\\\\"extensions\\\\\\\"), {\\\\n value: {\\\\n symbol: SymbolObservable,\\\\n hostReportError,\\\\n },\\\\n configurable: true,\\\\n });\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (Observable);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/observable.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/scan.js\\\":\\n/*!******************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/scan.js ***!\\n \\\\******************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _scheduler__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./_scheduler */ \\\\\\\"./node_modules/observable-fns/dist.esm/_scheduler.js\\\\\\\");\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n/* harmony import */ var _unsubscribe__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./unsubscribe */ \\\\\\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\\\\\");\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\nfunction scan(accumulator, seed) {\\\\n return (observable) => {\\\\n return new _observable__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"default\\\\\\\"](observer => {\\\\n let accumulated;\\\\n let index = 0;\\\\n const scheduler = new _scheduler__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"AsyncSerialScheduler\\\\\\\"](observer);\\\\n const subscription = observable.subscribe({\\\\n complete() {\\\\n scheduler.complete();\\\\n },\\\\n error(error) {\\\\n scheduler.error(error);\\\\n },\\\\n next(value) {\\\\n scheduler.schedule((next) => __awaiter(this, void 0, void 0, function* () {\\\\n const prevAcc = index === 0\\\\n ? (typeof seed === \\\\\\\"undefined\\\\\\\" ? 
value : seed)\\\\n : accumulated;\\\\n accumulated = yield accumulator(prevAcc, value, index++);\\\\n next(accumulated);\\\\n }));\\\\n }\\\\n });\\\\n return () => Object(_unsubscribe__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"])(subscription);\\\\n });\\\\n };\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (scan);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/scan.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/subject.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/subject.js ***!\\n \\\\*********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./observable */ \\\\\\\"./node_modules/observable-fns/dist.esm/observable.js\\\\\\\");\\\\n\\\\n// TODO: This observer iteration approach looks inelegant and expensive\\\\n// Idea: Come up with super class for Subscription that contains the\\\\n// notify*, ... methods and use it here\\\\n/**\\\\n * A subject is a \\\\\\\"hot\\\\\\\" observable (see `multicast`) that has its observer\\\\n * methods (`.next(value)`, `.error(error)`, `.complete()`) exposed.\\\\n *\\\\n * Be careful, though! With great power comes great responsibility. Only use\\\\n * the `Subject` when you really need to trigger updates \\\\\\\"from the outside\\\\\\\" and\\\\n * try to keep the code that can access it to a minimum. Return\\\\n * `Observable.from(mySubject)` to not allow other code to mutate.\\\\n */\\\\nclass MulticastSubject extends _observable__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"] {\\\\n constructor() {\\\\n super(observer => {\\\\n this._observers.add(observer);\\\\n return () => this._observers.delete(observer);\\\\n });\\\\n this._observers = new Set();\\\\n }\\\\n next(value) {\\\\n for (const observer of this._observers) {\\\\n observer.next(value);\\\\n }\\\\n }\\\\n error(error) {\\\\n for (const observer of this._observers) {\\\\n observer.error(error);\\\\n }\\\\n }\\\\n complete() {\\\\n for (const observer of this._observers) {\\\\n observer.complete();\\\\n }\\\\n }\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (MulticastSubject);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/subject.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/symbols.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/symbols.js ***!\\n \\\\*********************************************************/\\n/*! no exports provided */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/symbols.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/observable-fns/dist.esm/unsubscribe.js\\\":\\n/*!*************************************************************!*\\\\\\n !*** ./node_modules/observable-fns/dist.esm/unsubscribe.js ***!\\n \\\\*************************************************************/\\n/*! 
exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/**\\\\n * Unsubscribe from a subscription returned by something that looks like an observable,\\\\n * but is not necessarily our observable implementation.\\\\n */\\\\nfunction unsubscribe(subscription) {\\\\n if (typeof subscription === \\\\\\\"function\\\\\\\") {\\\\n subscription();\\\\n }\\\\n else if (subscription && typeof subscription.unsubscribe === \\\\\\\"function\\\\\\\") {\\\\n subscription.unsubscribe();\\\\n }\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (unsubscribe);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/observable-fns/dist.esm/unsubscribe.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/inflate.js\\\":\\n/*!******************************************!*\\\\\\n !*** ./node_modules/pako/lib/inflate.js ***!\\n \\\\******************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n\\\\nvar zlib_inflate = __webpack_require__(/*! ./zlib/inflate */ \\\\\\\"./node_modules/pako/lib/zlib/inflate.js\\\\\\\");\\\\nvar utils = __webpack_require__(/*! ./utils/common */ \\\\\\\"./node_modules/pako/lib/utils/common.js\\\\\\\");\\\\nvar strings = __webpack_require__(/*! ./utils/strings */ \\\\\\\"./node_modules/pako/lib/utils/strings.js\\\\\\\");\\\\nvar c = __webpack_require__(/*! ./zlib/constants */ \\\\\\\"./node_modules/pako/lib/zlib/constants.js\\\\\\\");\\\\nvar msg = __webpack_require__(/*! ./zlib/messages */ \\\\\\\"./node_modules/pako/lib/zlib/messages.js\\\\\\\");\\\\nvar ZStream = __webpack_require__(/*! ./zlib/zstream */ \\\\\\\"./node_modules/pako/lib/zlib/zstream.js\\\\\\\");\\\\nvar GZheader = __webpack_require__(/*! ./zlib/gzheader */ \\\\\\\"./node_modules/pako/lib/zlib/gzheader.js\\\\\\\");\\\\n\\\\nvar toString = Object.prototype.toString;\\\\n\\\\n/**\\\\n * class Inflate\\\\n *\\\\n * Generic JS-style wrapper for zlib calls. If you don't need\\\\n * streaming behaviour - use more simple functions: [[inflate]]\\\\n * and [[inflateRaw]].\\\\n **/\\\\n\\\\n/* internal\\\\n * inflate.chunks -> Array\\\\n *\\\\n * Chunks of output data, if [[Inflate#onData]] not overridden.\\\\n **/\\\\n\\\\n/**\\\\n * Inflate.result -> Uint8Array|Array|String\\\\n *\\\\n * Uncompressed result, generated by default [[Inflate#onData]]\\\\n * and [[Inflate#onEnd]] handlers. Filled after you push last chunk\\\\n * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you\\\\n * push a chunk with explicit flush (call [[Inflate#push]] with\\\\n * `Z_SYNC_FLUSH` param).\\\\n **/\\\\n\\\\n/**\\\\n * Inflate.err -> Number\\\\n *\\\\n * Error code after inflate finished. 0 (Z_OK) on success.\\\\n * Should be checked if broken data possible.\\\\n **/\\\\n\\\\n/**\\\\n * Inflate.msg -> String\\\\n *\\\\n * Error message, if [[Inflate.err]] != 0\\\\n **/\\\\n\\\\n\\\\n/**\\\\n * new Inflate(options)\\\\n * - options (Object): zlib inflate options.\\\\n *\\\\n * Creates new inflator instance with specified params. Throws exception\\\\n * on bad params. 
Supported options:\\\\n *\\\\n * - `windowBits`\\\\n * - `dictionary`\\\\n *\\\\n * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)\\\\n * for more information on these.\\\\n *\\\\n * Additional options, for internal needs:\\\\n *\\\\n * - `chunkSize` - size of generated data chunks (16K by default)\\\\n * - `raw` (Boolean) - do raw inflate\\\\n * - `to` (String) - if equal to 'string', then result will be converted\\\\n * from utf8 to utf16 (javascript) string. When string output requested,\\\\n * chunk length can differ from `chunkSize`, depending on content.\\\\n *\\\\n * By default, when no options set, autodetect deflate/gzip data format via\\\\n * wrapper header.\\\\n *\\\\n * ##### Example:\\\\n *\\\\n * ```javascript\\\\n * var pako = require('pako')\\\\n * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9])\\\\n * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]);\\\\n *\\\\n * var inflate = new pako.Inflate({ level: 3});\\\\n *\\\\n * inflate.push(chunk1, false);\\\\n * inflate.push(chunk2, true); // true -> last chunk\\\\n *\\\\n * if (inflate.err) { throw new Error(inflate.err); }\\\\n *\\\\n * console.log(inflate.result);\\\\n * ```\\\\n **/\\\\nfunction Inflate(options) {\\\\n if (!(this instanceof Inflate)) return new Inflate(options);\\\\n\\\\n this.options = utils.assign({\\\\n chunkSize: 16384,\\\\n windowBits: 0,\\\\n to: ''\\\\n }, options || {});\\\\n\\\\n var opt = this.options;\\\\n\\\\n // Force window size for `raw` data, if not set directly,\\\\n // because we have no header for autodetect.\\\\n if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) {\\\\n opt.windowBits = -opt.windowBits;\\\\n if (opt.windowBits === 0) { opt.windowBits = -15; }\\\\n }\\\\n\\\\n // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate\\\\n if ((opt.windowBits >= 0) && (opt.windowBits < 16) &&\\\\n !(options && options.windowBits)) {\\\\n opt.windowBits += 32;\\\\n }\\\\n\\\\n // Gzip header has no info about windows size, we can do autodetect only\\\\n // for deflate. 
So, if window size not set, force it to max when gzip possible\\\\n if ((opt.windowBits > 15) && (opt.windowBits < 48)) {\\\\n // bit 3 (16) -> gzipped data\\\\n // bit 4 (32) -> autodetect gzip/deflate\\\\n if ((opt.windowBits & 15) === 0) {\\\\n opt.windowBits |= 15;\\\\n }\\\\n }\\\\n\\\\n this.err = 0; // error code, if happens (0 = Z_OK)\\\\n this.msg = ''; // error message\\\\n this.ended = false; // used to avoid multiple onEnd() calls\\\\n this.chunks = []; // chunks of compressed data\\\\n\\\\n this.strm = new ZStream();\\\\n this.strm.avail_out = 0;\\\\n\\\\n var status = zlib_inflate.inflateInit2(\\\\n this.strm,\\\\n opt.windowBits\\\\n );\\\\n\\\\n if (status !== c.Z_OK) {\\\\n throw new Error(msg[status]);\\\\n }\\\\n\\\\n this.header = new GZheader();\\\\n\\\\n zlib_inflate.inflateGetHeader(this.strm, this.header);\\\\n\\\\n // Setup dictionary\\\\n if (opt.dictionary) {\\\\n // Convert data if needed\\\\n if (typeof opt.dictionary === 'string') {\\\\n opt.dictionary = strings.string2buf(opt.dictionary);\\\\n } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') {\\\\n opt.dictionary = new Uint8Array(opt.dictionary);\\\\n }\\\\n if (opt.raw) { //In raw mode we need to set the dictionary early\\\\n status = zlib_inflate.inflateSetDictionary(this.strm, opt.dictionary);\\\\n if (status !== c.Z_OK) {\\\\n throw new Error(msg[status]);\\\\n }\\\\n }\\\\n }\\\\n}\\\\n\\\\n/**\\\\n * Inflate#push(data[, mode]) -> Boolean\\\\n * - data (Uint8Array|Array|ArrayBuffer|String): input data\\\\n * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes.\\\\n * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH.\\\\n *\\\\n * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with\\\\n * new output chunks. Returns `true` on success. The last data block must have\\\\n * mode Z_FINISH (or `true`). That will flush internal pending buffers and call\\\\n * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you\\\\n * can use mode Z_SYNC_FLUSH, keeping the decompression context.\\\\n *\\\\n * On fail call [[Inflate#onEnd]] with error code and return false.\\\\n *\\\\n * We strongly recommend to use `Uint8Array` on input for best speed (output\\\\n * format is detected automatically). Also, don't skip last param and always\\\\n * use the same type in your code (boolean or number). That will improve JS speed.\\\\n *\\\\n * For regular `Array`-s make sure all elements are [0..255].\\\\n *\\\\n * ##### Example\\\\n *\\\\n * ```javascript\\\\n * push(chunk, false); // push one of data chunks\\\\n * ...\\\\n * push(chunk, true); // push last chunk\\\\n * ```\\\\n **/\\\\nInflate.prototype.push = function (data, mode) {\\\\n var strm = this.strm;\\\\n var chunkSize = this.options.chunkSize;\\\\n var dictionary = this.options.dictionary;\\\\n var status, _mode;\\\\n var next_out_utf8, tail, utf8str;\\\\n\\\\n // Flag to properly process Z_BUF_ERROR on testing inflate call\\\\n // when we check that all output data was flushed.\\\\n var allowBufError = false;\\\\n\\\\n if (this.ended) { return false; }\\\\n _mode = (mode === ~~mode) ? mode : ((mode === true) ? 
c.Z_FINISH : c.Z_NO_FLUSH);\\\\n\\\\n // Convert data if needed\\\\n if (typeof data === 'string') {\\\\n // Only binary strings can be decompressed on practice\\\\n strm.input = strings.binstring2buf(data);\\\\n } else if (toString.call(data) === '[object ArrayBuffer]') {\\\\n strm.input = new Uint8Array(data);\\\\n } else {\\\\n strm.input = data;\\\\n }\\\\n\\\\n strm.next_in = 0;\\\\n strm.avail_in = strm.input.length;\\\\n\\\\n do {\\\\n if (strm.avail_out === 0) {\\\\n strm.output = new utils.Buf8(chunkSize);\\\\n strm.next_out = 0;\\\\n strm.avail_out = chunkSize;\\\\n }\\\\n\\\\n status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */\\\\n\\\\n if (status === c.Z_NEED_DICT && dictionary) {\\\\n status = zlib_inflate.inflateSetDictionary(this.strm, dictionary);\\\\n }\\\\n\\\\n if (status === c.Z_BUF_ERROR && allowBufError === true) {\\\\n status = c.Z_OK;\\\\n allowBufError = false;\\\\n }\\\\n\\\\n if (status !== c.Z_STREAM_END && status !== c.Z_OK) {\\\\n this.onEnd(status);\\\\n this.ended = true;\\\\n return false;\\\\n }\\\\n\\\\n if (strm.next_out) {\\\\n if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) {\\\\n\\\\n if (this.options.to === 'string') {\\\\n\\\\n next_out_utf8 = strings.utf8border(strm.output, strm.next_out);\\\\n\\\\n tail = strm.next_out - next_out_utf8;\\\\n utf8str = strings.buf2string(strm.output, next_out_utf8);\\\\n\\\\n // move tail\\\\n strm.next_out = tail;\\\\n strm.avail_out = chunkSize - tail;\\\\n if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); }\\\\n\\\\n this.onData(utf8str);\\\\n\\\\n } else {\\\\n this.onData(utils.shrinkBuf(strm.output, strm.next_out));\\\\n }\\\\n }\\\\n }\\\\n\\\\n // When no more input data, we should check that internal inflate buffers\\\\n // are flushed. The only way to do it when avail_out = 0 - run one more\\\\n // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR.\\\\n // Here we set flag to process this error properly.\\\\n //\\\\n // NOTE. Deflate does not return error in this case and does not needs such\\\\n // logic.\\\\n if (strm.avail_in === 0 && strm.avail_out === 0) {\\\\n allowBufError = true;\\\\n }\\\\n\\\\n } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END);\\\\n\\\\n if (status === c.Z_STREAM_END) {\\\\n _mode = c.Z_FINISH;\\\\n }\\\\n\\\\n // Finalize on the last chunk.\\\\n if (_mode === c.Z_FINISH) {\\\\n status = zlib_inflate.inflateEnd(this.strm);\\\\n this.onEnd(status);\\\\n this.ended = true;\\\\n return status === c.Z_OK;\\\\n }\\\\n\\\\n // callback interim results if Z_SYNC_FLUSH.\\\\n if (_mode === c.Z_SYNC_FLUSH) {\\\\n this.onEnd(c.Z_OK);\\\\n strm.avail_out = 0;\\\\n return true;\\\\n }\\\\n\\\\n return true;\\\\n};\\\\n\\\\n\\\\n/**\\\\n * Inflate#onData(chunk) -> Void\\\\n * - chunk (Uint8Array|Array|String): output data. Type of array depends\\\\n * on js engine support. When string output requested, each chunk\\\\n * will be string.\\\\n *\\\\n * By default, stores data blocks in `chunks[]` property and glue\\\\n * those in `onEnd`. Override this handler, if you need another behaviour.\\\\n **/\\\\nInflate.prototype.onData = function (chunk) {\\\\n this.chunks.push(chunk);\\\\n};\\\\n\\\\n\\\\n/**\\\\n * Inflate#onEnd(status) -> Void\\\\n * - status (Number): inflate status. 
0 (Z_OK) on success,\\\\n * other if not.\\\\n *\\\\n * Called either after you tell inflate that the input stream is\\\\n * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH)\\\\n * or if an error happened. By default - join collected chunks,\\\\n * free memory and fill `results` / `err` properties.\\\\n **/\\\\nInflate.prototype.onEnd = function (status) {\\\\n // On success - join\\\\n if (status === c.Z_OK) {\\\\n if (this.options.to === 'string') {\\\\n // Glue & convert here, until we teach pako to send\\\\n // utf8 aligned strings to onData\\\\n this.result = this.chunks.join('');\\\\n } else {\\\\n this.result = utils.flattenChunks(this.chunks);\\\\n }\\\\n }\\\\n this.chunks = [];\\\\n this.err = status;\\\\n this.msg = this.strm.msg;\\\\n};\\\\n\\\\n\\\\n/**\\\\n * inflate(data[, options]) -> Uint8Array|Array|String\\\\n * - data (Uint8Array|Array|String): input data to decompress.\\\\n * - options (Object): zlib inflate options.\\\\n *\\\\n * Decompress `data` with inflate/ungzip and `options`. Autodetect\\\\n * format via wrapper header by default. That's why we don't provide\\\\n * separate `ungzip` method.\\\\n *\\\\n * Supported options are:\\\\n *\\\\n * - windowBits\\\\n *\\\\n * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)\\\\n * for more information.\\\\n *\\\\n * Sugar (options):\\\\n *\\\\n * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify\\\\n * negative windowBits implicitly.\\\\n * - `to` (String) - if equal to 'string', then result will be converted\\\\n * from utf8 to utf16 (javascript) string. When string output requested,\\\\n * chunk length can differ from `chunkSize`, depending on content.\\\\n *\\\\n *\\\\n * ##### Example:\\\\n *\\\\n * ```javascript\\\\n * var pako = require('pako')\\\\n * , input = pako.deflate([1,2,3,4,5,6,7,8,9])\\\\n * , output;\\\\n *\\\\n * try {\\\\n * output = pako.inflate(input);\\\\n * } catch (err)\\\\n * console.log(err);\\\\n * }\\\\n * ```\\\\n **/\\\\nfunction inflate(input, options) {\\\\n var inflator = new Inflate(options);\\\\n\\\\n inflator.push(input, true);\\\\n\\\\n // That will never happens, if you don't cheat with options :)\\\\n if (inflator.err) { throw inflator.msg || msg[inflator.err]; }\\\\n\\\\n return inflator.result;\\\\n}\\\\n\\\\n\\\\n/**\\\\n * inflateRaw(data[, options]) -> Uint8Array|Array|String\\\\n * - data (Uint8Array|Array|String): input data to decompress.\\\\n * - options (Object): zlib inflate options.\\\\n *\\\\n * The same as [[inflate]], but creates raw data, without wrapper\\\\n * (header and adler32 crc).\\\\n **/\\\\nfunction inflateRaw(input, options) {\\\\n options = options || {};\\\\n options.raw = true;\\\\n return inflate(input, options);\\\\n}\\\\n\\\\n\\\\n/**\\\\n * ungzip(data[, options]) -> Uint8Array|Array|String\\\\n * - data (Uint8Array|Array|String): input data to decompress.\\\\n * - options (Object): zlib inflate options.\\\\n *\\\\n * Just shortcut to [[inflate]], because it autodetects format\\\\n * by header.content. 
Done for convenience.\\\\n **/\\\\n\\\\n\\\\nexports.Inflate = Inflate;\\\\nexports.inflate = inflate;\\\\nexports.inflateRaw = inflateRaw;\\\\nexports.ungzip = inflate;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/inflate.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/utils/common.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/pako/lib/utils/common.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n\\\\nvar TYPED_OK = (typeof Uint8Array !== 'undefined') &&\\\\n (typeof Uint16Array !== 'undefined') &&\\\\n (typeof Int32Array !== 'undefined');\\\\n\\\\nfunction _has(obj, key) {\\\\n return Object.prototype.hasOwnProperty.call(obj, key);\\\\n}\\\\n\\\\nexports.assign = function (obj /*from1, from2, from3, ...*/) {\\\\n var sources = Array.prototype.slice.call(arguments, 1);\\\\n while (sources.length) {\\\\n var source = sources.shift();\\\\n if (!source) { continue; }\\\\n\\\\n if (typeof source !== 'object') {\\\\n throw new TypeError(source + 'must be non-object');\\\\n }\\\\n\\\\n for (var p in source) {\\\\n if (_has(source, p)) {\\\\n obj[p] = source[p];\\\\n }\\\\n }\\\\n }\\\\n\\\\n return obj;\\\\n};\\\\n\\\\n\\\\n// reduce buffer size, avoiding mem copy\\\\nexports.shrinkBuf = function (buf, size) {\\\\n if (buf.length === size) { return buf; }\\\\n if (buf.subarray) { return buf.subarray(0, size); }\\\\n buf.length = size;\\\\n return buf;\\\\n};\\\\n\\\\n\\\\nvar fnTyped = {\\\\n arraySet: function (dest, src, src_offs, len, dest_offs) {\\\\n if (src.subarray && dest.subarray) {\\\\n dest.set(src.subarray(src_offs, src_offs + len), dest_offs);\\\\n return;\\\\n }\\\\n // Fallback to ordinary array\\\\n for (var i = 0; i < len; i++) {\\\\n dest[dest_offs + i] = src[src_offs + i];\\\\n }\\\\n },\\\\n // Join array of chunks to single array.\\\\n flattenChunks: function (chunks) {\\\\n var i, l, len, pos, chunk, result;\\\\n\\\\n // calculate data length\\\\n len = 0;\\\\n for (i = 0, l = chunks.length; i < l; i++) {\\\\n len += chunks[i].length;\\\\n }\\\\n\\\\n // join chunks\\\\n result = new Uint8Array(len);\\\\n pos = 0;\\\\n for (i = 0, l = chunks.length; i < l; i++) {\\\\n chunk = chunks[i];\\\\n result.set(chunk, pos);\\\\n pos += chunk.length;\\\\n }\\\\n\\\\n return result;\\\\n }\\\\n};\\\\n\\\\nvar fnUntyped = {\\\\n arraySet: function (dest, src, src_offs, len, dest_offs) {\\\\n for (var i = 0; i < len; i++) {\\\\n dest[dest_offs + i] = src[src_offs + i];\\\\n }\\\\n },\\\\n // Join array of chunks to single array.\\\\n flattenChunks: function (chunks) {\\\\n return [].concat.apply([], chunks);\\\\n }\\\\n};\\\\n\\\\n\\\\n// Enable/Disable typed arrays use, for testing\\\\n//\\\\nexports.setTyped = function (on) {\\\\n if (on) {\\\\n exports.Buf8 = Uint8Array;\\\\n exports.Buf16 = Uint16Array;\\\\n exports.Buf32 = Int32Array;\\\\n exports.assign(exports, fnTyped);\\\\n } else {\\\\n exports.Buf8 = Array;\\\\n exports.Buf16 = Array;\\\\n exports.Buf32 = Array;\\\\n exports.assign(exports, fnUntyped);\\\\n }\\\\n};\\\\n\\\\nexports.setTyped(TYPED_OK);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/utils/common.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/utils/strings.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/pako/lib/utils/strings.js ***!\\n 
\\\\************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// String encode/decode helpers\\\\n\\\\n\\\\n\\\\nvar utils = __webpack_require__(/*! ./common */ \\\\\\\"./node_modules/pako/lib/utils/common.js\\\\\\\");\\\\n\\\\n\\\\n// Quick check if we can use fast array to bin string conversion\\\\n//\\\\n// - apply(Array) can fail on Android 2.2\\\\n// - apply(Uint8Array) can fail on iOS 5.1 Safari\\\\n//\\\\nvar STR_APPLY_OK = true;\\\\nvar STR_APPLY_UIA_OK = true;\\\\n\\\\ntry { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; }\\\\ntry { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; }\\\\n\\\\n\\\\n// Table with utf8 lengths (calculated by first byte of sequence)\\\\n// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,\\\\n// because max possible codepoint is 0x10ffff\\\\nvar _utf8len = new utils.Buf8(256);\\\\nfor (var q = 0; q < 256; q++) {\\\\n _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1);\\\\n}\\\\n_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start\\\\n\\\\n\\\\n// convert string to array (typed, when possible)\\\\nexports.string2buf = function (str) {\\\\n var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;\\\\n\\\\n // count binary size\\\\n for (m_pos = 0; m_pos < str_len; m_pos++) {\\\\n c = str.charCodeAt(m_pos);\\\\n if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {\\\\n c2 = str.charCodeAt(m_pos + 1);\\\\n if ((c2 & 0xfc00) === 0xdc00) {\\\\n c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);\\\\n m_pos++;\\\\n }\\\\n }\\\\n buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 
3 : 4;\\\\n }\\\\n\\\\n // allocate buffer\\\\n buf = new utils.Buf8(buf_len);\\\\n\\\\n // convert\\\\n for (i = 0, m_pos = 0; i < buf_len; m_pos++) {\\\\n c = str.charCodeAt(m_pos);\\\\n if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {\\\\n c2 = str.charCodeAt(m_pos + 1);\\\\n if ((c2 & 0xfc00) === 0xdc00) {\\\\n c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);\\\\n m_pos++;\\\\n }\\\\n }\\\\n if (c < 0x80) {\\\\n /* one byte */\\\\n buf[i++] = c;\\\\n } else if (c < 0x800) {\\\\n /* two bytes */\\\\n buf[i++] = 0xC0 | (c >>> 6);\\\\n buf[i++] = 0x80 | (c & 0x3f);\\\\n } else if (c < 0x10000) {\\\\n /* three bytes */\\\\n buf[i++] = 0xE0 | (c >>> 12);\\\\n buf[i++] = 0x80 | (c >>> 6 & 0x3f);\\\\n buf[i++] = 0x80 | (c & 0x3f);\\\\n } else {\\\\n /* four bytes */\\\\n buf[i++] = 0xf0 | (c >>> 18);\\\\n buf[i++] = 0x80 | (c >>> 12 & 0x3f);\\\\n buf[i++] = 0x80 | (c >>> 6 & 0x3f);\\\\n buf[i++] = 0x80 | (c & 0x3f);\\\\n }\\\\n }\\\\n\\\\n return buf;\\\\n};\\\\n\\\\n// Helper (used in 2 places)\\\\nfunction buf2binstring(buf, len) {\\\\n // On Chrome, the arguments in a function call that are allowed is `65534`.\\\\n // If the length of the buffer is smaller than that, we can use this optimization,\\\\n // otherwise we will take a slower path.\\\\n if (len < 65534) {\\\\n if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) {\\\\n return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len));\\\\n }\\\\n }\\\\n\\\\n var result = '';\\\\n for (var i = 0; i < len; i++) {\\\\n result += String.fromCharCode(buf[i]);\\\\n }\\\\n return result;\\\\n}\\\\n\\\\n\\\\n// Convert byte array to binary string\\\\nexports.buf2binstring = function (buf) {\\\\n return buf2binstring(buf, buf.length);\\\\n};\\\\n\\\\n\\\\n// Convert binary string (typed, when possible)\\\\nexports.binstring2buf = function (str) {\\\\n var buf = new utils.Buf8(str.length);\\\\n for (var i = 0, len = buf.length; i < len; i++) {\\\\n buf[i] = str.charCodeAt(i);\\\\n }\\\\n return buf;\\\\n};\\\\n\\\\n\\\\n// convert array to string\\\\nexports.buf2string = function (buf, max) {\\\\n var i, out, c, c_len;\\\\n var len = max || buf.length;\\\\n\\\\n // Reserve max possible length (2 words per char)\\\\n // NB: by unknown reasons, Array is significantly faster for\\\\n // String.fromCharCode.apply than Uint16Array.\\\\n var utf16buf = new Array(len * 2);\\\\n\\\\n for (out = 0, i = 0; i < len;) {\\\\n c = buf[i++];\\\\n // quick process ascii\\\\n if (c < 0x80) { utf16buf[out++] = c; continue; }\\\\n\\\\n c_len = _utf8len[c];\\\\n // skip 5 & 6 byte codes\\\\n if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; }\\\\n\\\\n // apply mask on first byte\\\\n c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;\\\\n // join the rest\\\\n while (c_len > 1 && i < len) {\\\\n c = (c << 6) | (buf[i++] & 0x3f);\\\\n c_len--;\\\\n }\\\\n\\\\n // terminated by end of string?\\\\n if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }\\\\n\\\\n if (c < 0x10000) {\\\\n utf16buf[out++] = c;\\\\n } else {\\\\n c -= 0x10000;\\\\n utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);\\\\n utf16buf[out++] = 0xdc00 | (c & 0x3ff);\\\\n }\\\\n }\\\\n\\\\n return buf2binstring(utf16buf, out);\\\\n};\\\\n\\\\n\\\\n// Calculate max possible position in utf8 buffer,\\\\n// that will not break sequence. 
If that's not possible\\\\n// - (very small limits) return max size as is.\\\\n//\\\\n// buf[] - utf8 bytes array\\\\n// max - length limit (mandatory);\\\\nexports.utf8border = function (buf, max) {\\\\n var pos;\\\\n\\\\n max = max || buf.length;\\\\n if (max > buf.length) { max = buf.length; }\\\\n\\\\n // go back from last position, until start of sequence found\\\\n pos = max - 1;\\\\n while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }\\\\n\\\\n // Very small and broken sequence,\\\\n // return max, because we should return something anyway.\\\\n if (pos < 0) { return max; }\\\\n\\\\n // If we came to start of buffer - that means buffer is too small,\\\\n // return max too.\\\\n if (pos === 0) { return max; }\\\\n\\\\n return (pos + _utf8len[buf[pos]] > max) ? pos : max;\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/utils/strings.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/adler32.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/adler32.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// Note: adler32 takes 12% for level 0 and 2% for level 6.\\\\n// It isn't worth it to make additional optimizations as in original.\\\\n// Small size is preferable.\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\nfunction adler32(adler, buf, len, pos) {\\\\n var s1 = (adler & 0xffff) |0,\\\\n s2 = ((adler >>> 16) & 0xffff) |0,\\\\n n = 0;\\\\n\\\\n while (len !== 0) {\\\\n // Set limit ~ twice less than 5552, to keep\\\\n // s2 in 31-bits, because we force signed ints.\\\\n // in other case %= will fail.\\\\n n = len > 2000 ? 2000 : len;\\\\n len -= n;\\\\n\\\\n do {\\\\n s1 = (s1 + buf[pos++]) |0;\\\\n s2 = (s2 + s1) |0;\\\\n } while (--n);\\\\n\\\\n s1 %= 65521;\\\\n s2 %= 65521;\\\\n }\\\\n\\\\n return (s1 | (s2 << 16)) |0;\\\\n}\\\\n\\\\n\\\\nmodule.exports = adler32;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/adler32.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/constants.js\\\":\\n/*!*************************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/constants.js ***!\\n \\\\*************************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\nmodule.exports = {\\\\n\\\\n /* Allowed flush values; see deflate() and inflate() below for details */\\\\n Z_NO_FLUSH: 0,\\\\n Z_PARTIAL_FLUSH: 1,\\\\n Z_SYNC_FLUSH: 2,\\\\n Z_FULL_FLUSH: 3,\\\\n Z_FINISH: 4,\\\\n Z_BLOCK: 5,\\\\n Z_TREES: 6,\\\\n\\\\n /* Return codes for the compression/decompression functions. Negative values\\\\n * are errors, positive values are used for special but normal events.\\\\n */\\\\n Z_OK: 0,\\\\n Z_STREAM_END: 1,\\\\n Z_NEED_DICT: 2,\\\\n Z_ERRNO: -1,\\\\n Z_STREAM_ERROR: -2,\\\\n Z_DATA_ERROR: -3,\\\\n //Z_MEM_ERROR: -4,\\\\n Z_BUF_ERROR: -5,\\\\n //Z_VERSION_ERROR: -6,\\\\n\\\\n /* compression levels */\\\\n Z_NO_COMPRESSION: 0,\\\\n Z_BEST_SPEED: 1,\\\\n Z_BEST_COMPRESSION: 9,\\\\n Z_DEFAULT_COMPRESSION: -1,\\\\n\\\\n\\\\n Z_FILTERED: 1,\\\\n Z_HUFFMAN_ONLY: 2,\\\\n Z_RLE: 3,\\\\n Z_FIXED: 4,\\\\n Z_DEFAULT_STRATEGY: 0,\\\\n\\\\n /* Possible values of the data_type field (though see inflate()) */\\\\n Z_BINARY: 0,\\\\n Z_TEXT: 1,\\\\n //Z_ASCII: 1, // = Z_TEXT (deprecated)\\\\n Z_UNKNOWN: 2,\\\\n\\\\n /* The deflate compression method */\\\\n Z_DEFLATED: 8\\\\n //Z_NULL: null // Use -1 or null inline, depending on var type\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/constants.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/crc32.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/crc32.js ***!\\n \\\\*********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// Note: we can't get significant speed boost here.\\\\n// So write code to minimize size - no pregenerated tables\\\\n// and array tools dependencies.\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. 
The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\n// Use ordinary array, since untyped makes no boost here\\\\nfunction makeTable() {\\\\n var c, table = [];\\\\n\\\\n for (var n = 0; n < 256; n++) {\\\\n c = n;\\\\n for (var k = 0; k < 8; k++) {\\\\n c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\\\\n }\\\\n table[n] = c;\\\\n }\\\\n\\\\n return table;\\\\n}\\\\n\\\\n// Create table on load. Just 255 signed longs. Not a problem.\\\\nvar crcTable = makeTable();\\\\n\\\\n\\\\nfunction crc32(crc, buf, len, pos) {\\\\n var t = crcTable,\\\\n end = pos + len;\\\\n\\\\n crc ^= -1;\\\\n\\\\n for (var i = pos; i < end; i++) {\\\\n crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];\\\\n }\\\\n\\\\n return (crc ^ (-1)); // >>> 0;\\\\n}\\\\n\\\\n\\\\nmodule.exports = crc32;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/crc32.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/gzheader.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/gzheader.js ***!\\n \\\\************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. 
This notice may not be removed or altered from any source distribution.\\\\n\\\\nfunction GZheader() {\\\\n /* true if compressed data believed to be text */\\\\n this.text = 0;\\\\n /* modification time */\\\\n this.time = 0;\\\\n /* extra flags (not used when writing a gzip file) */\\\\n this.xflags = 0;\\\\n /* operating system */\\\\n this.os = 0;\\\\n /* pointer to extra field or Z_NULL if none */\\\\n this.extra = null;\\\\n /* extra field length (valid if extra != Z_NULL) */\\\\n this.extra_len = 0; // Actually, we don't need it in JS,\\\\n // but leave for few code modifications\\\\n\\\\n //\\\\n // Setup limits is not necessary because in js we should not preallocate memory\\\\n // for inflate use constant limit in 65536 bytes\\\\n //\\\\n\\\\n /* space at extra (only when reading header) */\\\\n // this.extra_max = 0;\\\\n /* pointer to zero-terminated file name or Z_NULL */\\\\n this.name = '';\\\\n /* space at name (only when reading header) */\\\\n // this.name_max = 0;\\\\n /* pointer to zero-terminated comment or Z_NULL */\\\\n this.comment = '';\\\\n /* space at comment (only when reading header) */\\\\n // this.comm_max = 0;\\\\n /* true if there was or will be a header crc */\\\\n this.hcrc = 0;\\\\n /* true when done reading gzip header (not used when writing a gzip file) */\\\\n this.done = false;\\\\n}\\\\n\\\\nmodule.exports = GZheader;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/gzheader.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/inffast.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/inffast.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. 
This notice may not be removed or altered from any source distribution.\\\\n\\\\n// See state defs from inflate.js\\\\nvar BAD = 30; /* got a data error -- remain here until reset */\\\\nvar TYPE = 12; /* i: waiting for type bits, including last-flag bit */\\\\n\\\\n/*\\\\n Decode literal, length, and distance codes and write out the resulting\\\\n literal and match bytes until either not enough input or output is\\\\n available, an end-of-block is encountered, or a data error is encountered.\\\\n When large enough input and output buffers are supplied to inflate(), for\\\\n example, a 16K input buffer and a 64K output buffer, more than 95% of the\\\\n inflate execution time is spent in this routine.\\\\n\\\\n Entry assumptions:\\\\n\\\\n state.mode === LEN\\\\n strm.avail_in >= 6\\\\n strm.avail_out >= 258\\\\n start >= strm.avail_out\\\\n state.bits < 8\\\\n\\\\n On return, state.mode is one of:\\\\n\\\\n LEN -- ran out of enough output space or enough available input\\\\n TYPE -- reached end of block code, inflate() to interpret next block\\\\n BAD -- error in block data\\\\n\\\\n Notes:\\\\n\\\\n - The maximum input bits used by a length/distance pair is 15 bits for the\\\\n length code, 5 bits for the length extra, 15 bits for the distance code,\\\\n and 13 bits for the distance extra. This totals 48 bits, or six bytes.\\\\n Therefore if strm.avail_in >= 6, then there is enough input to avoid\\\\n checking for available input while decoding.\\\\n\\\\n - The maximum bytes that a single length/distance pair can output is 258\\\\n bytes, which is the maximum length that can be coded. inflate_fast()\\\\n requires strm.avail_out >= 258 for each loop to avoid checking for\\\\n output space.\\\\n */\\\\nmodule.exports = function inflate_fast(strm, start) {\\\\n var state;\\\\n var _in; /* local strm.input */\\\\n var last; /* have enough input while in < last */\\\\n var _out; /* local strm.output */\\\\n var beg; /* inflate()'s initial strm.output */\\\\n var end; /* while out < end, enough space available */\\\\n//#ifdef INFLATE_STRICT\\\\n var dmax; /* maximum distance from zlib header */\\\\n//#endif\\\\n var wsize; /* window size or zero if not using window */\\\\n var whave; /* valid bytes in the window */\\\\n var wnext; /* window write index */\\\\n // Use `s_window` instead `window`, avoid conflict with instrumentation tools\\\\n var s_window; /* allocated sliding window, if wsize != 0 */\\\\n var hold; /* local strm.hold */\\\\n var bits; /* local strm.bits */\\\\n var lcode; /* local strm.lencode */\\\\n var dcode; /* local strm.distcode */\\\\n var lmask; /* mask for first level of length codes */\\\\n var dmask; /* mask for first level of distance codes */\\\\n var here; /* retrieved table entry */\\\\n var op; /* code bits, operation, extra bits, or */\\\\n /* window position, window bytes to copy */\\\\n var len; /* match length, unused bytes */\\\\n var dist; /* match distance */\\\\n var from; /* where to copy match from */\\\\n var from_source;\\\\n\\\\n\\\\n var input, output; // JS specific, because we have no pointers\\\\n\\\\n /* copy state to local variables */\\\\n state = strm.state;\\\\n //here = state.here;\\\\n _in = strm.next_in;\\\\n input = strm.input;\\\\n last = _in + (strm.avail_in - 5);\\\\n _out = strm.next_out;\\\\n output = strm.output;\\\\n beg = _out - (start - strm.avail_out);\\\\n end = _out + (strm.avail_out - 257);\\\\n//#ifdef INFLATE_STRICT\\\\n dmax = state.dmax;\\\\n//#endif\\\\n wsize = state.wsize;\\\\n whave = state.whave;\\\\n wnext = 
state.wnext;\\\\n s_window = state.window;\\\\n hold = state.hold;\\\\n bits = state.bits;\\\\n lcode = state.lencode;\\\\n dcode = state.distcode;\\\\n lmask = (1 << state.lenbits) - 1;\\\\n dmask = (1 << state.distbits) - 1;\\\\n\\\\n\\\\n /* decode literals and length/distances until end-of-block or not enough\\\\n input data or output space */\\\\n\\\\n top:\\\\n do {\\\\n if (bits < 15) {\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n }\\\\n\\\\n here = lcode[hold & lmask];\\\\n\\\\n dolen:\\\\n for (;;) { // Goto emulation\\\\n op = here >>> 24/*here.bits*/;\\\\n hold >>>= op;\\\\n bits -= op;\\\\n op = (here >>> 16) & 0xff/*here.op*/;\\\\n if (op === 0) { /* literal */\\\\n //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?\\\\n // \\\\\\\"inflate: literal '%c'\\\\\\\\n\\\\\\\" :\\\\n // \\\\\\\"inflate: literal 0x%02x\\\\\\\\n\\\\\\\", here.val));\\\\n output[_out++] = here & 0xffff/*here.val*/;\\\\n }\\\\n else if (op & 16) { /* length base */\\\\n len = here & 0xffff/*here.val*/;\\\\n op &= 15; /* number of extra bits */\\\\n if (op) {\\\\n if (bits < op) {\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n }\\\\n len += hold & ((1 << op) - 1);\\\\n hold >>>= op;\\\\n bits -= op;\\\\n }\\\\n //Tracevv((stderr, \\\\\\\"inflate: length %u\\\\\\\\n\\\\\\\", len));\\\\n if (bits < 15) {\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n }\\\\n here = dcode[hold & dmask];\\\\n\\\\n dodist:\\\\n for (;;) { // goto emulation\\\\n op = here >>> 24/*here.bits*/;\\\\n hold >>>= op;\\\\n bits -= op;\\\\n op = (here >>> 16) & 0xff/*here.op*/;\\\\n\\\\n if (op & 16) { /* distance base */\\\\n dist = here & 0xffff/*here.val*/;\\\\n op &= 15; /* number of extra bits */\\\\n if (bits < op) {\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n if (bits < op) {\\\\n hold += input[_in++] << bits;\\\\n bits += 8;\\\\n }\\\\n }\\\\n dist += hold & ((1 << op) - 1);\\\\n//#ifdef INFLATE_STRICT\\\\n if (dist > dmax) {\\\\n strm.msg = 'invalid distance too far back';\\\\n state.mode = BAD;\\\\n break top;\\\\n }\\\\n//#endif\\\\n hold >>>= op;\\\\n bits -= op;\\\\n //Tracevv((stderr, \\\\\\\"inflate: distance %u\\\\\\\\n\\\\\\\", dist));\\\\n op = _out - beg; /* max distance in output */\\\\n if (dist > op) { /* see if copy from window */\\\\n op = dist - op; /* distance back in window */\\\\n if (op > whave) {\\\\n if (state.sane) {\\\\n strm.msg = 'invalid distance too far back';\\\\n state.mode = BAD;\\\\n break top;\\\\n }\\\\n\\\\n// (!) 
This block is disabled in zlib defaults,\\\\n// don't enable it for binary compatibility\\\\n//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR\\\\n// if (len <= op - whave) {\\\\n// do {\\\\n// output[_out++] = 0;\\\\n// } while (--len);\\\\n// continue top;\\\\n// }\\\\n// len -= op - whave;\\\\n// do {\\\\n// output[_out++] = 0;\\\\n// } while (--op > whave);\\\\n// if (op === 0) {\\\\n// from = _out - dist;\\\\n// do {\\\\n// output[_out++] = output[from++];\\\\n// } while (--len);\\\\n// continue top;\\\\n// }\\\\n//#endif\\\\n }\\\\n from = 0; // window index\\\\n from_source = s_window;\\\\n if (wnext === 0) { /* very common case */\\\\n from += wsize - op;\\\\n if (op < len) { /* some from window */\\\\n len -= op;\\\\n do {\\\\n output[_out++] = s_window[from++];\\\\n } while (--op);\\\\n from = _out - dist; /* rest from output */\\\\n from_source = output;\\\\n }\\\\n }\\\\n else if (wnext < op) { /* wrap around window */\\\\n from += wsize + wnext - op;\\\\n op -= wnext;\\\\n if (op < len) { /* some from end of window */\\\\n len -= op;\\\\n do {\\\\n output[_out++] = s_window[from++];\\\\n } while (--op);\\\\n from = 0;\\\\n if (wnext < len) { /* some from start of window */\\\\n op = wnext;\\\\n len -= op;\\\\n do {\\\\n output[_out++] = s_window[from++];\\\\n } while (--op);\\\\n from = _out - dist; /* rest from output */\\\\n from_source = output;\\\\n }\\\\n }\\\\n }\\\\n else { /* contiguous in window */\\\\n from += wnext - op;\\\\n if (op < len) { /* some from window */\\\\n len -= op;\\\\n do {\\\\n output[_out++] = s_window[from++];\\\\n } while (--op);\\\\n from = _out - dist; /* rest from output */\\\\n from_source = output;\\\\n }\\\\n }\\\\n while (len > 2) {\\\\n output[_out++] = from_source[from++];\\\\n output[_out++] = from_source[from++];\\\\n output[_out++] = from_source[from++];\\\\n len -= 3;\\\\n }\\\\n if (len) {\\\\n output[_out++] = from_source[from++];\\\\n if (len > 1) {\\\\n output[_out++] = from_source[from++];\\\\n }\\\\n }\\\\n }\\\\n else {\\\\n from = _out - dist; /* copy direct from output */\\\\n do { /* minimum length is three */\\\\n output[_out++] = output[from++];\\\\n output[_out++] = output[from++];\\\\n output[_out++] = output[from++];\\\\n len -= 3;\\\\n } while (len > 2);\\\\n if (len) {\\\\n output[_out++] = output[from++];\\\\n if (len > 1) {\\\\n output[_out++] = output[from++];\\\\n }\\\\n }\\\\n }\\\\n }\\\\n else if ((op & 64) === 0) { /* 2nd level distance code */\\\\n here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];\\\\n continue dodist;\\\\n }\\\\n else {\\\\n strm.msg = 'invalid distance code';\\\\n state.mode = BAD;\\\\n break top;\\\\n }\\\\n\\\\n break; // need to emulate goto via \\\\\\\"continue\\\\\\\"\\\\n }\\\\n }\\\\n else if ((op & 64) === 0) { /* 2nd level length code */\\\\n here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];\\\\n continue dolen;\\\\n }\\\\n else if (op & 32) { /* end-of-block */\\\\n //Tracevv((stderr, \\\\\\\"inflate: end of block\\\\\\\\n\\\\\\\"));\\\\n state.mode = TYPE;\\\\n break top;\\\\n }\\\\n else {\\\\n strm.msg = 'invalid literal/length code';\\\\n state.mode = BAD;\\\\n break top;\\\\n }\\\\n\\\\n break; // need to emulate goto via \\\\\\\"continue\\\\\\\"\\\\n }\\\\n } while (_in < last && _out < end);\\\\n\\\\n /* return unused bytes (on entry, bits < 8, so in won't go too far back) */\\\\n len = bits >> 3;\\\\n _in -= len;\\\\n bits -= len << 3;\\\\n hold &= (1 << bits) - 1;\\\\n\\\\n /* update state and return */\\\\n strm.next_in = 
_in;\\\\n strm.next_out = _out;\\\\n strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last));\\\\n strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end));\\\\n state.hold = hold;\\\\n state.bits = bits;\\\\n return;\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inffast.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/inflate.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/inflate.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\nvar utils = __webpack_require__(/*! ../utils/common */ \\\\\\\"./node_modules/pako/lib/utils/common.js\\\\\\\");\\\\nvar adler32 = __webpack_require__(/*! ./adler32 */ \\\\\\\"./node_modules/pako/lib/zlib/adler32.js\\\\\\\");\\\\nvar crc32 = __webpack_require__(/*! ./crc32 */ \\\\\\\"./node_modules/pako/lib/zlib/crc32.js\\\\\\\");\\\\nvar inflate_fast = __webpack_require__(/*! ./inffast */ \\\\\\\"./node_modules/pako/lib/zlib/inffast.js\\\\\\\");\\\\nvar inflate_table = __webpack_require__(/*! ./inftrees */ \\\\\\\"./node_modules/pako/lib/zlib/inftrees.js\\\\\\\");\\\\n\\\\nvar CODES = 0;\\\\nvar LENS = 1;\\\\nvar DISTS = 2;\\\\n\\\\n/* Public constants ==========================================================*/\\\\n/* ===========================================================================*/\\\\n\\\\n\\\\n/* Allowed flush values; see deflate() and inflate() below for details */\\\\n//var Z_NO_FLUSH = 0;\\\\n//var Z_PARTIAL_FLUSH = 1;\\\\n//var Z_SYNC_FLUSH = 2;\\\\n//var Z_FULL_FLUSH = 3;\\\\nvar Z_FINISH = 4;\\\\nvar Z_BLOCK = 5;\\\\nvar Z_TREES = 6;\\\\n\\\\n\\\\n/* Return codes for the compression/decompression functions. 
Negative values\\\\n * are errors, positive values are used for special but normal events.\\\\n */\\\\nvar Z_OK = 0;\\\\nvar Z_STREAM_END = 1;\\\\nvar Z_NEED_DICT = 2;\\\\n//var Z_ERRNO = -1;\\\\nvar Z_STREAM_ERROR = -2;\\\\nvar Z_DATA_ERROR = -3;\\\\nvar Z_MEM_ERROR = -4;\\\\nvar Z_BUF_ERROR = -5;\\\\n//var Z_VERSION_ERROR = -6;\\\\n\\\\n/* The deflate compression method */\\\\nvar Z_DEFLATED = 8;\\\\n\\\\n\\\\n/* STATES ====================================================================*/\\\\n/* ===========================================================================*/\\\\n\\\\n\\\\nvar HEAD = 1; /* i: waiting for magic header */\\\\nvar FLAGS = 2; /* i: waiting for method and flags (gzip) */\\\\nvar TIME = 3; /* i: waiting for modification time (gzip) */\\\\nvar OS = 4; /* i: waiting for extra flags and operating system (gzip) */\\\\nvar EXLEN = 5; /* i: waiting for extra length (gzip) */\\\\nvar EXTRA = 6; /* i: waiting for extra bytes (gzip) */\\\\nvar NAME = 7; /* i: waiting for end of file name (gzip) */\\\\nvar COMMENT = 8; /* i: waiting for end of comment (gzip) */\\\\nvar HCRC = 9; /* i: waiting for header crc (gzip) */\\\\nvar DICTID = 10; /* i: waiting for dictionary check value */\\\\nvar DICT = 11; /* waiting for inflateSetDictionary() call */\\\\nvar TYPE = 12; /* i: waiting for type bits, including last-flag bit */\\\\nvar TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */\\\\nvar STORED = 14; /* i: waiting for stored size (length and complement) */\\\\nvar COPY_ = 15; /* i/o: same as COPY below, but only first time in */\\\\nvar COPY = 16; /* i/o: waiting for input or output to copy stored block */\\\\nvar TABLE = 17; /* i: waiting for dynamic block table lengths */\\\\nvar LENLENS = 18; /* i: waiting for code length code lengths */\\\\nvar CODELENS = 19; /* i: waiting for length/lit and distance code lengths */\\\\nvar LEN_ = 20; /* i: same as LEN below, but only first time in */\\\\nvar LEN = 21; /* i: waiting for length/lit/eob code */\\\\nvar LENEXT = 22; /* i: waiting for length extra bits */\\\\nvar DIST = 23; /* i: waiting for distance code */\\\\nvar DISTEXT = 24; /* i: waiting for distance extra bits */\\\\nvar MATCH = 25; /* o: waiting for output space to copy string */\\\\nvar LIT = 26; /* o: waiting for output space to write literal */\\\\nvar CHECK = 27; /* i: waiting for 32-bit check value */\\\\nvar LENGTH = 28; /* i: waiting for 32-bit length (gzip) */\\\\nvar DONE = 29; /* finished check, done -- remain here until reset */\\\\nvar BAD = 30; /* got a data error -- remain here until reset */\\\\nvar MEM = 31; /* got an inflate() memory error -- remain here until reset */\\\\nvar SYNC = 32; /* looking for synchronization bytes to restart inflate() */\\\\n\\\\n/* ===========================================================================*/\\\\n\\\\n\\\\n\\\\nvar ENOUGH_LENS = 852;\\\\nvar ENOUGH_DISTS = 592;\\\\n//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);\\\\n\\\\nvar MAX_WBITS = 15;\\\\n/* 32K LZ77 window */\\\\nvar DEF_WBITS = MAX_WBITS;\\\\n\\\\n\\\\nfunction zswap32(q) {\\\\n return (((q >>> 24) & 0xff) +\\\\n ((q >>> 8) & 0xff00) +\\\\n ((q & 0xff00) << 8) +\\\\n ((q & 0xff) << 24));\\\\n}\\\\n\\\\n\\\\nfunction InflateState() {\\\\n this.mode = 0; /* current inflate mode */\\\\n this.last = false; /* true if processing last block */\\\\n this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */\\\\n this.havedict = false; /* true if dictionary provided */\\\\n this.flags = 0; /* gzip header method and flags (0 if zlib) */\\\\n 
this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */\\\\n this.check = 0; /* protected copy of check value */\\\\n this.total = 0; /* protected copy of output count */\\\\n // TODO: may be {}\\\\n this.head = null; /* where to save gzip header information */\\\\n\\\\n /* sliding window */\\\\n this.wbits = 0; /* log base 2 of requested window size */\\\\n this.wsize = 0; /* window size or zero if not using window */\\\\n this.whave = 0; /* valid bytes in the window */\\\\n this.wnext = 0; /* window write index */\\\\n this.window = null; /* allocated sliding window, if needed */\\\\n\\\\n /* bit accumulator */\\\\n this.hold = 0; /* input bit accumulator */\\\\n this.bits = 0; /* number of bits in \\\\\\\"in\\\\\\\" */\\\\n\\\\n /* for string and stored block copying */\\\\n this.length = 0; /* literal or length of data to copy */\\\\n this.offset = 0; /* distance back to copy string from */\\\\n\\\\n /* for table and code decoding */\\\\n this.extra = 0; /* extra bits needed */\\\\n\\\\n /* fixed and dynamic code tables */\\\\n this.lencode = null; /* starting table for length/literal codes */\\\\n this.distcode = null; /* starting table for distance codes */\\\\n this.lenbits = 0; /* index bits for lencode */\\\\n this.distbits = 0; /* index bits for distcode */\\\\n\\\\n /* dynamic table building */\\\\n this.ncode = 0; /* number of code length code lengths */\\\\n this.nlen = 0; /* number of length code lengths */\\\\n this.ndist = 0; /* number of distance code lengths */\\\\n this.have = 0; /* number of code lengths in lens[] */\\\\n this.next = null; /* next available space in codes[] */\\\\n\\\\n this.lens = new utils.Buf16(320); /* temporary storage for code lengths */\\\\n this.work = new utils.Buf16(288); /* work area for code table building */\\\\n\\\\n /*\\\\n because we don't have pointers in js, we use lencode and distcode directly\\\\n as buffers so we don't need codes\\\\n */\\\\n //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */\\\\n this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */\\\\n this.distdyn = null; /* dynamic table for distance codes (JS specific) */\\\\n this.sane = 0; /* if false, allow invalid distance too far */\\\\n this.back = 0; /* bits back of last unprocessed length/lit */\\\\n this.was = 0; /* initial length of match */\\\\n}\\\\n\\\\nfunction inflateResetKeep(strm) {\\\\n var state;\\\\n\\\\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\\\\n state = strm.state;\\\\n strm.total_in = strm.total_out = state.total = 0;\\\\n strm.msg = ''; /*Z_NULL*/\\\\n if (state.wrap) { /* to support ill-conceived Java test suite */\\\\n strm.adler = state.wrap & 1;\\\\n }\\\\n state.mode = HEAD;\\\\n state.last = 0;\\\\n state.havedict = 0;\\\\n state.dmax = 32768;\\\\n state.head = null/*Z_NULL*/;\\\\n state.hold = 0;\\\\n state.bits = 0;\\\\n //state.lencode = state.distcode = state.next = state.codes;\\\\n state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS);\\\\n state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS);\\\\n\\\\n state.sane = 1;\\\\n state.back = -1;\\\\n //Tracev((stderr, \\\\\\\"inflate: reset\\\\\\\\n\\\\\\\"));\\\\n return Z_OK;\\\\n}\\\\n\\\\nfunction inflateReset(strm) {\\\\n var state;\\\\n\\\\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\\\\n state = strm.state;\\\\n state.wsize = 0;\\\\n state.whave = 0;\\\\n state.wnext = 0;\\\\n return inflateResetKeep(strm);\\\\n\\\\n}\\\\n\\\\nfunction inflateReset2(strm, windowBits) {\\\\n var wrap;\\\\n var 
state;\\\\n\\\\n /* get the state */\\\\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\\\\n state = strm.state;\\\\n\\\\n /* extract wrap request from windowBits parameter */\\\\n if (windowBits < 0) {\\\\n wrap = 0;\\\\n windowBits = -windowBits;\\\\n }\\\\n else {\\\\n wrap = (windowBits >> 4) + 1;\\\\n if (windowBits < 48) {\\\\n windowBits &= 15;\\\\n }\\\\n }\\\\n\\\\n /* set number of window bits, free window if different */\\\\n if (windowBits && (windowBits < 8 || windowBits > 15)) {\\\\n return Z_STREAM_ERROR;\\\\n }\\\\n if (state.window !== null && state.wbits !== windowBits) {\\\\n state.window = null;\\\\n }\\\\n\\\\n /* update state and reset the rest of it */\\\\n state.wrap = wrap;\\\\n state.wbits = windowBits;\\\\n return inflateReset(strm);\\\\n}\\\\n\\\\nfunction inflateInit2(strm, windowBits) {\\\\n var ret;\\\\n var state;\\\\n\\\\n if (!strm) { return Z_STREAM_ERROR; }\\\\n //strm.msg = Z_NULL; /* in case we return an error */\\\\n\\\\n state = new InflateState();\\\\n\\\\n //if (state === Z_NULL) return Z_MEM_ERROR;\\\\n //Tracev((stderr, \\\\\\\"inflate: allocated\\\\\\\\n\\\\\\\"));\\\\n strm.state = state;\\\\n state.window = null/*Z_NULL*/;\\\\n ret = inflateReset2(strm, windowBits);\\\\n if (ret !== Z_OK) {\\\\n strm.state = null/*Z_NULL*/;\\\\n }\\\\n return ret;\\\\n}\\\\n\\\\nfunction inflateInit(strm) {\\\\n return inflateInit2(strm, DEF_WBITS);\\\\n}\\\\n\\\\n\\\\n/*\\\\n Return state with length and distance decoding tables and index sizes set to\\\\n fixed code decoding. Normally this returns fixed tables from inffixed.h.\\\\n If BUILDFIXED is defined, then instead this routine builds the tables the\\\\n first time it's called, and returns those tables the first time and\\\\n thereafter. This reduces the size of the code by about 2K bytes, in\\\\n exchange for a little execution time. However, BUILDFIXED should not be\\\\n used for threaded applications, since the rewriting of the tables and virgin\\\\n may not be thread-safe.\\\\n */\\\\nvar virgin = true;\\\\n\\\\nvar lenfix, distfix; // We have no pointers in JS, so keep tables separate\\\\n\\\\nfunction fixedtables(state) {\\\\n /* build fixed huffman tables if first call (may not be thread safe) */\\\\n if (virgin) {\\\\n var sym;\\\\n\\\\n lenfix = new utils.Buf32(512);\\\\n distfix = new utils.Buf32(32);\\\\n\\\\n /* literal/length table */\\\\n sym = 0;\\\\n while (sym < 144) { state.lens[sym++] = 8; }\\\\n while (sym < 256) { state.lens[sym++] = 9; }\\\\n while (sym < 280) { state.lens[sym++] = 7; }\\\\n while (sym < 288) { state.lens[sym++] = 8; }\\\\n\\\\n inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 });\\\\n\\\\n /* distance table */\\\\n sym = 0;\\\\n while (sym < 32) { state.lens[sym++] = 5; }\\\\n\\\\n inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 });\\\\n\\\\n /* do this just once */\\\\n virgin = false;\\\\n }\\\\n\\\\n state.lencode = lenfix;\\\\n state.lenbits = 9;\\\\n state.distcode = distfix;\\\\n state.distbits = 5;\\\\n}\\\\n\\\\n\\\\n/*\\\\n Update the window with the last wsize (normally 32K) bytes written before\\\\n returning. If window does not exist yet, create it. 
This is only called\\\\n when a window is already in use, or when output has been written during this\\\\n inflate call, but the end of the deflate stream has not been reached yet.\\\\n It is also called to create a window for dictionary data when a dictionary\\\\n is loaded.\\\\n\\\\n Providing output buffers larger than 32K to inflate() should provide a speed\\\\n advantage, since only the last 32K of output is copied to the sliding window\\\\n upon return from inflate(), and since all distances after the first 32K of\\\\n output will fall in the output data, making match copies simpler and faster.\\\\n The advantage may be dependent on the size of the processor's data caches.\\\\n */\\\\nfunction updatewindow(strm, src, end, copy) {\\\\n var dist;\\\\n var state = strm.state;\\\\n\\\\n /* if it hasn't been done already, allocate space for the window */\\\\n if (state.window === null) {\\\\n state.wsize = 1 << state.wbits;\\\\n state.wnext = 0;\\\\n state.whave = 0;\\\\n\\\\n state.window = new utils.Buf8(state.wsize);\\\\n }\\\\n\\\\n /* copy state->wsize or less output bytes into the circular window */\\\\n if (copy >= state.wsize) {\\\\n utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0);\\\\n state.wnext = 0;\\\\n state.whave = state.wsize;\\\\n }\\\\n else {\\\\n dist = state.wsize - state.wnext;\\\\n if (dist > copy) {\\\\n dist = copy;\\\\n }\\\\n //zmemcpy(state->window + state->wnext, end - copy, dist);\\\\n utils.arraySet(state.window, src, end - copy, dist, state.wnext);\\\\n copy -= dist;\\\\n if (copy) {\\\\n //zmemcpy(state->window, end - copy, copy);\\\\n utils.arraySet(state.window, src, end - copy, copy, 0);\\\\n state.wnext = copy;\\\\n state.whave = state.wsize;\\\\n }\\\\n else {\\\\n state.wnext += dist;\\\\n if (state.wnext === state.wsize) { state.wnext = 0; }\\\\n if (state.whave < state.wsize) { state.whave += dist; }\\\\n }\\\\n }\\\\n return 0;\\\\n}\\\\n\\\\nfunction inflate(strm, flush) {\\\\n var state;\\\\n var input, output; // input/output buffers\\\\n var next; /* next input INDEX */\\\\n var put; /* next output INDEX */\\\\n var have, left; /* available input and output */\\\\n var hold; /* bit buffer */\\\\n var bits; /* bits in bit buffer */\\\\n var _in, _out; /* save starting available input and output */\\\\n var copy; /* number of stored or match bytes to copy */\\\\n var from; /* where to copy match bytes from */\\\\n var from_source;\\\\n var here = 0; /* current decoding table entry */\\\\n var here_bits, here_op, here_val; // paked \\\\\\\"here\\\\\\\" denormalized (JS specific)\\\\n //var last; /* parent table entry */\\\\n var last_bits, last_op, last_val; // paked \\\\\\\"last\\\\\\\" denormalized (JS specific)\\\\n var len; /* length to copy for repeats, bits to drop */\\\\n var ret; /* return code */\\\\n var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */\\\\n var opts;\\\\n\\\\n var n; // temporary var for NEED_BITS\\\\n\\\\n var order = /* permutation of code lengths */\\\\n [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ];\\\\n\\\\n\\\\n if (!strm || !strm.state || !strm.output ||\\\\n (!strm.input && strm.avail_in !== 0)) {\\\\n return Z_STREAM_ERROR;\\\\n }\\\\n\\\\n state = strm.state;\\\\n if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */\\\\n\\\\n\\\\n //--- LOAD() ---\\\\n put = strm.next_out;\\\\n output = strm.output;\\\\n left = strm.avail_out;\\\\n next = strm.next_in;\\\\n input = strm.input;\\\\n have = strm.avail_in;\\\\n hold = state.hold;\\\\n 
bits = state.bits;\\\\n //---\\\\n\\\\n _in = have;\\\\n _out = left;\\\\n ret = Z_OK;\\\\n\\\\n inf_leave: // goto emulation\\\\n for (;;) {\\\\n switch (state.mode) {\\\\n case HEAD:\\\\n if (state.wrap === 0) {\\\\n state.mode = TYPEDO;\\\\n break;\\\\n }\\\\n //=== NEEDBITS(16);\\\\n while (bits < 16) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */\\\\n state.check = 0/*crc32(0L, Z_NULL, 0)*/;\\\\n //=== CRC2(state.check, hold);\\\\n hbuf[0] = hold & 0xff;\\\\n hbuf[1] = (hold >>> 8) & 0xff;\\\\n state.check = crc32(state.check, hbuf, 2, 0);\\\\n //===//\\\\n\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = FLAGS;\\\\n break;\\\\n }\\\\n state.flags = 0; /* expect zlib header */\\\\n if (state.head) {\\\\n state.head.done = false;\\\\n }\\\\n if (!(state.wrap & 1) || /* check if zlib header allowed */\\\\n (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) {\\\\n strm.msg = 'incorrect header check';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) {\\\\n strm.msg = 'unknown compression method';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //--- DROPBITS(4) ---//\\\\n hold >>>= 4;\\\\n bits -= 4;\\\\n //---//\\\\n len = (hold & 0x0f)/*BITS(4)*/ + 8;\\\\n if (state.wbits === 0) {\\\\n state.wbits = len;\\\\n }\\\\n else if (len > state.wbits) {\\\\n strm.msg = 'invalid window size';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n state.dmax = 1 << len;\\\\n //Tracev((stderr, \\\\\\\"inflate: zlib header ok\\\\\\\\n\\\\\\\"));\\\\n strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;\\\\n state.mode = hold & 0x200 ? DICTID : TYPE;\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n break;\\\\n case FLAGS:\\\\n //=== NEEDBITS(16); */\\\\n while (bits < 16) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.flags = hold;\\\\n if ((state.flags & 0xff) !== Z_DEFLATED) {\\\\n strm.msg = 'unknown compression method';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n if (state.flags & 0xe000) {\\\\n strm.msg = 'unknown header flags set';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n if (state.head) {\\\\n state.head.text = ((hold >> 8) & 1);\\\\n }\\\\n if (state.flags & 0x0200) {\\\\n //=== CRC2(state.check, hold);\\\\n hbuf[0] = hold & 0xff;\\\\n hbuf[1] = (hold >>> 8) & 0xff;\\\\n state.check = crc32(state.check, hbuf, 2, 0);\\\\n //===//\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = TIME;\\\\n /* falls through */\\\\n case TIME:\\\\n //=== NEEDBITS(32); */\\\\n while (bits < 32) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n if (state.head) {\\\\n state.head.time = hold;\\\\n }\\\\n if (state.flags & 0x0200) {\\\\n //=== CRC4(state.check, hold)\\\\n hbuf[0] = hold & 0xff;\\\\n hbuf[1] = (hold >>> 8) & 0xff;\\\\n hbuf[2] = (hold >>> 16) & 0xff;\\\\n hbuf[3] = (hold >>> 24) & 0xff;\\\\n state.check = crc32(state.check, hbuf, 4, 0);\\\\n //===\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = OS;\\\\n /* falls through */\\\\n case OS:\\\\n //=== NEEDBITS(16); */\\\\n while (bits < 16) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 
8;\\\\n }\\\\n //===//\\\\n if (state.head) {\\\\n state.head.xflags = (hold & 0xff);\\\\n state.head.os = (hold >> 8);\\\\n }\\\\n if (state.flags & 0x0200) {\\\\n //=== CRC2(state.check, hold);\\\\n hbuf[0] = hold & 0xff;\\\\n hbuf[1] = (hold >>> 8) & 0xff;\\\\n state.check = crc32(state.check, hbuf, 2, 0);\\\\n //===//\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = EXLEN;\\\\n /* falls through */\\\\n case EXLEN:\\\\n if (state.flags & 0x0400) {\\\\n //=== NEEDBITS(16); */\\\\n while (bits < 16) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.length = hold;\\\\n if (state.head) {\\\\n state.head.extra_len = hold;\\\\n }\\\\n if (state.flags & 0x0200) {\\\\n //=== CRC2(state.check, hold);\\\\n hbuf[0] = hold & 0xff;\\\\n hbuf[1] = (hold >>> 8) & 0xff;\\\\n state.check = crc32(state.check, hbuf, 2, 0);\\\\n //===//\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n }\\\\n else if (state.head) {\\\\n state.head.extra = null/*Z_NULL*/;\\\\n }\\\\n state.mode = EXTRA;\\\\n /* falls through */\\\\n case EXTRA:\\\\n if (state.flags & 0x0400) {\\\\n copy = state.length;\\\\n if (copy > have) { copy = have; }\\\\n if (copy) {\\\\n if (state.head) {\\\\n len = state.head.extra_len - state.length;\\\\n if (!state.head.extra) {\\\\n // Use untyped array for more convenient processing later\\\\n state.head.extra = new Array(state.head.extra_len);\\\\n }\\\\n utils.arraySet(\\\\n state.head.extra,\\\\n input,\\\\n next,\\\\n // extra field is limited to 65536 bytes\\\\n // - no need for additional size check\\\\n copy,\\\\n /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/\\\\n len\\\\n );\\\\n //zmemcpy(state.head.extra + len, next,\\\\n // len + copy > state.head.extra_max ?\\\\n // state.head.extra_max - len : copy);\\\\n }\\\\n if (state.flags & 0x0200) {\\\\n state.check = crc32(state.check, input, copy, next);\\\\n }\\\\n have -= copy;\\\\n next += copy;\\\\n state.length -= copy;\\\\n }\\\\n if (state.length) { break inf_leave; }\\\\n }\\\\n state.length = 0;\\\\n state.mode = NAME;\\\\n /* falls through */\\\\n case NAME:\\\\n if (state.flags & 0x0800) {\\\\n if (have === 0) { break inf_leave; }\\\\n copy = 0;\\\\n do {\\\\n // TODO: 2 or 1 bytes?\\\\n len = input[next + copy++];\\\\n /* use constant limit because in js we should not preallocate memory */\\\\n if (state.head && len &&\\\\n (state.length < 65536 /*state.head.name_max*/)) {\\\\n state.head.name += String.fromCharCode(len);\\\\n }\\\\n } while (len && copy < have);\\\\n\\\\n if (state.flags & 0x0200) {\\\\n state.check = crc32(state.check, input, copy, next);\\\\n }\\\\n have -= copy;\\\\n next += copy;\\\\n if (len) { break inf_leave; }\\\\n }\\\\n else if (state.head) {\\\\n state.head.name = null;\\\\n }\\\\n state.length = 0;\\\\n state.mode = COMMENT;\\\\n /* falls through */\\\\n case COMMENT:\\\\n if (state.flags & 0x1000) {\\\\n if (have === 0) { break inf_leave; }\\\\n copy = 0;\\\\n do {\\\\n len = input[next + copy++];\\\\n /* use constant limit because in js we should not preallocate memory */\\\\n if (state.head && len &&\\\\n (state.length < 65536 /*state.head.comm_max*/)) {\\\\n state.head.comment += String.fromCharCode(len);\\\\n }\\\\n } while (len && copy < have);\\\\n if (state.flags & 0x0200) {\\\\n state.check = crc32(state.check, input, copy, next);\\\\n }\\\\n have -= copy;\\\\n next += copy;\\\\n if (len) { break 
inf_leave; }\\\\n }\\\\n else if (state.head) {\\\\n state.head.comment = null;\\\\n }\\\\n state.mode = HCRC;\\\\n /* falls through */\\\\n case HCRC:\\\\n if (state.flags & 0x0200) {\\\\n //=== NEEDBITS(16); */\\\\n while (bits < 16) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n if (hold !== (state.check & 0xffff)) {\\\\n strm.msg = 'header crc mismatch';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n }\\\\n if (state.head) {\\\\n state.head.hcrc = ((state.flags >> 9) & 1);\\\\n state.head.done = true;\\\\n }\\\\n strm.adler = state.check = 0;\\\\n state.mode = TYPE;\\\\n break;\\\\n case DICTID:\\\\n //=== NEEDBITS(32); */\\\\n while (bits < 32) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n strm.adler = state.check = zswap32(hold);\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = DICT;\\\\n /* falls through */\\\\n case DICT:\\\\n if (state.havedict === 0) {\\\\n //--- RESTORE() ---\\\\n strm.next_out = put;\\\\n strm.avail_out = left;\\\\n strm.next_in = next;\\\\n strm.avail_in = have;\\\\n state.hold = hold;\\\\n state.bits = bits;\\\\n //---\\\\n return Z_NEED_DICT;\\\\n }\\\\n strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;\\\\n state.mode = TYPE;\\\\n /* falls through */\\\\n case TYPE:\\\\n if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; }\\\\n /* falls through */\\\\n case TYPEDO:\\\\n if (state.last) {\\\\n //--- BYTEBITS() ---//\\\\n hold >>>= bits & 7;\\\\n bits -= bits & 7;\\\\n //---//\\\\n state.mode = CHECK;\\\\n break;\\\\n }\\\\n //=== NEEDBITS(3); */\\\\n while (bits < 3) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.last = (hold & 0x01)/*BITS(1)*/;\\\\n //--- DROPBITS(1) ---//\\\\n hold >>>= 1;\\\\n bits -= 1;\\\\n //---//\\\\n\\\\n switch ((hold & 0x03)/*BITS(2)*/) {\\\\n case 0: /* stored block */\\\\n //Tracev((stderr, \\\\\\\"inflate: stored block%s\\\\\\\\n\\\\\\\",\\\\n // state.last ? \\\\\\\" (last)\\\\\\\" : \\\\\\\"\\\\\\\"));\\\\n state.mode = STORED;\\\\n break;\\\\n case 1: /* fixed block */\\\\n fixedtables(state);\\\\n //Tracev((stderr, \\\\\\\"inflate: fixed codes block%s\\\\\\\\n\\\\\\\",\\\\n // state.last ? \\\\\\\" (last)\\\\\\\" : \\\\\\\"\\\\\\\"));\\\\n state.mode = LEN_; /* decode codes */\\\\n if (flush === Z_TREES) {\\\\n //--- DROPBITS(2) ---//\\\\n hold >>>= 2;\\\\n bits -= 2;\\\\n //---//\\\\n break inf_leave;\\\\n }\\\\n break;\\\\n case 2: /* dynamic block */\\\\n //Tracev((stderr, \\\\\\\"inflate: dynamic codes block%s\\\\\\\\n\\\\\\\",\\\\n // state.last ? 
\\\\\\\" (last)\\\\\\\" : \\\\\\\"\\\\\\\"));\\\\n state.mode = TABLE;\\\\n break;\\\\n case 3:\\\\n strm.msg = 'invalid block type';\\\\n state.mode = BAD;\\\\n }\\\\n //--- DROPBITS(2) ---//\\\\n hold >>>= 2;\\\\n bits -= 2;\\\\n //---//\\\\n break;\\\\n case STORED:\\\\n //--- BYTEBITS() ---// /* go to byte boundary */\\\\n hold >>>= bits & 7;\\\\n bits -= bits & 7;\\\\n //---//\\\\n //=== NEEDBITS(32); */\\\\n while (bits < 32) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) {\\\\n strm.msg = 'invalid stored block lengths';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n state.length = hold & 0xffff;\\\\n //Tracev((stderr, \\\\\\\"inflate: stored length %u\\\\\\\\n\\\\\\\",\\\\n // state.length));\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n state.mode = COPY_;\\\\n if (flush === Z_TREES) { break inf_leave; }\\\\n /* falls through */\\\\n case COPY_:\\\\n state.mode = COPY;\\\\n /* falls through */\\\\n case COPY:\\\\n copy = state.length;\\\\n if (copy) {\\\\n if (copy > have) { copy = have; }\\\\n if (copy > left) { copy = left; }\\\\n if (copy === 0) { break inf_leave; }\\\\n //--- zmemcpy(put, next, copy); ---\\\\n utils.arraySet(output, input, next, copy, put);\\\\n //---//\\\\n have -= copy;\\\\n next += copy;\\\\n left -= copy;\\\\n put += copy;\\\\n state.length -= copy;\\\\n break;\\\\n }\\\\n //Tracev((stderr, \\\\\\\"inflate: stored end\\\\\\\\n\\\\\\\"));\\\\n state.mode = TYPE;\\\\n break;\\\\n case TABLE:\\\\n //=== NEEDBITS(14); */\\\\n while (bits < 14) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257;\\\\n //--- DROPBITS(5) ---//\\\\n hold >>>= 5;\\\\n bits -= 5;\\\\n //---//\\\\n state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1;\\\\n //--- DROPBITS(5) ---//\\\\n hold >>>= 5;\\\\n bits -= 5;\\\\n //---//\\\\n state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4;\\\\n //--- DROPBITS(4) ---//\\\\n hold >>>= 4;\\\\n bits -= 4;\\\\n //---//\\\\n//#ifndef PKZIP_BUG_WORKAROUND\\\\n if (state.nlen > 286 || state.ndist > 30) {\\\\n strm.msg = 'too many length or distance symbols';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n//#endif\\\\n //Tracev((stderr, \\\\\\\"inflate: table sizes ok\\\\\\\\n\\\\\\\"));\\\\n state.have = 0;\\\\n state.mode = LENLENS;\\\\n /* falls through */\\\\n case LENLENS:\\\\n while (state.have < state.ncode) {\\\\n //=== NEEDBITS(3);\\\\n while (bits < 3) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.lens[order[state.have++]] = (hold & 0x07);//BITS(3);\\\\n //--- DROPBITS(3) ---//\\\\n hold >>>= 3;\\\\n bits -= 3;\\\\n //---//\\\\n }\\\\n while (state.have < 19) {\\\\n state.lens[order[state.have++]] = 0;\\\\n }\\\\n // We have separate tables & no pointers. 
2 commented lines below not needed.\\\\n //state.next = state.codes;\\\\n //state.lencode = state.next;\\\\n // Switch to use dynamic table\\\\n state.lencode = state.lendyn;\\\\n state.lenbits = 7;\\\\n\\\\n opts = { bits: state.lenbits };\\\\n ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts);\\\\n state.lenbits = opts.bits;\\\\n\\\\n if (ret) {\\\\n strm.msg = 'invalid code lengths set';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //Tracev((stderr, \\\\\\\"inflate: code lengths ok\\\\\\\\n\\\\\\\"));\\\\n state.have = 0;\\\\n state.mode = CODELENS;\\\\n /* falls through */\\\\n case CODELENS:\\\\n while (state.have < state.nlen + state.ndist) {\\\\n for (;;) {\\\\n here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/\\\\n here_bits = here >>> 24;\\\\n here_op = (here >>> 16) & 0xff;\\\\n here_val = here & 0xffff;\\\\n\\\\n if ((here_bits) <= bits) { break; }\\\\n //--- PULLBYTE() ---//\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n //---//\\\\n }\\\\n if (here_val < 16) {\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n state.lens[state.have++] = here_val;\\\\n }\\\\n else {\\\\n if (here_val === 16) {\\\\n //=== NEEDBITS(here.bits + 2);\\\\n n = here_bits + 2;\\\\n while (bits < n) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n if (state.have === 0) {\\\\n strm.msg = 'invalid bit length repeat';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n len = state.lens[state.have - 1];\\\\n copy = 3 + (hold & 0x03);//BITS(2);\\\\n //--- DROPBITS(2) ---//\\\\n hold >>>= 2;\\\\n bits -= 2;\\\\n //---//\\\\n }\\\\n else if (here_val === 17) {\\\\n //=== NEEDBITS(here.bits + 3);\\\\n n = here_bits + 3;\\\\n while (bits < n) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n len = 0;\\\\n copy = 3 + (hold & 0x07);//BITS(3);\\\\n //--- DROPBITS(3) ---//\\\\n hold >>>= 3;\\\\n bits -= 3;\\\\n //---//\\\\n }\\\\n else {\\\\n //=== NEEDBITS(here.bits + 7);\\\\n n = here_bits + 7;\\\\n while (bits < n) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n len = 0;\\\\n copy = 11 + (hold & 0x7f);//BITS(7);\\\\n //--- DROPBITS(7) ---//\\\\n hold >>>= 7;\\\\n bits -= 7;\\\\n //---//\\\\n }\\\\n if (state.have + copy > state.nlen + state.ndist) {\\\\n strm.msg = 'invalid bit length repeat';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n while (copy--) {\\\\n state.lens[state.have++] = len;\\\\n }\\\\n }\\\\n }\\\\n\\\\n /* handle error breaks in while */\\\\n if (state.mode === BAD) { break; }\\\\n\\\\n /* check for end-of-block code (better have one) */\\\\n if (state.lens[256] === 0) {\\\\n strm.msg = 'invalid code -- missing end-of-block';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n\\\\n /* build code tables -- note: do not change the lenbits or distbits\\\\n values here (9 and 6) without reading the comments in inftrees.h\\\\n concerning the ENOUGH constants, which depend on those 
values */\\\\n state.lenbits = 9;\\\\n\\\\n opts = { bits: state.lenbits };\\\\n ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts);\\\\n // We have separate tables & no pointers. 2 commented lines below not needed.\\\\n // state.next_index = opts.table_index;\\\\n state.lenbits = opts.bits;\\\\n // state.lencode = state.next;\\\\n\\\\n if (ret) {\\\\n strm.msg = 'invalid literal/lengths set';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n\\\\n state.distbits = 6;\\\\n //state.distcode.copy(state.codes);\\\\n // Switch to use dynamic table\\\\n state.distcode = state.distdyn;\\\\n opts = { bits: state.distbits };\\\\n ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts);\\\\n // We have separate tables & no pointers. 2 commented lines below not needed.\\\\n // state.next_index = opts.table_index;\\\\n state.distbits = opts.bits;\\\\n // state.distcode = state.next;\\\\n\\\\n if (ret) {\\\\n strm.msg = 'invalid distances set';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //Tracev((stderr, 'inflate: codes ok\\\\\\\\n'));\\\\n state.mode = LEN_;\\\\n if (flush === Z_TREES) { break inf_leave; }\\\\n /* falls through */\\\\n case LEN_:\\\\n state.mode = LEN;\\\\n /* falls through */\\\\n case LEN:\\\\n if (have >= 6 && left >= 258) {\\\\n //--- RESTORE() ---\\\\n strm.next_out = put;\\\\n strm.avail_out = left;\\\\n strm.next_in = next;\\\\n strm.avail_in = have;\\\\n state.hold = hold;\\\\n state.bits = bits;\\\\n //---\\\\n inflate_fast(strm, _out);\\\\n //--- LOAD() ---\\\\n put = strm.next_out;\\\\n output = strm.output;\\\\n left = strm.avail_out;\\\\n next = strm.next_in;\\\\n input = strm.input;\\\\n have = strm.avail_in;\\\\n hold = state.hold;\\\\n bits = state.bits;\\\\n //---\\\\n\\\\n if (state.mode === TYPE) {\\\\n state.back = -1;\\\\n }\\\\n break;\\\\n }\\\\n state.back = 0;\\\\n for (;;) {\\\\n here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/\\\\n here_bits = here >>> 24;\\\\n here_op = (here >>> 16) & 0xff;\\\\n here_val = here & 0xffff;\\\\n\\\\n if (here_bits <= bits) { break; }\\\\n //--- PULLBYTE() ---//\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n //---//\\\\n }\\\\n if (here_op && (here_op & 0xf0) === 0) {\\\\n last_bits = here_bits;\\\\n last_op = here_op;\\\\n last_val = here_val;\\\\n for (;;) {\\\\n here = state.lencode[last_val +\\\\n ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];\\\\n here_bits = here >>> 24;\\\\n here_op = (here >>> 16) & 0xff;\\\\n here_val = here & 0xffff;\\\\n\\\\n if ((last_bits + here_bits) <= bits) { break; }\\\\n //--- PULLBYTE() ---//\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n //---//\\\\n }\\\\n //--- DROPBITS(last.bits) ---//\\\\n hold >>>= last_bits;\\\\n bits -= last_bits;\\\\n //---//\\\\n state.back += last_bits;\\\\n }\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n state.back += here_bits;\\\\n state.length = here_val;\\\\n if (here_op === 0) {\\\\n //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?\\\\n // \\\\\\\"inflate: literal '%c'\\\\\\\\n\\\\\\\" :\\\\n // \\\\\\\"inflate: literal 0x%02x\\\\\\\\n\\\\\\\", here.val));\\\\n state.mode = LIT;\\\\n break;\\\\n }\\\\n if (here_op & 32) {\\\\n //Tracevv((stderr, \\\\\\\"inflate: end of block\\\\\\\\n\\\\\\\"));\\\\n 
state.back = -1;\\\\n state.mode = TYPE;\\\\n break;\\\\n }\\\\n if (here_op & 64) {\\\\n strm.msg = 'invalid literal/length code';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n state.extra = here_op & 15;\\\\n state.mode = LENEXT;\\\\n /* falls through */\\\\n case LENEXT:\\\\n if (state.extra) {\\\\n //=== NEEDBITS(state.extra);\\\\n n = state.extra;\\\\n while (bits < n) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;\\\\n //--- DROPBITS(state.extra) ---//\\\\n hold >>>= state.extra;\\\\n bits -= state.extra;\\\\n //---//\\\\n state.back += state.extra;\\\\n }\\\\n //Tracevv((stderr, \\\\\\\"inflate: length %u\\\\\\\\n\\\\\\\", state.length));\\\\n state.was = state.length;\\\\n state.mode = DIST;\\\\n /* falls through */\\\\n case DIST:\\\\n for (;;) {\\\\n here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/\\\\n here_bits = here >>> 24;\\\\n here_op = (here >>> 16) & 0xff;\\\\n here_val = here & 0xffff;\\\\n\\\\n if ((here_bits) <= bits) { break; }\\\\n //--- PULLBYTE() ---//\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n //---//\\\\n }\\\\n if ((here_op & 0xf0) === 0) {\\\\n last_bits = here_bits;\\\\n last_op = here_op;\\\\n last_val = here_val;\\\\n for (;;) {\\\\n here = state.distcode[last_val +\\\\n ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];\\\\n here_bits = here >>> 24;\\\\n here_op = (here >>> 16) & 0xff;\\\\n here_val = here & 0xffff;\\\\n\\\\n if ((last_bits + here_bits) <= bits) { break; }\\\\n //--- PULLBYTE() ---//\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n //---//\\\\n }\\\\n //--- DROPBITS(last.bits) ---//\\\\n hold >>>= last_bits;\\\\n bits -= last_bits;\\\\n //---//\\\\n state.back += last_bits;\\\\n }\\\\n //--- DROPBITS(here.bits) ---//\\\\n hold >>>= here_bits;\\\\n bits -= here_bits;\\\\n //---//\\\\n state.back += here_bits;\\\\n if (here_op & 64) {\\\\n strm.msg = 'invalid distance code';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n state.offset = here_val;\\\\n state.extra = (here_op) & 15;\\\\n state.mode = DISTEXT;\\\\n /* falls through */\\\\n case DISTEXT:\\\\n if (state.extra) {\\\\n //=== NEEDBITS(state.extra);\\\\n n = state.extra;\\\\n while (bits < n) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;\\\\n //--- DROPBITS(state.extra) ---//\\\\n hold >>>= state.extra;\\\\n bits -= state.extra;\\\\n //---//\\\\n state.back += state.extra;\\\\n }\\\\n//#ifdef INFLATE_STRICT\\\\n if (state.offset > state.dmax) {\\\\n strm.msg = 'invalid distance too far back';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n//#endif\\\\n //Tracevv((stderr, \\\\\\\"inflate: distance %u\\\\\\\\n\\\\\\\", state.offset));\\\\n state.mode = MATCH;\\\\n /* falls through */\\\\n case MATCH:\\\\n if (left === 0) { break inf_leave; }\\\\n copy = _out - left;\\\\n if (state.offset > copy) { /* copy from window */\\\\n copy = state.offset - copy;\\\\n if (copy > state.whave) {\\\\n if (state.sane) {\\\\n strm.msg = 'invalid distance too far back';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n// (!) 
This block is disabled in zlib defaults,\\\\n// don't enable it for binary compatibility\\\\n//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR\\\\n// Trace((stderr, \\\\\\\"inflate.c too far\\\\\\\\n\\\\\\\"));\\\\n// copy -= state.whave;\\\\n// if (copy > state.length) { copy = state.length; }\\\\n// if (copy > left) { copy = left; }\\\\n// left -= copy;\\\\n// state.length -= copy;\\\\n// do {\\\\n// output[put++] = 0;\\\\n// } while (--copy);\\\\n// if (state.length === 0) { state.mode = LEN; }\\\\n// break;\\\\n//#endif\\\\n }\\\\n if (copy > state.wnext) {\\\\n copy -= state.wnext;\\\\n from = state.wsize - copy;\\\\n }\\\\n else {\\\\n from = state.wnext - copy;\\\\n }\\\\n if (copy > state.length) { copy = state.length; }\\\\n from_source = state.window;\\\\n }\\\\n else { /* copy from output */\\\\n from_source = output;\\\\n from = put - state.offset;\\\\n copy = state.length;\\\\n }\\\\n if (copy > left) { copy = left; }\\\\n left -= copy;\\\\n state.length -= copy;\\\\n do {\\\\n output[put++] = from_source[from++];\\\\n } while (--copy);\\\\n if (state.length === 0) { state.mode = LEN; }\\\\n break;\\\\n case LIT:\\\\n if (left === 0) { break inf_leave; }\\\\n output[put++] = state.length;\\\\n left--;\\\\n state.mode = LEN;\\\\n break;\\\\n case CHECK:\\\\n if (state.wrap) {\\\\n //=== NEEDBITS(32);\\\\n while (bits < 32) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n // Use '|' instead of '+' to make sure that result is signed\\\\n hold |= input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n _out -= left;\\\\n strm.total_out += _out;\\\\n state.total += _out;\\\\n if (_out) {\\\\n strm.adler = state.check =\\\\n /*UPDATE(state.check, put - _out, _out);*/\\\\n (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out));\\\\n\\\\n }\\\\n _out = left;\\\\n // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too\\\\n if ((state.flags ? hold : zswap32(hold)) !== state.check) {\\\\n strm.msg = 'incorrect data check';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n //Tracev((stderr, \\\\\\\"inflate: check matches trailer\\\\\\\\n\\\\\\\"));\\\\n }\\\\n state.mode = LENGTH;\\\\n /* falls through */\\\\n case LENGTH:\\\\n if (state.wrap && state.flags) {\\\\n //=== NEEDBITS(32);\\\\n while (bits < 32) {\\\\n if (have === 0) { break inf_leave; }\\\\n have--;\\\\n hold += input[next++] << bits;\\\\n bits += 8;\\\\n }\\\\n //===//\\\\n if (hold !== (state.total & 0xffffffff)) {\\\\n strm.msg = 'incorrect length check';\\\\n state.mode = BAD;\\\\n break;\\\\n }\\\\n //=== INITBITS();\\\\n hold = 0;\\\\n bits = 0;\\\\n //===//\\\\n //Tracev((stderr, \\\\\\\"inflate: length matches trailer\\\\\\\\n\\\\\\\"));\\\\n }\\\\n state.mode = DONE;\\\\n /* falls through */\\\\n case DONE:\\\\n ret = Z_STREAM_END;\\\\n break inf_leave;\\\\n case BAD:\\\\n ret = Z_DATA_ERROR;\\\\n break inf_leave;\\\\n case MEM:\\\\n return Z_MEM_ERROR;\\\\n case SYNC:\\\\n /* falls through */\\\\n default:\\\\n return Z_STREAM_ERROR;\\\\n }\\\\n }\\\\n\\\\n // inf_leave <- here is real place for \\\\\\\"goto inf_leave\\\\\\\", emulated via \\\\\\\"break inf_leave\\\\\\\"\\\\n\\\\n /*\\\\n Return from inflate(), updating the total counts and the check value.\\\\n If there was no progress during the inflate() call, return a buffer\\\\n error. 
Call updatewindow() to create and/or update the window state.\\\\n Note: a memory error from inflate() is non-recoverable.\\\\n */\\\\n\\\\n //--- RESTORE() ---\\\\n strm.next_out = put;\\\\n strm.avail_out = left;\\\\n strm.next_in = next;\\\\n strm.avail_in = have;\\\\n state.hold = hold;\\\\n state.bits = bits;\\\\n //---\\\\n\\\\n if (state.wsize || (_out !== strm.avail_out && state.mode < BAD &&\\\\n (state.mode < CHECK || flush !== Z_FINISH))) {\\\\n if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) {\\\\n state.mode = MEM;\\\\n return Z_MEM_ERROR;\\\\n }\\\\n }\\\\n _in -= strm.avail_in;\\\\n _out -= strm.avail_out;\\\\n strm.total_in += _in;\\\\n strm.total_out += _out;\\\\n state.total += _out;\\\\n if (state.wrap && _out) {\\\\n strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/\\\\n (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out));\\\\n }\\\\n strm.data_type = state.bits + (state.last ? 64 : 0) +\\\\n (state.mode === TYPE ? 128 : 0) +\\\\n (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0);\\\\n if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) {\\\\n ret = Z_BUF_ERROR;\\\\n }\\\\n return ret;\\\\n}\\\\n\\\\nfunction inflateEnd(strm) {\\\\n\\\\n if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) {\\\\n return Z_STREAM_ERROR;\\\\n }\\\\n\\\\n var state = strm.state;\\\\n if (state.window) {\\\\n state.window = null;\\\\n }\\\\n strm.state = null;\\\\n return Z_OK;\\\\n}\\\\n\\\\nfunction inflateGetHeader(strm, head) {\\\\n var state;\\\\n\\\\n /* check state */\\\\n if (!strm || !strm.state) { return Z_STREAM_ERROR; }\\\\n state = strm.state;\\\\n if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; }\\\\n\\\\n /* save header structure */\\\\n state.head = head;\\\\n head.done = false;\\\\n return Z_OK;\\\\n}\\\\n\\\\nfunction inflateSetDictionary(strm, dictionary) {\\\\n var dictLength = dictionary.length;\\\\n\\\\n var state;\\\\n var dictid;\\\\n var ret;\\\\n\\\\n /* check state */\\\\n if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; }\\\\n state = strm.state;\\\\n\\\\n if (state.wrap !== 0 && state.mode !== DICT) {\\\\n return Z_STREAM_ERROR;\\\\n }\\\\n\\\\n /* check for correct dictionary identifier */\\\\n if (state.mode === DICT) {\\\\n dictid = 1; /* adler32(0, null, 0)*/\\\\n /* dictid = adler32(dictid, dictionary, dictLength); */\\\\n dictid = adler32(dictid, dictionary, dictLength, 0);\\\\n if (dictid !== state.check) {\\\\n return Z_DATA_ERROR;\\\\n }\\\\n }\\\\n /* copy dictionary to window using updatewindow(), which will amend the\\\\n existing dictionary if appropriate */\\\\n ret = updatewindow(strm, dictionary, dictLength, dictLength);\\\\n if (ret) {\\\\n state.mode = MEM;\\\\n return Z_MEM_ERROR;\\\\n }\\\\n state.havedict = 1;\\\\n // Tracev((stderr, \\\\\\\"inflate: dictionary set\\\\\\\\n\\\\\\\"));\\\\n return Z_OK;\\\\n}\\\\n\\\\nexports.inflateReset = inflateReset;\\\\nexports.inflateReset2 = inflateReset2;\\\\nexports.inflateResetKeep = inflateResetKeep;\\\\nexports.inflateInit = inflateInit;\\\\nexports.inflateInit2 = inflateInit2;\\\\nexports.inflate = inflate;\\\\nexports.inflateEnd = inflateEnd;\\\\nexports.inflateGetHeader = inflateGetHeader;\\\\nexports.inflateSetDictionary = inflateSetDictionary;\\\\nexports.inflateInfo = 'pako inflate (from Nodeca project)';\\\\n\\\\n/* Not implemented\\\\nexports.inflateCopy = 
inflateCopy;\\\\nexports.inflateGetDictionary = inflateGetDictionary;\\\\nexports.inflateMark = inflateMark;\\\\nexports.inflatePrime = inflatePrime;\\\\nexports.inflateSync = inflateSync;\\\\nexports.inflateSyncPoint = inflateSyncPoint;\\\\nexports.inflateUndermine = inflateUndermine;\\\\n*/\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inflate.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/inftrees.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/inftrees.js ***!\\n \\\\************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\nvar utils = __webpack_require__(/*! 
../utils/common */ \\\\\\\"./node_modules/pako/lib/utils/common.js\\\\\\\");\\\\n\\\\nvar MAXBITS = 15;\\\\nvar ENOUGH_LENS = 852;\\\\nvar ENOUGH_DISTS = 592;\\\\n//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);\\\\n\\\\nvar CODES = 0;\\\\nvar LENS = 1;\\\\nvar DISTS = 2;\\\\n\\\\nvar lbase = [ /* Length codes 257..285 base */\\\\n 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,\\\\n 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0\\\\n];\\\\n\\\\nvar lext = [ /* Length codes 257..285 extra */\\\\n 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,\\\\n 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78\\\\n];\\\\n\\\\nvar dbase = [ /* Distance codes 0..29 base */\\\\n 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,\\\\n 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,\\\\n 8193, 12289, 16385, 24577, 0, 0\\\\n];\\\\n\\\\nvar dext = [ /* Distance codes 0..29 extra */\\\\n 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,\\\\n 23, 23, 24, 24, 25, 25, 26, 26, 27, 27,\\\\n 28, 28, 29, 29, 64, 64\\\\n];\\\\n\\\\nmodule.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts)\\\\n{\\\\n var bits = opts.bits;\\\\n //here = opts.here; /* table entry for duplication */\\\\n\\\\n var len = 0; /* a code's length in bits */\\\\n var sym = 0; /* index of code symbols */\\\\n var min = 0, max = 0; /* minimum and maximum code lengths */\\\\n var root = 0; /* number of index bits for root table */\\\\n var curr = 0; /* number of index bits for current table */\\\\n var drop = 0; /* code bits to drop for sub-table */\\\\n var left = 0; /* number of prefix codes available */\\\\n var used = 0; /* code entries in table used */\\\\n var huff = 0; /* Huffman code */\\\\n var incr; /* for incrementing code, index */\\\\n var fill; /* index for replicating entries */\\\\n var low; /* low bits for current root entry */\\\\n var mask; /* mask for low root bits */\\\\n var next; /* next available space in table */\\\\n var base = null; /* base value table to use */\\\\n var base_index = 0;\\\\n// var shoextra; /* extra bits table to use */\\\\n var end; /* use base and extra for symbol > end */\\\\n var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */\\\\n var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */\\\\n var extra = null;\\\\n var extra_index = 0;\\\\n\\\\n var here_bits, here_op, here_val;\\\\n\\\\n /*\\\\n Process a set of code lengths to create a canonical Huffman code. The\\\\n code lengths are lens[0..codes-1]. Each length corresponds to the\\\\n symbols 0..codes-1. The Huffman code is generated by first sorting the\\\\n symbols by length from short to long, and retaining the symbol order\\\\n for codes with equal lengths. Then the code starts with all zero bits\\\\n for the first code of the shortest length, and the codes are integer\\\\n increments for the same length, and zeros are appended as the length\\\\n increases. For the deflate format, these bits are stored backwards\\\\n from their more natural integer increment ordering, and so when the\\\\n decoding tables are built in the large loop below, the integer codes\\\\n are incremented backwards.\\\\n\\\\n This routine assumes, but does not check, that all of the entries in\\\\n lens[] are in the range 0..MAXBITS. The caller must assure this.\\\\n 1..MAXBITS is interpreted as that code length. 
zero means that that\\\\n symbol does not occur in this code.\\\\n\\\\n The codes are sorted by computing a count of codes for each length,\\\\n creating from that a table of starting indices for each length in the\\\\n sorted table, and then entering the symbols in order in the sorted\\\\n table. The sorted table is work[], with that space being provided by\\\\n the caller.\\\\n\\\\n The length counts are used for other purposes as well, i.e. finding\\\\n the minimum and maximum length codes, determining if there are any\\\\n codes at all, checking for a valid set of lengths, and looking ahead\\\\n at length counts to determine sub-table sizes when building the\\\\n decoding tables.\\\\n */\\\\n\\\\n /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */\\\\n for (len = 0; len <= MAXBITS; len++) {\\\\n count[len] = 0;\\\\n }\\\\n for (sym = 0; sym < codes; sym++) {\\\\n count[lens[lens_index + sym]]++;\\\\n }\\\\n\\\\n /* bound code lengths, force root to be within code lengths */\\\\n root = bits;\\\\n for (max = MAXBITS; max >= 1; max--) {\\\\n if (count[max] !== 0) { break; }\\\\n }\\\\n if (root > max) {\\\\n root = max;\\\\n }\\\\n if (max === 0) { /* no symbols to code at all */\\\\n //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */\\\\n //table.bits[opts.table_index] = 1; //here.bits = (var char)1;\\\\n //table.val[opts.table_index++] = 0; //here.val = (var short)0;\\\\n table[table_index++] = (1 << 24) | (64 << 16) | 0;\\\\n\\\\n\\\\n //table.op[opts.table_index] = 64;\\\\n //table.bits[opts.table_index] = 1;\\\\n //table.val[opts.table_index++] = 0;\\\\n table[table_index++] = (1 << 24) | (64 << 16) | 0;\\\\n\\\\n opts.bits = 1;\\\\n return 0; /* no symbols, but wait for decoding to report error */\\\\n }\\\\n for (min = 1; min < max; min++) {\\\\n if (count[min] !== 0) { break; }\\\\n }\\\\n if (root < min) {\\\\n root = min;\\\\n }\\\\n\\\\n /* check for an over-subscribed or incomplete set of lengths */\\\\n left = 1;\\\\n for (len = 1; len <= MAXBITS; len++) {\\\\n left <<= 1;\\\\n left -= count[len];\\\\n if (left < 0) {\\\\n return -1;\\\\n } /* over-subscribed */\\\\n }\\\\n if (left > 0 && (type === CODES || max !== 1)) {\\\\n return -1; /* incomplete set */\\\\n }\\\\n\\\\n /* generate offsets into symbol table for each length for sorting */\\\\n offs[1] = 0;\\\\n for (len = 1; len < MAXBITS; len++) {\\\\n offs[len + 1] = offs[len] + count[len];\\\\n }\\\\n\\\\n /* sort symbols by length, by symbol order within each length */\\\\n for (sym = 0; sym < codes; sym++) {\\\\n if (lens[lens_index + sym] !== 0) {\\\\n work[offs[lens[lens_index + sym]]++] = sym;\\\\n }\\\\n }\\\\n\\\\n /*\\\\n Create and fill in decoding tables. In this loop, the table being\\\\n filled is at next and has curr index bits. The code being used is huff\\\\n with length len. That code is converted to an index by dropping drop\\\\n bits off of the bottom. For codes where len is less than drop + curr,\\\\n those top drop + curr - len bits are incremented through all values to\\\\n fill the table with replicated entries.\\\\n\\\\n root is the number of index bits for the root table. When len exceeds\\\\n root, sub-tables are created pointed to by the root entry with an index\\\\n of the low root bits of huff. This is saved in low to check for when a\\\\n new sub-table should be started. 
drop is zero when the root table is\\\\n being filled, and drop is root when sub-tables are being filled.\\\\n\\\\n When a new sub-table is needed, it is necessary to look ahead in the\\\\n code lengths to determine what size sub-table is needed. The length\\\\n counts are used for this, and so count[] is decremented as codes are\\\\n entered in the tables.\\\\n\\\\n used keeps track of how many table entries have been allocated from the\\\\n provided *table space. It is checked for LENS and DIST tables against\\\\n the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in\\\\n the initial root table size constants. See the comments in inftrees.h\\\\n for more information.\\\\n\\\\n sym increments through all symbols, and the loop terminates when\\\\n all codes of length max, i.e. all codes, have been processed. This\\\\n routine permits incomplete codes, so another loop after this one fills\\\\n in the rest of the decoding tables with invalid code markers.\\\\n */\\\\n\\\\n /* set up for code type */\\\\n // poor man optimization - use if-else instead of switch,\\\\n // to avoid deopts in old v8\\\\n if (type === CODES) {\\\\n base = extra = work; /* dummy value--not used */\\\\n end = 19;\\\\n\\\\n } else if (type === LENS) {\\\\n base = lbase;\\\\n base_index -= 257;\\\\n extra = lext;\\\\n extra_index -= 257;\\\\n end = 256;\\\\n\\\\n } else { /* DISTS */\\\\n base = dbase;\\\\n extra = dext;\\\\n end = -1;\\\\n }\\\\n\\\\n /* initialize opts for loop */\\\\n huff = 0; /* starting code */\\\\n sym = 0; /* starting code symbol */\\\\n len = min; /* starting code length */\\\\n next = table_index; /* current table to fill in */\\\\n curr = root; /* current table index bits */\\\\n drop = 0; /* current bits to drop from code for index */\\\\n low = -1; /* trigger new sub-table when len > root */\\\\n used = 1 << root; /* use root table entries */\\\\n mask = used - 1; /* mask for comparing low */\\\\n\\\\n /* check available table space */\\\\n if ((type === LENS && used > ENOUGH_LENS) ||\\\\n (type === DISTS && used > ENOUGH_DISTS)) {\\\\n return 1;\\\\n }\\\\n\\\\n /* process all codes and make table entries */\\\\n for (;;) {\\\\n /* create table entry */\\\\n here_bits = len - drop;\\\\n if (work[sym] < end) {\\\\n here_op = 0;\\\\n here_val = work[sym];\\\\n }\\\\n else if (work[sym] > end) {\\\\n here_op = extra[extra_index + work[sym]];\\\\n here_val = base[base_index + work[sym]];\\\\n }\\\\n else {\\\\n here_op = 32 + 64; /* end of block */\\\\n here_val = 0;\\\\n }\\\\n\\\\n /* replicate for those indices with low len bits equal to huff */\\\\n incr = 1 << (len - drop);\\\\n fill = 1 << curr;\\\\n min = fill; /* save offset to next table */\\\\n do {\\\\n fill -= incr;\\\\n table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0;\\\\n } while (fill !== 0);\\\\n\\\\n /* backwards increment the len-bit code huff */\\\\n incr = 1 << (len - 1);\\\\n while (huff & incr) {\\\\n incr >>= 1;\\\\n }\\\\n if (incr !== 0) {\\\\n huff &= incr - 1;\\\\n huff += incr;\\\\n } else {\\\\n huff = 0;\\\\n }\\\\n\\\\n /* go to next symbol, update count, len */\\\\n sym++;\\\\n if (--count[len] === 0) {\\\\n if (len === max) { break; }\\\\n len = lens[lens_index + work[sym]];\\\\n }\\\\n\\\\n /* create new sub-table if needed */\\\\n if (len > root && (huff & mask) !== low) {\\\\n /* if first time, transition to sub-tables */\\\\n if (drop === 0) {\\\\n drop = root;\\\\n }\\\\n\\\\n /* increment past last table */\\\\n next += min; /* here min is 1 << 
curr */\\\\n\\\\n /* determine length of next table */\\\\n curr = len - drop;\\\\n left = 1 << curr;\\\\n while (curr + drop < max) {\\\\n left -= count[curr + drop];\\\\n if (left <= 0) { break; }\\\\n curr++;\\\\n left <<= 1;\\\\n }\\\\n\\\\n /* check for enough space */\\\\n used += 1 << curr;\\\\n if ((type === LENS && used > ENOUGH_LENS) ||\\\\n (type === DISTS && used > ENOUGH_DISTS)) {\\\\n return 1;\\\\n }\\\\n\\\\n /* point entry in root table to sub-table */\\\\n low = huff & mask;\\\\n /*table.op[low] = curr;\\\\n table.bits[low] = root;\\\\n table.val[low] = next - opts.table_index;*/\\\\n table[low] = (root << 24) | (curr << 16) | (next - table_index) |0;\\\\n }\\\\n }\\\\n\\\\n /* fill in remaining table entry if code is incomplete (guaranteed to have\\\\n at most one remaining entry, since if the code is incomplete, the\\\\n maximum code length that was allowed to get this far is one bit) */\\\\n if (huff !== 0) {\\\\n //table.op[next + huff] = 64; /* invalid code marker */\\\\n //table.bits[next + huff] = len - drop;\\\\n //table.val[next + huff] = 0;\\\\n table[next + huff] = ((len - drop) << 24) | (64 << 16) |0;\\\\n }\\\\n\\\\n /* set return parameters */\\\\n //opts.table_index += used;\\\\n opts.bits = root;\\\\n return 0;\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/inftrees.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/messages.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/messages.js ***!\\n \\\\************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. 
This notice may not be removed or altered from any source distribution.\\\\n\\\\nmodule.exports = {\\\\n 2: 'need dictionary', /* Z_NEED_DICT 2 */\\\\n 1: 'stream end', /* Z_STREAM_END 1 */\\\\n 0: '', /* Z_OK 0 */\\\\n '-1': 'file error', /* Z_ERRNO (-1) */\\\\n '-2': 'stream error', /* Z_STREAM_ERROR (-2) */\\\\n '-3': 'data error', /* Z_DATA_ERROR (-3) */\\\\n '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */\\\\n '-5': 'buffer error', /* Z_BUF_ERROR (-5) */\\\\n '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/messages.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/pako/lib/zlib/zstream.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/pako/lib/zlib/zstream.js ***!\\n \\\\***********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// (C) 1995-2013 Jean-loup Gailly and Mark Adler\\\\n// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin\\\\n//\\\\n// This software is provided 'as-is', without any express or implied\\\\n// warranty. In no event will the authors be held liable for any damages\\\\n// arising from the use of this software.\\\\n//\\\\n// Permission is granted to anyone to use this software for any purpose,\\\\n// including commercial applications, and to alter it and redistribute it\\\\n// freely, subject to the following restrictions:\\\\n//\\\\n// 1. The origin of this software must not be misrepresented; you must not\\\\n// claim that you wrote the original software. If you use this software\\\\n// in a product, an acknowledgment in the product documentation would be\\\\n// appreciated but is not required.\\\\n// 2. Altered source versions must be plainly marked as such, and must not be\\\\n// misrepresented as being the original software.\\\\n// 3. This notice may not be removed or altered from any source distribution.\\\\n\\\\nfunction ZStream() {\\\\n /* next input byte */\\\\n this.input = null; // JS specific, because we have no pointers\\\\n this.next_in = 0;\\\\n /* number of bytes available at input */\\\\n this.avail_in = 0;\\\\n /* total number of input bytes read so far */\\\\n this.total_in = 0;\\\\n /* next output byte should be put there */\\\\n this.output = null; // JS specific, because we have no pointers\\\\n this.next_out = 0;\\\\n /* remaining free space at output */\\\\n this.avail_out = 0;\\\\n /* total number of bytes output so far */\\\\n this.total_out = 0;\\\\n /* last error message, NULL if no error */\\\\n this.msg = ''/*Z_NULL*/;\\\\n /* not visible by applications */\\\\n this.state = null;\\\\n /* best guess about the data type: binary or text */\\\\n this.data_type = 2/*Z_UNKNOWN*/;\\\\n /* adler32 value of the uncompressed data */\\\\n this.adler = 0;\\\\n}\\\\n\\\\nmodule.exports = ZStream;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/pako/lib/zlib/zstream.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/process-nextick-args/index.js\\\":\\n/*!****************************************************!*\\\\\\n !*** ./node_modules/process-nextick-args/index.js ***!\\n \\\\****************************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nif (typeof process === 'undefined' ||\\\\n !process.version ||\\\\n process.version.indexOf('v0.') === 0 ||\\\\n process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {\\\\n module.exports = { nextTick: nextTick };\\\\n} else {\\\\n module.exports = process\\\\n}\\\\n\\\\nfunction nextTick(fn, arg1, arg2, arg3) {\\\\n if (typeof fn !== 'function') {\\\\n throw new TypeError('\\\\\\\"callback\\\\\\\" argument must be a function');\\\\n }\\\\n var len = arguments.length;\\\\n var args, i;\\\\n switch (len) {\\\\n case 0:\\\\n case 1:\\\\n return process.nextTick(fn);\\\\n case 2:\\\\n return process.nextTick(function afterTickOne() {\\\\n fn.call(null, arg1);\\\\n });\\\\n case 3:\\\\n return process.nextTick(function afterTickTwo() {\\\\n fn.call(null, arg1, arg2);\\\\n });\\\\n case 4:\\\\n return process.nextTick(function afterTickThree() {\\\\n fn.call(null, arg1, arg2, arg3);\\\\n });\\\\n default:\\\\n args = new Array(len - 1);\\\\n i = 0;\\\\n while (i < args.length) {\\\\n args[i++] = arguments[i];\\\\n }\\\\n return process.nextTick(function afterTick() {\\\\n fn.apply(null, args);\\\\n });\\\\n }\\\\n}\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/process-nextick-args/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\":\\n/*!************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/_stream_duplex.js ***!\\n \\\\************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n// a duplex stream is just a stream that is both readable and writable.\\\\n// Since JS doesn't have multiple prototypal inheritance, this class\\\\n// prototypally inherits from Readable, and then parasitically from\\\\n// Writable.\\\\n\\\\n\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar pna = __webpack_require__(/*! 
process-nextick-args */ \\\\\\\"./node_modules/process-nextick-args/index.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar objectKeys = Object.keys || function (obj) {\\\\n var keys = [];\\\\n for (var key in obj) {\\\\n keys.push(key);\\\\n }return keys;\\\\n};\\\\n/*</replacement>*/\\\\n\\\\nmodule.exports = Duplex;\\\\n\\\\n/*<replacement>*/\\\\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \\\\\\\"./node_modules/core-util-is/lib/util.js\\\\\\\"));\\\\nutil.inherits = __webpack_require__(/*! inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\nvar Readable = __webpack_require__(/*! ./_stream_readable */ \\\\\\\"./node_modules/readable-stream/lib/_stream_readable.js\\\\\\\");\\\\nvar Writable = __webpack_require__(/*! ./_stream_writable */ \\\\\\\"./node_modules/readable-stream/lib/_stream_writable.js\\\\\\\");\\\\n\\\\nutil.inherits(Duplex, Readable);\\\\n\\\\n{\\\\n // avoid scope creep, the keys array can then be collected\\\\n var keys = objectKeys(Writable.prototype);\\\\n for (var v = 0; v < keys.length; v++) {\\\\n var method = keys[v];\\\\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\\\\n }\\\\n}\\\\n\\\\nfunction Duplex(options) {\\\\n if (!(this instanceof Duplex)) return new Duplex(options);\\\\n\\\\n Readable.call(this, options);\\\\n Writable.call(this, options);\\\\n\\\\n if (options && options.readable === false) this.readable = false;\\\\n\\\\n if (options && options.writable === false) this.writable = false;\\\\n\\\\n this.allowHalfOpen = true;\\\\n if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;\\\\n\\\\n this.once('end', onend);\\\\n}\\\\n\\\\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\\\\n // making it explicit this property is not enumerable\\\\n // because otherwise some prototype manipulation in\\\\n // userland will fail\\\\n enumerable: false,\\\\n get: function () {\\\\n return this._writableState.highWaterMark;\\\\n }\\\\n});\\\\n\\\\n// the no-half-open enforcer\\\\nfunction onend() {\\\\n // if we allow half-open state, or if the writable side ended,\\\\n // then we're ok.\\\\n if (this.allowHalfOpen || this._writableState.ended) return;\\\\n\\\\n // no more data can be written.\\\\n // But allow more writes to happen in this tick.\\\\n pna.nextTick(onEndNT, this);\\\\n}\\\\n\\\\nfunction onEndNT(self) {\\\\n self.end();\\\\n}\\\\n\\\\nObject.defineProperty(Duplex.prototype, 'destroyed', {\\\\n get: function () {\\\\n if (this._readableState === undefined || this._writableState === undefined) {\\\\n return false;\\\\n }\\\\n return this._readableState.destroyed && this._writableState.destroyed;\\\\n },\\\\n set: function (value) {\\\\n // we ignore the value if the stream\\\\n // has not been initialized yet\\\\n if (this._readableState === undefined || this._writableState === undefined) {\\\\n return;\\\\n }\\\\n\\\\n // backward compatibility, the user is explicitly\\\\n // managing destroyed\\\\n this._readableState.destroyed = value;\\\\n this._writableState.destroyed = value;\\\\n }\\\\n});\\\\n\\\\nDuplex.prototype._destroy = function (err, cb) {\\\\n this.push(null);\\\\n this.end();\\\\n\\\\n pna.nextTick(cb, err);\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_duplex.js?\\\");\\n\\n/***/ }),\\n\\n/***/ 
\\\"./node_modules/readable-stream/lib/_stream_passthrough.js\\\":\\n/*!*****************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/_stream_passthrough.js ***!\\n \\\\*****************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n// a passthrough stream.\\\\n// basically just the most minimal sort of Transform stream.\\\\n// Every written chunk gets output as-is.\\\\n\\\\n\\\\n\\\\nmodule.exports = PassThrough;\\\\n\\\\nvar Transform = __webpack_require__(/*! ./_stream_transform */ \\\\\\\"./node_modules/readable-stream/lib/_stream_transform.js\\\\\\\");\\\\n\\\\n/*<replacement>*/\\\\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \\\\\\\"./node_modules/core-util-is/lib/util.js\\\\\\\"));\\\\nutil.inherits = __webpack_require__(/*! inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\nutil.inherits(PassThrough, Transform);\\\\n\\\\nfunction PassThrough(options) {\\\\n if (!(this instanceof PassThrough)) return new PassThrough(options);\\\\n\\\\n Transform.call(this, options);\\\\n}\\\\n\\\\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\\\\n cb(null, chunk);\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_passthrough.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/_stream_readable.js\\\":\\n/*!**************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/_stream_readable.js ***!\\n \\\\**************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. 
and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar pna = __webpack_require__(/*! process-nextick-args */ \\\\\\\"./node_modules/process-nextick-args/index.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\nmodule.exports = Readable;\\\\n\\\\n/*<replacement>*/\\\\nvar isArray = __webpack_require__(/*! isarray */ \\\\\\\"./node_modules/isarray/index.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar Duplex;\\\\n/*</replacement>*/\\\\n\\\\nReadable.ReadableState = ReadableState;\\\\n\\\\n/*<replacement>*/\\\\nvar EE = __webpack_require__(/*! events */ \\\\\\\"events\\\\\\\").EventEmitter;\\\\n\\\\nvar EElistenerCount = function (emitter, type) {\\\\n return emitter.listeners(type).length;\\\\n};\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar Stream = __webpack_require__(/*! ./internal/streams/stream */ \\\\\\\"./node_modules/readable-stream/lib/internal/streams/stream.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar Buffer = __webpack_require__(/*! safe-buffer */ \\\\\\\"./node_modules/readable-stream/node_modules/safe-buffer/index.js\\\\\\\").Buffer;\\\\nvar OurUint8Array = global.Uint8Array || function () {};\\\\nfunction _uint8ArrayToBuffer(chunk) {\\\\n return Buffer.from(chunk);\\\\n}\\\\nfunction _isUint8Array(obj) {\\\\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\\\\n}\\\\n\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \\\\\\\"./node_modules/core-util-is/lib/util.js\\\\\\\"));\\\\nutil.inherits = __webpack_require__(/*! inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar debugUtil = __webpack_require__(/*! util */ \\\\\\\"util\\\\\\\");\\\\nvar debug = void 0;\\\\nif (debugUtil && debugUtil.debuglog) {\\\\n debug = debugUtil.debuglog('stream');\\\\n} else {\\\\n debug = function () {};\\\\n}\\\\n/*</replacement>*/\\\\n\\\\nvar BufferList = __webpack_require__(/*! ./internal/streams/BufferList */ \\\\\\\"./node_modules/readable-stream/lib/internal/streams/BufferList.js\\\\\\\");\\\\nvar destroyImpl = __webpack_require__(/*! 
./internal/streams/destroy */ \\\\\\\"./node_modules/readable-stream/lib/internal/streams/destroy.js\\\\\\\");\\\\nvar StringDecoder;\\\\n\\\\nutil.inherits(Readable, Stream);\\\\n\\\\nvar kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\\\\n\\\\nfunction prependListener(emitter, event, fn) {\\\\n // Sadly this is not cacheable as some libraries bundle their own\\\\n // event emitter implementation with them.\\\\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\\\\n\\\\n // This is a hack to make sure that our error handler is attached before any\\\\n // userland ones. NEVER DO THIS. This is here only because this code needs\\\\n // to continue to work with older versions of Node.js that do not include\\\\n // the prependListener() method. The goal is to eventually remove this hack.\\\\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\\\\n}\\\\n\\\\nfunction ReadableState(options, stream) {\\\\n Duplex = Duplex || __webpack_require__(/*! ./_stream_duplex */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n\\\\n options = options || {};\\\\n\\\\n // Duplex streams are both readable and writable, but share\\\\n // the same options object.\\\\n // However, some cases require setting options to different\\\\n // values for the readable and the writable sides of the duplex stream.\\\\n // These options can be provided separately as readableXXX and writableXXX.\\\\n var isDuplex = stream instanceof Duplex;\\\\n\\\\n // object stream flag. Used to make read(n) ignore n and to\\\\n // make all the buffer merging and length checks go away\\\\n this.objectMode = !!options.objectMode;\\\\n\\\\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\\\\n\\\\n // the point at which it stops calling _read() to fill the buffer\\\\n // Note: 0 is a valid value, means \\\\\\\"don't call _read preemptively ever\\\\\\\"\\\\n var hwm = options.highWaterMark;\\\\n var readableHwm = options.readableHighWaterMark;\\\\n var defaultHwm = this.objectMode ? 16 : 16 * 1024;\\\\n\\\\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;\\\\n\\\\n // cast to ints.\\\\n this.highWaterMark = Math.floor(this.highWaterMark);\\\\n\\\\n // A linked list is used to store data chunks instead of an array because the\\\\n // linked list can remove elements from the beginning faster than\\\\n // array.shift()\\\\n this.buffer = new BufferList();\\\\n this.length = 0;\\\\n this.pipes = null;\\\\n this.pipesCount = 0;\\\\n this.flowing = null;\\\\n this.ended = false;\\\\n this.endEmitted = false;\\\\n this.reading = false;\\\\n\\\\n // a flag to be able to tell if the event 'readable'/'data' is emitted\\\\n // immediately, or on a later tick. 
We set this to true at first, because\\\\n // any actions that shouldn't happen until \\\\\\\"later\\\\\\\" should generally also\\\\n // not happen before the first read call.\\\\n this.sync = true;\\\\n\\\\n // whenever we return null, then we set a flag to say\\\\n // that we're awaiting a 'readable' event emission.\\\\n this.needReadable = false;\\\\n this.emittedReadable = false;\\\\n this.readableListening = false;\\\\n this.resumeScheduled = false;\\\\n\\\\n // has it been destroyed\\\\n this.destroyed = false;\\\\n\\\\n // Crypto is kind of old and crusty. Historically, its default string\\\\n // encoding is 'binary' so we have to make this configurable.\\\\n // Everything else in the universe uses 'utf8', though.\\\\n this.defaultEncoding = options.defaultEncoding || 'utf8';\\\\n\\\\n // the number of writers that are awaiting a drain event in .pipe()s\\\\n this.awaitDrain = 0;\\\\n\\\\n // if true, a maybeReadMore has been scheduled\\\\n this.readingMore = false;\\\\n\\\\n this.decoder = null;\\\\n this.encoding = null;\\\\n if (options.encoding) {\\\\n if (!StringDecoder) StringDecoder = __webpack_require__(/*! string_decoder/ */ \\\\\\\"./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js\\\\\\\").StringDecoder;\\\\n this.decoder = new StringDecoder(options.encoding);\\\\n this.encoding = options.encoding;\\\\n }\\\\n}\\\\n\\\\nfunction Readable(options) {\\\\n Duplex = Duplex || __webpack_require__(/*! ./_stream_duplex */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n\\\\n if (!(this instanceof Readable)) return new Readable(options);\\\\n\\\\n this._readableState = new ReadableState(options, this);\\\\n\\\\n // legacy\\\\n this.readable = true;\\\\n\\\\n if (options) {\\\\n if (typeof options.read === 'function') this._read = options.read;\\\\n\\\\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\\\\n }\\\\n\\\\n Stream.call(this);\\\\n}\\\\n\\\\nObject.defineProperty(Readable.prototype, 'destroyed', {\\\\n get: function () {\\\\n if (this._readableState === undefined) {\\\\n return false;\\\\n }\\\\n return this._readableState.destroyed;\\\\n },\\\\n set: function (value) {\\\\n // we ignore the value if the stream\\\\n // has not been initialized yet\\\\n if (!this._readableState) {\\\\n return;\\\\n }\\\\n\\\\n // backward compatibility, the user is explicitly\\\\n // managing destroyed\\\\n this._readableState.destroyed = value;\\\\n }\\\\n});\\\\n\\\\nReadable.prototype.destroy = destroyImpl.destroy;\\\\nReadable.prototype._undestroy = destroyImpl.undestroy;\\\\nReadable.prototype._destroy = function (err, cb) {\\\\n this.push(null);\\\\n cb(err);\\\\n};\\\\n\\\\n// Manually shove something into the read() buffer.\\\\n// This returns true if the highWaterMark has not been hit yet,\\\\n// similar to how Writable.write() returns true if you should\\\\n// write() some more.\\\\nReadable.prototype.push = function (chunk, encoding) {\\\\n var state = this._readableState;\\\\n var skipChunkCheck;\\\\n\\\\n if (!state.objectMode) {\\\\n if (typeof chunk === 'string') {\\\\n encoding = encoding || state.defaultEncoding;\\\\n if (encoding !== state.encoding) {\\\\n chunk = Buffer.from(chunk, encoding);\\\\n encoding = '';\\\\n }\\\\n skipChunkCheck = true;\\\\n }\\\\n } else {\\\\n skipChunkCheck = true;\\\\n }\\\\n\\\\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\\\\n};\\\\n\\\\n// Unshift should *always* be something directly out of 
read()\\\\nReadable.prototype.unshift = function (chunk) {\\\\n return readableAddChunk(this, chunk, null, true, false);\\\\n};\\\\n\\\\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\\\\n var state = stream._readableState;\\\\n if (chunk === null) {\\\\n state.reading = false;\\\\n onEofChunk(stream, state);\\\\n } else {\\\\n var er;\\\\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\\\\n if (er) {\\\\n stream.emit('error', er);\\\\n } else if (state.objectMode || chunk && chunk.length > 0) {\\\\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\\\\n chunk = _uint8ArrayToBuffer(chunk);\\\\n }\\\\n\\\\n if (addToFront) {\\\\n if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);\\\\n } else if (state.ended) {\\\\n stream.emit('error', new Error('stream.push() after EOF'));\\\\n } else {\\\\n state.reading = false;\\\\n if (state.decoder && !encoding) {\\\\n chunk = state.decoder.write(chunk);\\\\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\\\\n } else {\\\\n addChunk(stream, state, chunk, false);\\\\n }\\\\n }\\\\n } else if (!addToFront) {\\\\n state.reading = false;\\\\n }\\\\n }\\\\n\\\\n return needMoreData(state);\\\\n}\\\\n\\\\nfunction addChunk(stream, state, chunk, addToFront) {\\\\n if (state.flowing && state.length === 0 && !state.sync) {\\\\n stream.emit('data', chunk);\\\\n stream.read(0);\\\\n } else {\\\\n // update the buffer info.\\\\n state.length += state.objectMode ? 1 : chunk.length;\\\\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\\\\n\\\\n if (state.needReadable) emitReadable(stream);\\\\n }\\\\n maybeReadMore(stream, state);\\\\n}\\\\n\\\\nfunction chunkInvalid(state, chunk) {\\\\n var er;\\\\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\\\\n er = new TypeError('Invalid non-string/buffer chunk');\\\\n }\\\\n return er;\\\\n}\\\\n\\\\n// if it's past the high water mark, we can push in some more.\\\\n// Also, if we have no data yet, we can stand some\\\\n// more bytes. This is to work around cases where hwm=0,\\\\n// such as the repl. Also, if the push() triggered a\\\\n// readable event, and the user called read(largeNumber) such that\\\\n// needReadable was set, then we ought to push more, so that another\\\\n// 'readable' event will be triggered.\\\\nfunction needMoreData(state) {\\\\n return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);\\\\n}\\\\n\\\\nReadable.prototype.isPaused = function () {\\\\n return this._readableState.flowing === false;\\\\n};\\\\n\\\\n// backwards compatibility.\\\\nReadable.prototype.setEncoding = function (enc) {\\\\n if (!StringDecoder) StringDecoder = __webpack_require__(/*! 
string_decoder/ */ \\\\\\\"./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js\\\\\\\").StringDecoder;\\\\n this._readableState.decoder = new StringDecoder(enc);\\\\n this._readableState.encoding = enc;\\\\n return this;\\\\n};\\\\n\\\\n// Don't raise the hwm > 8MB\\\\nvar MAX_HWM = 0x800000;\\\\nfunction computeNewHighWaterMark(n) {\\\\n if (n >= MAX_HWM) {\\\\n n = MAX_HWM;\\\\n } else {\\\\n // Get the next highest power of 2 to prevent increasing hwm excessively in\\\\n // tiny amounts\\\\n n--;\\\\n n |= n >>> 1;\\\\n n |= n >>> 2;\\\\n n |= n >>> 4;\\\\n n |= n >>> 8;\\\\n n |= n >>> 16;\\\\n n++;\\\\n }\\\\n return n;\\\\n}\\\\n\\\\n// This function is designed to be inlinable, so please take care when making\\\\n// changes to the function body.\\\\nfunction howMuchToRead(n, state) {\\\\n if (n <= 0 || state.length === 0 && state.ended) return 0;\\\\n if (state.objectMode) return 1;\\\\n if (n !== n) {\\\\n // Only flow one buffer at a time\\\\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\\\\n }\\\\n // If we're asking for more than the current hwm, then raise the hwm.\\\\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\\\\n if (n <= state.length) return n;\\\\n // Don't have enough\\\\n if (!state.ended) {\\\\n state.needReadable = true;\\\\n return 0;\\\\n }\\\\n return state.length;\\\\n}\\\\n\\\\n// you can override either this method, or the async _read(n) below.\\\\nReadable.prototype.read = function (n) {\\\\n debug('read', n);\\\\n n = parseInt(n, 10);\\\\n var state = this._readableState;\\\\n var nOrig = n;\\\\n\\\\n if (n !== 0) state.emittedReadable = false;\\\\n\\\\n // if we're doing read(0) to trigger a readable event, but we\\\\n // already have a bunch of data in the buffer, then just trigger\\\\n // the 'readable' event and move on.\\\\n if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {\\\\n debug('read: emitReadable', state.length, state.ended);\\\\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\\\\n return null;\\\\n }\\\\n\\\\n n = howMuchToRead(n, state);\\\\n\\\\n // if we've ended, and we're now clear, then finish it up.\\\\n if (n === 0 && state.ended) {\\\\n if (state.length === 0) endReadable(this);\\\\n return null;\\\\n }\\\\n\\\\n // All the actual chunk generation logic needs to be\\\\n // *below* the call to _read. The reason is that in certain\\\\n // synthetic stream cases, such as passthrough streams, _read\\\\n // may be a completely synchronous operation which may change\\\\n // the state of the read buffer, providing enough data when\\\\n // before there was *not* enough.\\\\n //\\\\n // So, the steps are:\\\\n // 1. Figure out what the state of things will be after we do\\\\n // a read from the buffer.\\\\n //\\\\n // 2. If that resulting state will trigger a _read, then call _read.\\\\n // Note that this may be asynchronous, or synchronous. Yes, it is\\\\n // deeply ugly to write APIs this way, but that still doesn't mean\\\\n // that the Readable class should behave improperly, as streams are\\\\n // designed to be sync/async agnostic.\\\\n // Take note if the _read call is sync or async (ie, if the read call\\\\n // has returned yet), so that we know whether or not it's safe to emit\\\\n // 'readable' etc.\\\\n //\\\\n // 3. 
Actually pull the requested chunks out of the buffer and return.\\\\n\\\\n // if we need a readable event, then we need to do some reading.\\\\n var doRead = state.needReadable;\\\\n debug('need readable', doRead);\\\\n\\\\n // if we currently have less than the highWaterMark, then also read some\\\\n if (state.length === 0 || state.length - n < state.highWaterMark) {\\\\n doRead = true;\\\\n debug('length less than watermark', doRead);\\\\n }\\\\n\\\\n // however, if we've ended, then there's no point, and if we're already\\\\n // reading, then it's unnecessary.\\\\n if (state.ended || state.reading) {\\\\n doRead = false;\\\\n debug('reading or ended', doRead);\\\\n } else if (doRead) {\\\\n debug('do read');\\\\n state.reading = true;\\\\n state.sync = true;\\\\n // if the length is currently zero, then we *need* a readable event.\\\\n if (state.length === 0) state.needReadable = true;\\\\n // call internal read method\\\\n this._read(state.highWaterMark);\\\\n state.sync = false;\\\\n // If _read pushed data synchronously, then `reading` will be false,\\\\n // and we need to re-evaluate how much data we can return to the user.\\\\n if (!state.reading) n = howMuchToRead(nOrig, state);\\\\n }\\\\n\\\\n var ret;\\\\n if (n > 0) ret = fromList(n, state);else ret = null;\\\\n\\\\n if (ret === null) {\\\\n state.needReadable = true;\\\\n n = 0;\\\\n } else {\\\\n state.length -= n;\\\\n }\\\\n\\\\n if (state.length === 0) {\\\\n // If we have nothing in the buffer, then we want to know\\\\n // as soon as we *do* get something into the buffer.\\\\n if (!state.ended) state.needReadable = true;\\\\n\\\\n // If we tried to read() past the EOF, then emit end on the next tick.\\\\n if (nOrig !== n && state.ended) endReadable(this);\\\\n }\\\\n\\\\n if (ret !== null) this.emit('data', ret);\\\\n\\\\n return ret;\\\\n};\\\\n\\\\nfunction onEofChunk(stream, state) {\\\\n if (state.ended) return;\\\\n if (state.decoder) {\\\\n var chunk = state.decoder.end();\\\\n if (chunk && chunk.length) {\\\\n state.buffer.push(chunk);\\\\n state.length += state.objectMode ? 1 : chunk.length;\\\\n }\\\\n }\\\\n state.ended = true;\\\\n\\\\n // emit 'readable' now to make sure it gets picked up.\\\\n emitReadable(stream);\\\\n}\\\\n\\\\n// Don't emit readable right away in sync mode, because this can trigger\\\\n// another read() call => stack overflow. This way, it might trigger\\\\n// a nextTick recursion warning, but that's not so bad.\\\\nfunction emitReadable(stream) {\\\\n var state = stream._readableState;\\\\n state.needReadable = false;\\\\n if (!state.emittedReadable) {\\\\n debug('emitReadable', state.flowing);\\\\n state.emittedReadable = true;\\\\n if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);\\\\n }\\\\n}\\\\n\\\\nfunction emitReadable_(stream) {\\\\n debug('emit readable');\\\\n stream.emit('readable');\\\\n flow(stream);\\\\n}\\\\n\\\\n// at this point, the user has presumably seen the 'readable' event,\\\\n// and called read() to consume some data. 
that may have triggered\\\\n// in turn another _read(n) call, in which case reading = true if\\\\n// it's in progress.\\\\n// However, if we're not ended, or reading, and the length < hwm,\\\\n// then go ahead and try to read some more preemptively.\\\\nfunction maybeReadMore(stream, state) {\\\\n if (!state.readingMore) {\\\\n state.readingMore = true;\\\\n pna.nextTick(maybeReadMore_, stream, state);\\\\n }\\\\n}\\\\n\\\\nfunction maybeReadMore_(stream, state) {\\\\n var len = state.length;\\\\n while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {\\\\n debug('maybeReadMore read 0');\\\\n stream.read(0);\\\\n if (len === state.length)\\\\n // didn't get any data, stop spinning.\\\\n break;else len = state.length;\\\\n }\\\\n state.readingMore = false;\\\\n}\\\\n\\\\n// abstract method. to be overridden in specific implementation classes.\\\\n// call cb(er, data) where data is <= n in length.\\\\n// for virtual (non-string, non-buffer) streams, \\\\\\\"length\\\\\\\" is somewhat\\\\n// arbitrary, and perhaps not very meaningful.\\\\nReadable.prototype._read = function (n) {\\\\n this.emit('error', new Error('_read() is not implemented'));\\\\n};\\\\n\\\\nReadable.prototype.pipe = function (dest, pipeOpts) {\\\\n var src = this;\\\\n var state = this._readableState;\\\\n\\\\n switch (state.pipesCount) {\\\\n case 0:\\\\n state.pipes = dest;\\\\n break;\\\\n case 1:\\\\n state.pipes = [state.pipes, dest];\\\\n break;\\\\n default:\\\\n state.pipes.push(dest);\\\\n break;\\\\n }\\\\n state.pipesCount += 1;\\\\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\\\\n\\\\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\\\\n\\\\n var endFn = doEnd ? onend : unpipe;\\\\n if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);\\\\n\\\\n dest.on('unpipe', onunpipe);\\\\n function onunpipe(readable, unpipeInfo) {\\\\n debug('onunpipe');\\\\n if (readable === src) {\\\\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\\\\n unpipeInfo.hasUnpiped = true;\\\\n cleanup();\\\\n }\\\\n }\\\\n }\\\\n\\\\n function onend() {\\\\n debug('onend');\\\\n dest.end();\\\\n }\\\\n\\\\n // when the dest drains, it reduces the awaitDrain counter\\\\n // on the source. This would be more elegant with a .once()\\\\n // handler in flow(), but adding and removing repeatedly is\\\\n // too slow.\\\\n var ondrain = pipeOnDrain(src);\\\\n dest.on('drain', ondrain);\\\\n\\\\n var cleanedUp = false;\\\\n function cleanup() {\\\\n debug('cleanup');\\\\n // cleanup event handlers once the pipe is broken\\\\n dest.removeListener('close', onclose);\\\\n dest.removeListener('finish', onfinish);\\\\n dest.removeListener('drain', ondrain);\\\\n dest.removeListener('error', onerror);\\\\n dest.removeListener('unpipe', onunpipe);\\\\n src.removeListener('end', onend);\\\\n src.removeListener('end', unpipe);\\\\n src.removeListener('data', ondata);\\\\n\\\\n cleanedUp = true;\\\\n\\\\n // if the reader is waiting for a drain event from this\\\\n // specific writer, then it would cause it to never start\\\\n // flowing again.\\\\n // So, if this is awaiting a drain, then we just call it now.\\\\n // If we don't know, then assume that we are waiting for one.\\\\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\\\\n }\\\\n\\\\n // If the user pushes more data while we're writing to dest then we'll end up\\\\n // in ondata again. 
However, we only want to increase awaitDrain once because\\\\n // dest will only emit one 'drain' event for the multiple writes.\\\\n // => Introduce a guard on increasing awaitDrain.\\\\n var increasedAwaitDrain = false;\\\\n src.on('data', ondata);\\\\n function ondata(chunk) {\\\\n debug('ondata');\\\\n increasedAwaitDrain = false;\\\\n var ret = dest.write(chunk);\\\\n if (false === ret && !increasedAwaitDrain) {\\\\n // If the user unpiped during `dest.write()`, it is possible\\\\n // to get stuck in a permanently paused state if that write\\\\n // also returned false.\\\\n // => Check whether `dest` is still a piping destination.\\\\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\\\\n debug('false write response, pause', src._readableState.awaitDrain);\\\\n src._readableState.awaitDrain++;\\\\n increasedAwaitDrain = true;\\\\n }\\\\n src.pause();\\\\n }\\\\n }\\\\n\\\\n // if the dest has an error, then stop piping into it.\\\\n // however, don't suppress the throwing behavior for this.\\\\n function onerror(er) {\\\\n debug('onerror', er);\\\\n unpipe();\\\\n dest.removeListener('error', onerror);\\\\n if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);\\\\n }\\\\n\\\\n // Make sure our error handler is attached before userland ones.\\\\n prependListener(dest, 'error', onerror);\\\\n\\\\n // Both close and finish should trigger unpipe, but only once.\\\\n function onclose() {\\\\n dest.removeListener('finish', onfinish);\\\\n unpipe();\\\\n }\\\\n dest.once('close', onclose);\\\\n function onfinish() {\\\\n debug('onfinish');\\\\n dest.removeListener('close', onclose);\\\\n unpipe();\\\\n }\\\\n dest.once('finish', onfinish);\\\\n\\\\n function unpipe() {\\\\n debug('unpipe');\\\\n src.unpipe(dest);\\\\n }\\\\n\\\\n // tell the dest that it's being piped to\\\\n dest.emit('pipe', src);\\\\n\\\\n // start the flow if it hasn't been started already.\\\\n if (!state.flowing) {\\\\n debug('pipe resume');\\\\n src.resume();\\\\n }\\\\n\\\\n return dest;\\\\n};\\\\n\\\\nfunction pipeOnDrain(src) {\\\\n return function () {\\\\n var state = src._readableState;\\\\n debug('pipeOnDrain', state.awaitDrain);\\\\n if (state.awaitDrain) state.awaitDrain--;\\\\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\\\\n state.flowing = true;\\\\n flow(src);\\\\n }\\\\n };\\\\n}\\\\n\\\\nReadable.prototype.unpipe = function (dest) {\\\\n var state = this._readableState;\\\\n var unpipeInfo = { hasUnpiped: false };\\\\n\\\\n // if we're not piping anywhere, then do nothing.\\\\n if (state.pipesCount === 0) return this;\\\\n\\\\n // just one destination. most common case.\\\\n if (state.pipesCount === 1) {\\\\n // passed in one, but it's not the right one.\\\\n if (dest && dest !== state.pipes) return this;\\\\n\\\\n if (!dest) dest = state.pipes;\\\\n\\\\n // got a match.\\\\n state.pipes = null;\\\\n state.pipesCount = 0;\\\\n state.flowing = false;\\\\n if (dest) dest.emit('unpipe', this, unpipeInfo);\\\\n return this;\\\\n }\\\\n\\\\n // slow case. 
multiple pipe destinations.\\\\n\\\\n if (!dest) {\\\\n // remove all.\\\\n var dests = state.pipes;\\\\n var len = state.pipesCount;\\\\n state.pipes = null;\\\\n state.pipesCount = 0;\\\\n state.flowing = false;\\\\n\\\\n for (var i = 0; i < len; i++) {\\\\n dests[i].emit('unpipe', this, unpipeInfo);\\\\n }return this;\\\\n }\\\\n\\\\n // try to find the right one.\\\\n var index = indexOf(state.pipes, dest);\\\\n if (index === -1) return this;\\\\n\\\\n state.pipes.splice(index, 1);\\\\n state.pipesCount -= 1;\\\\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\\\\n\\\\n dest.emit('unpipe', this, unpipeInfo);\\\\n\\\\n return this;\\\\n};\\\\n\\\\n// set up data events if they are asked for\\\\n// Ensure readable listeners eventually get something\\\\nReadable.prototype.on = function (ev, fn) {\\\\n var res = Stream.prototype.on.call(this, ev, fn);\\\\n\\\\n if (ev === 'data') {\\\\n // Start flowing on next tick if stream isn't explicitly paused\\\\n if (this._readableState.flowing !== false) this.resume();\\\\n } else if (ev === 'readable') {\\\\n var state = this._readableState;\\\\n if (!state.endEmitted && !state.readableListening) {\\\\n state.readableListening = state.needReadable = true;\\\\n state.emittedReadable = false;\\\\n if (!state.reading) {\\\\n pna.nextTick(nReadingNextTick, this);\\\\n } else if (state.length) {\\\\n emitReadable(this);\\\\n }\\\\n }\\\\n }\\\\n\\\\n return res;\\\\n};\\\\nReadable.prototype.addListener = Readable.prototype.on;\\\\n\\\\nfunction nReadingNextTick(self) {\\\\n debug('readable nexttick read 0');\\\\n self.read(0);\\\\n}\\\\n\\\\n// pause() and resume() are remnants of the legacy readable stream API\\\\n// If the user uses them, then switch into old mode.\\\\nReadable.prototype.resume = function () {\\\\n var state = this._readableState;\\\\n if (!state.flowing) {\\\\n debug('resume');\\\\n state.flowing = true;\\\\n resume(this, state);\\\\n }\\\\n return this;\\\\n};\\\\n\\\\nfunction resume(stream, state) {\\\\n if (!state.resumeScheduled) {\\\\n state.resumeScheduled = true;\\\\n pna.nextTick(resume_, stream, state);\\\\n }\\\\n}\\\\n\\\\nfunction resume_(stream, state) {\\\\n if (!state.reading) {\\\\n debug('resume read 0');\\\\n stream.read(0);\\\\n }\\\\n\\\\n state.resumeScheduled = false;\\\\n state.awaitDrain = 0;\\\\n stream.emit('resume');\\\\n flow(stream);\\\\n if (state.flowing && !state.reading) stream.read(0);\\\\n}\\\\n\\\\nReadable.prototype.pause = function () {\\\\n debug('call pause flowing=%j', this._readableState.flowing);\\\\n if (false !== this._readableState.flowing) {\\\\n debug('pause');\\\\n this._readableState.flowing = false;\\\\n this.emit('pause');\\\\n }\\\\n return this;\\\\n};\\\\n\\\\nfunction flow(stream) {\\\\n var state = stream._readableState;\\\\n debug('flow', state.flowing);\\\\n while (state.flowing && stream.read() !== null) {}\\\\n}\\\\n\\\\n// wrap an old-style stream as the async data source.\\\\n// This is *not* part of the readable stream interface.\\\\n// It is an ugly unfortunate mess of history.\\\\nReadable.prototype.wrap = function (stream) {\\\\n var _this = this;\\\\n\\\\n var state = this._readableState;\\\\n var paused = false;\\\\n\\\\n stream.on('end', function () {\\\\n debug('wrapped end');\\\\n if (state.decoder && !state.ended) {\\\\n var chunk = state.decoder.end();\\\\n if (chunk && chunk.length) _this.push(chunk);\\\\n }\\\\n\\\\n _this.push(null);\\\\n });\\\\n\\\\n stream.on('data', function (chunk) {\\\\n debug('wrapped data');\\\\n if (state.decoder) 
chunk = state.decoder.write(chunk);\\\\n\\\\n // don't skip over falsy values in objectMode\\\\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\\\\n\\\\n var ret = _this.push(chunk);\\\\n if (!ret) {\\\\n paused = true;\\\\n stream.pause();\\\\n }\\\\n });\\\\n\\\\n // proxy all the other methods.\\\\n // important when wrapping filters and duplexes.\\\\n for (var i in stream) {\\\\n if (this[i] === undefined && typeof stream[i] === 'function') {\\\\n this[i] = function (method) {\\\\n return function () {\\\\n return stream[method].apply(stream, arguments);\\\\n };\\\\n }(i);\\\\n }\\\\n }\\\\n\\\\n // proxy certain important events.\\\\n for (var n = 0; n < kProxyEvents.length; n++) {\\\\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\\\\n }\\\\n\\\\n // when we try to consume some more bytes, simply unpause the\\\\n // underlying stream.\\\\n this._read = function (n) {\\\\n debug('wrapped _read', n);\\\\n if (paused) {\\\\n paused = false;\\\\n stream.resume();\\\\n }\\\\n };\\\\n\\\\n return this;\\\\n};\\\\n\\\\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\\\\n // making it explicit this property is not enumerable\\\\n // because otherwise some prototype manipulation in\\\\n // userland will fail\\\\n enumerable: false,\\\\n get: function () {\\\\n return this._readableState.highWaterMark;\\\\n }\\\\n});\\\\n\\\\n// exposed for testing purposes only.\\\\nReadable._fromList = fromList;\\\\n\\\\n// Pluck off n bytes from an array of buffers.\\\\n// Length is the combined lengths of all the buffers in the list.\\\\n// This function is designed to be inlinable, so please take care when making\\\\n// changes to the function body.\\\\nfunction fromList(n, state) {\\\\n // nothing buffered\\\\n if (state.length === 0) return null;\\\\n\\\\n var ret;\\\\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\\\\n // read it all, truncate the list\\\\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);\\\\n state.buffer.clear();\\\\n } else {\\\\n // read part of list\\\\n ret = fromListPartial(n, state.buffer, state.decoder);\\\\n }\\\\n\\\\n return ret;\\\\n}\\\\n\\\\n// Extracts only enough buffered data to satisfy the amount requested.\\\\n// This function is designed to be inlinable, so please take care when making\\\\n// changes to the function body.\\\\nfunction fromListPartial(n, list, hasStrings) {\\\\n var ret;\\\\n if (n < list.head.data.length) {\\\\n // slice is the same for buffers and strings\\\\n ret = list.head.data.slice(0, n);\\\\n list.head.data = list.head.data.slice(n);\\\\n } else if (n === list.head.data.length) {\\\\n // first chunk is a perfect match\\\\n ret = list.shift();\\\\n } else {\\\\n // result spans more than one buffer\\\\n ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);\\\\n }\\\\n return ret;\\\\n}\\\\n\\\\n// Copies a specified amount of characters from the list of buffered data\\\\n// chunks.\\\\n// This function is designed to be inlinable, so please take care when making\\\\n// changes to the function body.\\\\nfunction copyFromBufferString(n, list) {\\\\n var p = list.head;\\\\n var c = 1;\\\\n var ret = p.data;\\\\n n -= ret.length;\\\\n while (p = p.next) {\\\\n var str = p.data;\\\\n var nb = n > str.length ? 
str.length : n;\\\\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\\\\n n -= nb;\\\\n if (n === 0) {\\\\n if (nb === str.length) {\\\\n ++c;\\\\n if (p.next) list.head = p.next;else list.head = list.tail = null;\\\\n } else {\\\\n list.head = p;\\\\n p.data = str.slice(nb);\\\\n }\\\\n break;\\\\n }\\\\n ++c;\\\\n }\\\\n list.length -= c;\\\\n return ret;\\\\n}\\\\n\\\\n// Copies a specified amount of bytes from the list of buffered data chunks.\\\\n// This function is designed to be inlinable, so please take care when making\\\\n// changes to the function body.\\\\nfunction copyFromBuffer(n, list) {\\\\n var ret = Buffer.allocUnsafe(n);\\\\n var p = list.head;\\\\n var c = 1;\\\\n p.data.copy(ret);\\\\n n -= p.data.length;\\\\n while (p = p.next) {\\\\n var buf = p.data;\\\\n var nb = n > buf.length ? buf.length : n;\\\\n buf.copy(ret, ret.length - n, 0, nb);\\\\n n -= nb;\\\\n if (n === 0) {\\\\n if (nb === buf.length) {\\\\n ++c;\\\\n if (p.next) list.head = p.next;else list.head = list.tail = null;\\\\n } else {\\\\n list.head = p;\\\\n p.data = buf.slice(nb);\\\\n }\\\\n break;\\\\n }\\\\n ++c;\\\\n }\\\\n list.length -= c;\\\\n return ret;\\\\n}\\\\n\\\\nfunction endReadable(stream) {\\\\n var state = stream._readableState;\\\\n\\\\n // If we get here before consuming all the bytes, then that is a\\\\n // bug in node. Should never happen.\\\\n if (state.length > 0) throw new Error('\\\\\\\"endReadable()\\\\\\\" called on non-empty stream');\\\\n\\\\n if (!state.endEmitted) {\\\\n state.ended = true;\\\\n pna.nextTick(endReadableNT, state, stream);\\\\n }\\\\n}\\\\n\\\\nfunction endReadableNT(state, stream) {\\\\n // Check that we didn't get one last unshift.\\\\n if (!state.endEmitted && state.length === 0) {\\\\n state.endEmitted = true;\\\\n stream.readable = false;\\\\n stream.emit('end');\\\\n }\\\\n}\\\\n\\\\nfunction indexOf(xs, x) {\\\\n for (var i = 0, l = xs.length; i < l; i++) {\\\\n if (xs[i] === x) return i;\\\\n }\\\\n return -1;\\\\n}\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_readable.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/_stream_transform.js\\\":\\n/*!***************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/_stream_transform.js ***!\\n \\\\***************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n// a transform stream is a readable/writable stream where you do\\\\n// something with the data. Sometimes it's called a \\\\\\\"filter\\\\\\\",\\\\n// but that's not a great name for it, since that implies a thing where\\\\n// some bits pass through, and others are simply ignored. (That would\\\\n// be a valid example of a transform, of course.)\\\\n//\\\\n// While the output is causally related to the input, it's not a\\\\n// necessarily symmetric or synchronous transformation. For example,\\\\n// a zlib stream might take multiple plain-text writes(), and then\\\\n// emit a single compressed chunk some time in the future.\\\\n//\\\\n// Here's how this works:\\\\n//\\\\n// The Transform stream has all the aspects of the readable and writable\\\\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\\\\n// internally, and returns false if there's a lot of pending writes\\\\n// buffered up. When you call read(), that calls _read(n) until\\\\n// there's enough pending readable data buffered up.\\\\n//\\\\n// In a transform stream, the written data is placed in a buffer. When\\\\n// _read(n) is called, it transforms the queued up data, calling the\\\\n// buffered _write cb's as it consumes chunks. If consuming a single\\\\n// written chunk would result in multiple output chunks, then the first\\\\n// outputted bit calls the readcb, and subsequent chunks just go into\\\\n// the read buffer, and will cause it to emit 'readable' if necessary.\\\\n//\\\\n// This way, back-pressure is actually determined by the reading side,\\\\n// since _read has to be called to start processing a new chunk. However,\\\\n// a pathological inflate type of transform can cause excessive buffering\\\\n// here. For example, imagine a stream where every byte of input is\\\\n// interpreted as an integer from 0-255, and then results in that many\\\\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\\\\n// 1kb of data being output. In this case, you could write a very small\\\\n// amount of input, and end up with a very large amount of output. In\\\\n// such a pathological inflating mechanism, there'd be no way to tell\\\\n// the system to stop doing the transform. A single 4MB write could\\\\n// cause the system to run out of memory.\\\\n//\\\\n// However, even in such a pathological case, only a single written chunk\\\\n// would be consumed, and then the rest would wait (un-transformed) until\\\\n// the results of the previous transformed chunk were consumed.\\\\n\\\\n\\\\n\\\\nmodule.exports = Transform;\\\\n\\\\nvar Duplex = __webpack_require__(/*! ./_stream_duplex */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n\\\\n/*<replacement>*/\\\\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \\\\\\\"./node_modules/core-util-is/lib/util.js\\\\\\\"));\\\\nutil.inherits = __webpack_require__(/*! 
inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\nutil.inherits(Transform, Duplex);\\\\n\\\\nfunction afterTransform(er, data) {\\\\n var ts = this._transformState;\\\\n ts.transforming = false;\\\\n\\\\n var cb = ts.writecb;\\\\n\\\\n if (!cb) {\\\\n return this.emit('error', new Error('write callback called multiple times'));\\\\n }\\\\n\\\\n ts.writechunk = null;\\\\n ts.writecb = null;\\\\n\\\\n if (data != null) // single equals check for both `null` and `undefined`\\\\n this.push(data);\\\\n\\\\n cb(er);\\\\n\\\\n var rs = this._readableState;\\\\n rs.reading = false;\\\\n if (rs.needReadable || rs.length < rs.highWaterMark) {\\\\n this._read(rs.highWaterMark);\\\\n }\\\\n}\\\\n\\\\nfunction Transform(options) {\\\\n if (!(this instanceof Transform)) return new Transform(options);\\\\n\\\\n Duplex.call(this, options);\\\\n\\\\n this._transformState = {\\\\n afterTransform: afterTransform.bind(this),\\\\n needTransform: false,\\\\n transforming: false,\\\\n writecb: null,\\\\n writechunk: null,\\\\n writeencoding: null\\\\n };\\\\n\\\\n // start out asking for a readable event once data is transformed.\\\\n this._readableState.needReadable = true;\\\\n\\\\n // we have implemented the _read method, and done the other things\\\\n // that Readable wants before the first _read call, so unset the\\\\n // sync guard flag.\\\\n this._readableState.sync = false;\\\\n\\\\n if (options) {\\\\n if (typeof options.transform === 'function') this._transform = options.transform;\\\\n\\\\n if (typeof options.flush === 'function') this._flush = options.flush;\\\\n }\\\\n\\\\n // When the writable side finishes, then flush out anything remaining.\\\\n this.on('prefinish', prefinish);\\\\n}\\\\n\\\\nfunction prefinish() {\\\\n var _this = this;\\\\n\\\\n if (typeof this._flush === 'function') {\\\\n this._flush(function (er, data) {\\\\n done(_this, er, data);\\\\n });\\\\n } else {\\\\n done(this, null, null);\\\\n }\\\\n}\\\\n\\\\nTransform.prototype.push = function (chunk, encoding) {\\\\n this._transformState.needTransform = false;\\\\n return Duplex.prototype.push.call(this, chunk, encoding);\\\\n};\\\\n\\\\n// This is the part where you do stuff!\\\\n// override this function in implementation classes.\\\\n// 'chunk' is an input chunk.\\\\n//\\\\n// Call `push(newChunk)` to pass along transformed output\\\\n// to the readable side. You may call 'push' zero or more times.\\\\n//\\\\n// Call `cb(err)` when you are done with this chunk. If you pass\\\\n// an error, then that'll put the hurt on the whole operation. 
If you\\\\n// never call cb(), then you'll never get another chunk.\\\\nTransform.prototype._transform = function (chunk, encoding, cb) {\\\\n throw new Error('_transform() is not implemented');\\\\n};\\\\n\\\\nTransform.prototype._write = function (chunk, encoding, cb) {\\\\n var ts = this._transformState;\\\\n ts.writecb = cb;\\\\n ts.writechunk = chunk;\\\\n ts.writeencoding = encoding;\\\\n if (!ts.transforming) {\\\\n var rs = this._readableState;\\\\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\\\\n }\\\\n};\\\\n\\\\n// Doesn't matter what the args are here.\\\\n// _transform does all the work.\\\\n// That we got here means that the readable side wants more data.\\\\nTransform.prototype._read = function (n) {\\\\n var ts = this._transformState;\\\\n\\\\n if (ts.writechunk !== null && ts.writecb && !ts.transforming) {\\\\n ts.transforming = true;\\\\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\\\\n } else {\\\\n // mark that we need a transform, so that any data that comes in\\\\n // will get processed, now that we've asked for it.\\\\n ts.needTransform = true;\\\\n }\\\\n};\\\\n\\\\nTransform.prototype._destroy = function (err, cb) {\\\\n var _this2 = this;\\\\n\\\\n Duplex.prototype._destroy.call(this, err, function (err2) {\\\\n cb(err2);\\\\n _this2.emit('close');\\\\n });\\\\n};\\\\n\\\\nfunction done(stream, er, data) {\\\\n if (er) return stream.emit('error', er);\\\\n\\\\n if (data != null) // single equals check for both `null` and `undefined`\\\\n stream.push(data);\\\\n\\\\n // if there's nothing in the write buffer, then that means\\\\n // that nothing more will ever be provided\\\\n if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');\\\\n\\\\n if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');\\\\n\\\\n return stream.push(null);\\\\n}\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_transform.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/_stream_writable.js\\\":\\n/*!**************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/_stream_writable.js ***!\\n \\\\**************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n// A bit simpler than readable streams.\\\\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\\\\n// the drain event emission and buffering.\\\\n\\\\n\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar pna = __webpack_require__(/*! process-nextick-args */ \\\\\\\"./node_modules/process-nextick-args/index.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\nmodule.exports = Writable;\\\\n\\\\n/* <replacement> */\\\\nfunction WriteReq(chunk, encoding, cb) {\\\\n this.chunk = chunk;\\\\n this.encoding = encoding;\\\\n this.callback = cb;\\\\n this.next = null;\\\\n}\\\\n\\\\n// It seems a linked list but it is not\\\\n// there will be only 2 of these for each stream\\\\nfunction CorkedRequest(state) {\\\\n var _this = this;\\\\n\\\\n this.next = null;\\\\n this.entry = null;\\\\n this.finish = function () {\\\\n onCorkedFinish(_this, state);\\\\n };\\\\n}\\\\n/* </replacement> */\\\\n\\\\n/*<replacement>*/\\\\nvar asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar Duplex;\\\\n/*</replacement>*/\\\\n\\\\nWritable.WritableState = WritableState;\\\\n\\\\n/*<replacement>*/\\\\nvar util = Object.create(__webpack_require__(/*! core-util-is */ \\\\\\\"./node_modules/core-util-is/lib/util.js\\\\\\\"));\\\\nutil.inherits = __webpack_require__(/*! inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar internalUtil = {\\\\n deprecate: __webpack_require__(/*! util-deprecate */ \\\\\\\"./node_modules/util-deprecate/node.js\\\\\\\")\\\\n};\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\nvar Stream = __webpack_require__(/*! ./internal/streams/stream */ \\\\\\\"./node_modules/readable-stream/lib/internal/streams/stream.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar Buffer = __webpack_require__(/*! safe-buffer */ \\\\\\\"./node_modules/readable-stream/node_modules/safe-buffer/index.js\\\\\\\").Buffer;\\\\nvar OurUint8Array = global.Uint8Array || function () {};\\\\nfunction _uint8ArrayToBuffer(chunk) {\\\\n return Buffer.from(chunk);\\\\n}\\\\nfunction _isUint8Array(obj) {\\\\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\\\\n}\\\\n\\\\n/*</replacement>*/\\\\n\\\\nvar destroyImpl = __webpack_require__(/*! ./internal/streams/destroy */ \\\\\\\"./node_modules/readable-stream/lib/internal/streams/destroy.js\\\\\\\");\\\\n\\\\nutil.inherits(Writable, Stream);\\\\n\\\\nfunction nop() {}\\\\n\\\\nfunction WritableState(options, stream) {\\\\n Duplex = Duplex || __webpack_require__(/*! 
./_stream_duplex */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n\\\\n options = options || {};\\\\n\\\\n // Duplex streams are both readable and writable, but share\\\\n // the same options object.\\\\n // However, some cases require setting options to different\\\\n // values for the readable and the writable sides of the duplex stream.\\\\n // These options can be provided separately as readableXXX and writableXXX.\\\\n var isDuplex = stream instanceof Duplex;\\\\n\\\\n // object stream flag to indicate whether or not this stream\\\\n // contains buffers or objects.\\\\n this.objectMode = !!options.objectMode;\\\\n\\\\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\\\\n\\\\n // the point at which write() starts returning false\\\\n // Note: 0 is a valid value, means that we always return false if\\\\n // the entire buffer is not flushed immediately on write()\\\\n var hwm = options.highWaterMark;\\\\n var writableHwm = options.writableHighWaterMark;\\\\n var defaultHwm = this.objectMode ? 16 : 16 * 1024;\\\\n\\\\n if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;\\\\n\\\\n // cast to ints.\\\\n this.highWaterMark = Math.floor(this.highWaterMark);\\\\n\\\\n // if _final has been called\\\\n this.finalCalled = false;\\\\n\\\\n // drain event flag.\\\\n this.needDrain = false;\\\\n // at the start of calling end()\\\\n this.ending = false;\\\\n // when end() has been called, and returned\\\\n this.ended = false;\\\\n // when 'finish' is emitted\\\\n this.finished = false;\\\\n\\\\n // has it been destroyed\\\\n this.destroyed = false;\\\\n\\\\n // should we decode strings into buffers before passing to _write?\\\\n // this is here so that some node-core streams can optimize string\\\\n // handling at a lower level.\\\\n var noDecode = options.decodeStrings === false;\\\\n this.decodeStrings = !noDecode;\\\\n\\\\n // Crypto is kind of old and crusty. Historically, its default string\\\\n // encoding is 'binary' so we have to make this configurable.\\\\n // Everything else in the universe uses 'utf8', though.\\\\n this.defaultEncoding = options.defaultEncoding || 'utf8';\\\\n\\\\n // not an actual buffer we keep track of, but a measurement\\\\n // of how much we're waiting to get pushed to some underlying\\\\n // socket or file.\\\\n this.length = 0;\\\\n\\\\n // a flag to see when we're in the middle of a write.\\\\n this.writing = false;\\\\n\\\\n // when true all writes will be buffered until .uncork() call\\\\n this.corked = 0;\\\\n\\\\n // a flag to be able to tell if the onwrite cb is called immediately,\\\\n // or on a later tick. 
We set this to true at first, because any\\\\n // actions that shouldn't happen until \\\\\\\"later\\\\\\\" should generally also\\\\n // not happen before the first write call.\\\\n this.sync = true;\\\\n\\\\n // a flag to know if we're processing previously buffered items, which\\\\n // may call the _write() callback in the same tick, so that we don't\\\\n // end up in an overlapped onwrite situation.\\\\n this.bufferProcessing = false;\\\\n\\\\n // the callback that's passed to _write(chunk,cb)\\\\n this.onwrite = function (er) {\\\\n onwrite(stream, er);\\\\n };\\\\n\\\\n // the callback that the user supplies to write(chunk,encoding,cb)\\\\n this.writecb = null;\\\\n\\\\n // the amount that is being written when _write is called.\\\\n this.writelen = 0;\\\\n\\\\n this.bufferedRequest = null;\\\\n this.lastBufferedRequest = null;\\\\n\\\\n // number of pending user-supplied write callbacks\\\\n // this must be 0 before 'finish' can be emitted\\\\n this.pendingcb = 0;\\\\n\\\\n // emit prefinish if the only thing we're waiting for is _write cbs\\\\n // This is relevant for synchronous Transform streams\\\\n this.prefinished = false;\\\\n\\\\n // True if the error was already emitted and should not be thrown again\\\\n this.errorEmitted = false;\\\\n\\\\n // count buffered requests\\\\n this.bufferedRequestCount = 0;\\\\n\\\\n // allocate the first CorkedRequest, there is always\\\\n // one allocated and free to use, and we maintain at most two\\\\n this.corkedRequestsFree = new CorkedRequest(this);\\\\n}\\\\n\\\\nWritableState.prototype.getBuffer = function getBuffer() {\\\\n var current = this.bufferedRequest;\\\\n var out = [];\\\\n while (current) {\\\\n out.push(current);\\\\n current = current.next;\\\\n }\\\\n return out;\\\\n};\\\\n\\\\n(function () {\\\\n try {\\\\n Object.defineProperty(WritableState.prototype, 'buffer', {\\\\n get: internalUtil.deprecate(function () {\\\\n return this.getBuffer();\\\\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\\\\n });\\\\n } catch (_) {}\\\\n})();\\\\n\\\\n// Test _writableState for inheritance to account for Duplex streams,\\\\n// whose prototype chain only points to Readable.\\\\nvar realHasInstance;\\\\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\\\\n realHasInstance = Function.prototype[Symbol.hasInstance];\\\\n Object.defineProperty(Writable, Symbol.hasInstance, {\\\\n value: function (object) {\\\\n if (realHasInstance.call(this, object)) return true;\\\\n if (this !== Writable) return false;\\\\n\\\\n return object && object._writableState instanceof WritableState;\\\\n }\\\\n });\\\\n} else {\\\\n realHasInstance = function (object) {\\\\n return object instanceof this;\\\\n };\\\\n}\\\\n\\\\nfunction Writable(options) {\\\\n Duplex = Duplex || __webpack_require__(/*! 
./_stream_duplex */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n\\\\n // Writable ctor is applied to Duplexes, too.\\\\n // `realHasInstance` is necessary because using plain `instanceof`\\\\n // would return false, as no `_writableState` property is attached.\\\\n\\\\n // Trying to use the custom `instanceof` for Writable here will also break the\\\\n // Node.js LazyTransform implementation, which has a non-trivial getter for\\\\n // `_writableState` that would lead to infinite recursion.\\\\n if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {\\\\n return new Writable(options);\\\\n }\\\\n\\\\n this._writableState = new WritableState(options, this);\\\\n\\\\n // legacy.\\\\n this.writable = true;\\\\n\\\\n if (options) {\\\\n if (typeof options.write === 'function') this._write = options.write;\\\\n\\\\n if (typeof options.writev === 'function') this._writev = options.writev;\\\\n\\\\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\\\\n\\\\n if (typeof options.final === 'function') this._final = options.final;\\\\n }\\\\n\\\\n Stream.call(this);\\\\n}\\\\n\\\\n// Otherwise people can pipe Writable streams, which is just wrong.\\\\nWritable.prototype.pipe = function () {\\\\n this.emit('error', new Error('Cannot pipe, not readable'));\\\\n};\\\\n\\\\nfunction writeAfterEnd(stream, cb) {\\\\n var er = new Error('write after end');\\\\n // TODO: defer error events consistently everywhere, not just the cb\\\\n stream.emit('error', er);\\\\n pna.nextTick(cb, er);\\\\n}\\\\n\\\\n// Checks that a user-supplied chunk is valid, especially for the particular\\\\n// mode the stream is in. Currently this means that `null` is never accepted\\\\n// and undefined/non-string values are only allowed in object mode.\\\\nfunction validChunk(stream, state, chunk, cb) {\\\\n var valid = true;\\\\n var er = false;\\\\n\\\\n if (chunk === null) {\\\\n er = new TypeError('May not write null values to stream');\\\\n } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\\\\n er = new TypeError('Invalid non-string/buffer chunk');\\\\n }\\\\n if (er) {\\\\n stream.emit('error', er);\\\\n pna.nextTick(cb, er);\\\\n valid = false;\\\\n }\\\\n return valid;\\\\n}\\\\n\\\\nWritable.prototype.write = function (chunk, encoding, cb) {\\\\n var state = this._writableState;\\\\n var ret = false;\\\\n var isBuf = !state.objectMode && _isUint8Array(chunk);\\\\n\\\\n if (isBuf && !Buffer.isBuffer(chunk)) {\\\\n chunk = _uint8ArrayToBuffer(chunk);\\\\n }\\\\n\\\\n if (typeof encoding === 'function') {\\\\n cb = encoding;\\\\n encoding = null;\\\\n }\\\\n\\\\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\\\\n\\\\n if (typeof cb !== 'function') cb = nop;\\\\n\\\\n if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\\\\n state.pendingcb++;\\\\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\\\\n }\\\\n\\\\n return ret;\\\\n};\\\\n\\\\nWritable.prototype.cork = function () {\\\\n var state = this._writableState;\\\\n\\\\n state.corked++;\\\\n};\\\\n\\\\nWritable.prototype.uncork = function () {\\\\n var state = this._writableState;\\\\n\\\\n if (state.corked) {\\\\n state.corked--;\\\\n\\\\n if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\\\\n }\\\\n};\\\\n\\\\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) 
{\\\\n // node::ParseEncoding() requires lower case.\\\\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\\\\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);\\\\n this._writableState.defaultEncoding = encoding;\\\\n return this;\\\\n};\\\\n\\\\nfunction decodeChunk(state, chunk, encoding) {\\\\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\\\\n chunk = Buffer.from(chunk, encoding);\\\\n }\\\\n return chunk;\\\\n}\\\\n\\\\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\\\\n // making it explicit this property is not enumerable\\\\n // because otherwise some prototype manipulation in\\\\n // userland will fail\\\\n enumerable: false,\\\\n get: function () {\\\\n return this._writableState.highWaterMark;\\\\n }\\\\n});\\\\n\\\\n// if we're already writing something, then just put this\\\\n// in the queue, and wait our turn. Otherwise, call _write\\\\n// If we return false, then we need a drain event, so set that flag.\\\\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\\\\n if (!isBuf) {\\\\n var newChunk = decodeChunk(state, chunk, encoding);\\\\n if (chunk !== newChunk) {\\\\n isBuf = true;\\\\n encoding = 'buffer';\\\\n chunk = newChunk;\\\\n }\\\\n }\\\\n var len = state.objectMode ? 1 : chunk.length;\\\\n\\\\n state.length += len;\\\\n\\\\n var ret = state.length < state.highWaterMark;\\\\n // we must ensure that previous needDrain will not be reset to false.\\\\n if (!ret) state.needDrain = true;\\\\n\\\\n if (state.writing || state.corked) {\\\\n var last = state.lastBufferedRequest;\\\\n state.lastBufferedRequest = {\\\\n chunk: chunk,\\\\n encoding: encoding,\\\\n isBuf: isBuf,\\\\n callback: cb,\\\\n next: null\\\\n };\\\\n if (last) {\\\\n last.next = state.lastBufferedRequest;\\\\n } else {\\\\n state.bufferedRequest = state.lastBufferedRequest;\\\\n }\\\\n state.bufferedRequestCount += 1;\\\\n } else {\\\\n doWrite(stream, state, false, len, chunk, encoding, cb);\\\\n }\\\\n\\\\n return ret;\\\\n}\\\\n\\\\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\\\\n state.writelen = len;\\\\n state.writecb = cb;\\\\n state.writing = true;\\\\n state.sync = true;\\\\n if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\\\\n state.sync = false;\\\\n}\\\\n\\\\nfunction onwriteError(stream, state, sync, er, cb) {\\\\n --state.pendingcb;\\\\n\\\\n if (sync) {\\\\n // defer the callback if we are being called synchronously\\\\n // to avoid piling up things on the stack\\\\n pna.nextTick(cb, er);\\\\n // this can emit finish, and it will always happen\\\\n // after error\\\\n pna.nextTick(finishMaybe, stream, state);\\\\n stream._writableState.errorEmitted = true;\\\\n stream.emit('error', er);\\\\n } else {\\\\n // the caller expect this to happen before if\\\\n // it is async\\\\n cb(er);\\\\n stream._writableState.errorEmitted = true;\\\\n stream.emit('error', er);\\\\n // this can emit finish, but finish must\\\\n // always follow error\\\\n finishMaybe(stream, state);\\\\n }\\\\n}\\\\n\\\\nfunction onwriteStateUpdate(state) {\\\\n state.writing = false;\\\\n state.writecb = null;\\\\n state.length -= state.writelen;\\\\n state.writelen = 0;\\\\n}\\\\n\\\\nfunction onwrite(stream, er) {\\\\n var state = stream._writableState;\\\\n var sync = state.sync;\\\\n var 
cb = state.writecb;\\\\n\\\\n onwriteStateUpdate(state);\\\\n\\\\n if (er) onwriteError(stream, state, sync, er, cb);else {\\\\n // Check if we're actually ready to finish, but don't emit yet\\\\n var finished = needFinish(state);\\\\n\\\\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\\\\n clearBuffer(stream, state);\\\\n }\\\\n\\\\n if (sync) {\\\\n /*<replacement>*/\\\\n asyncWrite(afterWrite, stream, state, finished, cb);\\\\n /*</replacement>*/\\\\n } else {\\\\n afterWrite(stream, state, finished, cb);\\\\n }\\\\n }\\\\n}\\\\n\\\\nfunction afterWrite(stream, state, finished, cb) {\\\\n if (!finished) onwriteDrain(stream, state);\\\\n state.pendingcb--;\\\\n cb();\\\\n finishMaybe(stream, state);\\\\n}\\\\n\\\\n// Must force callback to be called on nextTick, so that we don't\\\\n// emit 'drain' before the write() consumer gets the 'false' return\\\\n// value, and has a chance to attach a 'drain' listener.\\\\nfunction onwriteDrain(stream, state) {\\\\n if (state.length === 0 && state.needDrain) {\\\\n state.needDrain = false;\\\\n stream.emit('drain');\\\\n }\\\\n}\\\\n\\\\n// if there's something in the buffer waiting, then process it\\\\nfunction clearBuffer(stream, state) {\\\\n state.bufferProcessing = true;\\\\n var entry = state.bufferedRequest;\\\\n\\\\n if (stream._writev && entry && entry.next) {\\\\n // Fast case, write everything using _writev()\\\\n var l = state.bufferedRequestCount;\\\\n var buffer = new Array(l);\\\\n var holder = state.corkedRequestsFree;\\\\n holder.entry = entry;\\\\n\\\\n var count = 0;\\\\n var allBuffers = true;\\\\n while (entry) {\\\\n buffer[count] = entry;\\\\n if (!entry.isBuf) allBuffers = false;\\\\n entry = entry.next;\\\\n count += 1;\\\\n }\\\\n buffer.allBuffers = allBuffers;\\\\n\\\\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\\\\n\\\\n // doWrite is almost always async, defer these to save a bit of time\\\\n // as the hot path ends with doWrite\\\\n state.pendingcb++;\\\\n state.lastBufferedRequest = null;\\\\n if (holder.next) {\\\\n state.corkedRequestsFree = holder.next;\\\\n holder.next = null;\\\\n } else {\\\\n state.corkedRequestsFree = new CorkedRequest(state);\\\\n }\\\\n state.bufferedRequestCount = 0;\\\\n } else {\\\\n // Slow case, write chunks one-by-one\\\\n while (entry) {\\\\n var chunk = entry.chunk;\\\\n var encoding = entry.encoding;\\\\n var cb = entry.callback;\\\\n var len = state.objectMode ? 
1 : chunk.length;\\\\n\\\\n doWrite(stream, state, false, len, chunk, encoding, cb);\\\\n entry = entry.next;\\\\n state.bufferedRequestCount--;\\\\n // if we didn't call the onwrite immediately, then\\\\n // it means that we need to wait until it does.\\\\n // also, that means that the chunk and cb are currently\\\\n // being processed, so move the buffer counter past them.\\\\n if (state.writing) {\\\\n break;\\\\n }\\\\n }\\\\n\\\\n if (entry === null) state.lastBufferedRequest = null;\\\\n }\\\\n\\\\n state.bufferedRequest = entry;\\\\n state.bufferProcessing = false;\\\\n}\\\\n\\\\nWritable.prototype._write = function (chunk, encoding, cb) {\\\\n cb(new Error('_write() is not implemented'));\\\\n};\\\\n\\\\nWritable.prototype._writev = null;\\\\n\\\\nWritable.prototype.end = function (chunk, encoding, cb) {\\\\n var state = this._writableState;\\\\n\\\\n if (typeof chunk === 'function') {\\\\n cb = chunk;\\\\n chunk = null;\\\\n encoding = null;\\\\n } else if (typeof encoding === 'function') {\\\\n cb = encoding;\\\\n encoding = null;\\\\n }\\\\n\\\\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\\\\n\\\\n // .end() fully uncorks\\\\n if (state.corked) {\\\\n state.corked = 1;\\\\n this.uncork();\\\\n }\\\\n\\\\n // ignore unnecessary end() calls.\\\\n if (!state.ending && !state.finished) endWritable(this, state, cb);\\\\n};\\\\n\\\\nfunction needFinish(state) {\\\\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\\\\n}\\\\nfunction callFinal(stream, state) {\\\\n stream._final(function (err) {\\\\n state.pendingcb--;\\\\n if (err) {\\\\n stream.emit('error', err);\\\\n }\\\\n state.prefinished = true;\\\\n stream.emit('prefinish');\\\\n finishMaybe(stream, state);\\\\n });\\\\n}\\\\nfunction prefinish(stream, state) {\\\\n if (!state.prefinished && !state.finalCalled) {\\\\n if (typeof stream._final === 'function') {\\\\n state.pendingcb++;\\\\n state.finalCalled = true;\\\\n pna.nextTick(callFinal, stream, state);\\\\n } else {\\\\n state.prefinished = true;\\\\n stream.emit('prefinish');\\\\n }\\\\n }\\\\n}\\\\n\\\\nfunction finishMaybe(stream, state) {\\\\n var need = needFinish(state);\\\\n if (need) {\\\\n prefinish(stream, state);\\\\n if (state.pendingcb === 0) {\\\\n state.finished = true;\\\\n stream.emit('finish');\\\\n }\\\\n }\\\\n return need;\\\\n}\\\\n\\\\nfunction endWritable(stream, state, cb) {\\\\n state.ending = true;\\\\n finishMaybe(stream, state);\\\\n if (cb) {\\\\n if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);\\\\n }\\\\n state.ended = true;\\\\n stream.writable = false;\\\\n}\\\\n\\\\nfunction onCorkedFinish(corkReq, state, err) {\\\\n var entry = corkReq.entry;\\\\n corkReq.entry = null;\\\\n while (entry) {\\\\n var cb = entry.callback;\\\\n state.pendingcb--;\\\\n cb(err);\\\\n entry = entry.next;\\\\n }\\\\n if (state.corkedRequestsFree) {\\\\n state.corkedRequestsFree.next = corkReq;\\\\n } else {\\\\n state.corkedRequestsFree = corkReq;\\\\n }\\\\n}\\\\n\\\\nObject.defineProperty(Writable.prototype, 'destroyed', {\\\\n get: function () {\\\\n if (this._writableState === undefined) {\\\\n return false;\\\\n }\\\\n return this._writableState.destroyed;\\\\n },\\\\n set: function (value) {\\\\n // we ignore the value if the stream\\\\n // has not been initialized yet\\\\n if (!this._writableState) {\\\\n return;\\\\n }\\\\n\\\\n // backward compatibility, the user is explicitly\\\\n // managing destroyed\\\\n this._writableState.destroyed = 
value;\\\\n }\\\\n});\\\\n\\\\nWritable.prototype.destroy = destroyImpl.destroy;\\\\nWritable.prototype._undestroy = destroyImpl.undestroy;\\\\nWritable.prototype._destroy = function (err, cb) {\\\\n this.end();\\\\n cb(err);\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/_stream_writable.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/internal/streams/BufferList.js\\\":\\n/*!*************************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/internal/streams/BufferList.js ***!\\n \\\\*************************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\\\\\\\"Cannot call a class as a function\\\\\\\"); } }\\\\n\\\\nvar Buffer = __webpack_require__(/*! safe-buffer */ \\\\\\\"./node_modules/readable-stream/node_modules/safe-buffer/index.js\\\\\\\").Buffer;\\\\nvar util = __webpack_require__(/*! util */ \\\\\\\"util\\\\\\\");\\\\n\\\\nfunction copyBuffer(src, target, offset) {\\\\n src.copy(target, offset);\\\\n}\\\\n\\\\nmodule.exports = function () {\\\\n function BufferList() {\\\\n _classCallCheck(this, BufferList);\\\\n\\\\n this.head = null;\\\\n this.tail = null;\\\\n this.length = 0;\\\\n }\\\\n\\\\n BufferList.prototype.push = function push(v) {\\\\n var entry = { data: v, next: null };\\\\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\\\\n this.tail = entry;\\\\n ++this.length;\\\\n };\\\\n\\\\n BufferList.prototype.unshift = function unshift(v) {\\\\n var entry = { data: v, next: this.head };\\\\n if (this.length === 0) this.tail = entry;\\\\n this.head = entry;\\\\n ++this.length;\\\\n };\\\\n\\\\n BufferList.prototype.shift = function shift() {\\\\n if (this.length === 0) return;\\\\n var ret = this.head.data;\\\\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\\\\n --this.length;\\\\n return ret;\\\\n };\\\\n\\\\n BufferList.prototype.clear = function clear() {\\\\n this.head = this.tail = null;\\\\n this.length = 0;\\\\n };\\\\n\\\\n BufferList.prototype.join = function join(s) {\\\\n if (this.length === 0) return '';\\\\n var p = this.head;\\\\n var ret = '' + p.data;\\\\n while (p = p.next) {\\\\n ret += s + p.data;\\\\n }return ret;\\\\n };\\\\n\\\\n BufferList.prototype.concat = function concat(n) {\\\\n if (this.length === 0) return Buffer.alloc(0);\\\\n if (this.length === 1) return this.head.data;\\\\n var ret = Buffer.allocUnsafe(n >>> 0);\\\\n var p = this.head;\\\\n var i = 0;\\\\n while (p) {\\\\n copyBuffer(p.data, ret, i);\\\\n i += p.data.length;\\\\n p = p.next;\\\\n }\\\\n return ret;\\\\n };\\\\n\\\\n return BufferList;\\\\n}();\\\\n\\\\nif (util && util.inspect && util.inspect.custom) {\\\\n module.exports.prototype[util.inspect.custom] = function () {\\\\n var obj = util.inspect({ length: this.length });\\\\n return this.constructor.name + ' ' + obj;\\\\n };\\\\n}\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/BufferList.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/internal/streams/destroy.js\\\":\\n/*!**********************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/internal/streams/destroy.js 
***!\\n \\\\**********************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar pna = __webpack_require__(/*! process-nextick-args */ \\\\\\\"./node_modules/process-nextick-args/index.js\\\\\\\");\\\\n/*</replacement>*/\\\\n\\\\n// undocumented cb() API, needed for core, not for public API\\\\nfunction destroy(err, cb) {\\\\n var _this = this;\\\\n\\\\n var readableDestroyed = this._readableState && this._readableState.destroyed;\\\\n var writableDestroyed = this._writableState && this._writableState.destroyed;\\\\n\\\\n if (readableDestroyed || writableDestroyed) {\\\\n if (cb) {\\\\n cb(err);\\\\n } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {\\\\n pna.nextTick(emitErrorNT, this, err);\\\\n }\\\\n return this;\\\\n }\\\\n\\\\n // we set destroyed to true before firing error callbacks in order\\\\n // to make it re-entrance safe in case destroy() is called within callbacks\\\\n\\\\n if (this._readableState) {\\\\n this._readableState.destroyed = true;\\\\n }\\\\n\\\\n // if this is a duplex stream mark the writable part as destroyed as well\\\\n if (this._writableState) {\\\\n this._writableState.destroyed = true;\\\\n }\\\\n\\\\n this._destroy(err || null, function (err) {\\\\n if (!cb && err) {\\\\n pna.nextTick(emitErrorNT, _this, err);\\\\n if (_this._writableState) {\\\\n _this._writableState.errorEmitted = true;\\\\n }\\\\n } else if (cb) {\\\\n cb(err);\\\\n }\\\\n });\\\\n\\\\n return this;\\\\n}\\\\n\\\\nfunction undestroy() {\\\\n if (this._readableState) {\\\\n this._readableState.destroyed = false;\\\\n this._readableState.reading = false;\\\\n this._readableState.ended = false;\\\\n this._readableState.endEmitted = false;\\\\n }\\\\n\\\\n if (this._writableState) {\\\\n this._writableState.destroyed = false;\\\\n this._writableState.ended = false;\\\\n this._writableState.ending = false;\\\\n this._writableState.finished = false;\\\\n this._writableState.errorEmitted = false;\\\\n }\\\\n}\\\\n\\\\nfunction emitErrorNT(self, err) {\\\\n self.emit('error', err);\\\\n}\\\\n\\\\nmodule.exports = {\\\\n destroy: destroy,\\\\n undestroy: undestroy\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/destroy.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/lib/internal/streams/stream.js\\\":\\n/*!*********************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/lib/internal/streams/stream.js ***!\\n \\\\*********************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"module.exports = __webpack_require__(/*! stream */ \\\\\\\"stream\\\\\\\");\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/lib/internal/streams/stream.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/node_modules/safe-buffer/index.js\\\":\\n/*!************************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/node_modules/safe-buffer/index.js ***!\\n \\\\************************************************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"/* eslint-disable node/no-deprecated-api */\\\\nvar buffer = __webpack_require__(/*! buffer */ \\\\\\\"buffer\\\\\\\")\\\\nvar Buffer = buffer.Buffer\\\\n\\\\n// alternative to using Object.keys for old browsers\\\\nfunction copyProps (src, dst) {\\\\n for (var key in src) {\\\\n dst[key] = src[key]\\\\n }\\\\n}\\\\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\\\\n module.exports = buffer\\\\n} else {\\\\n // Copy properties from require('buffer')\\\\n copyProps(buffer, exports)\\\\n exports.Buffer = SafeBuffer\\\\n}\\\\n\\\\nfunction SafeBuffer (arg, encodingOrOffset, length) {\\\\n return Buffer(arg, encodingOrOffset, length)\\\\n}\\\\n\\\\n// Copy static methods from Buffer\\\\ncopyProps(Buffer, SafeBuffer)\\\\n\\\\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\\\\n if (typeof arg === 'number') {\\\\n throw new TypeError('Argument must not be a number')\\\\n }\\\\n return Buffer(arg, encodingOrOffset, length)\\\\n}\\\\n\\\\nSafeBuffer.alloc = function (size, fill, encoding) {\\\\n if (typeof size !== 'number') {\\\\n throw new TypeError('Argument must be a number')\\\\n }\\\\n var buf = Buffer(size)\\\\n if (fill !== undefined) {\\\\n if (typeof encoding === 'string') {\\\\n buf.fill(fill, encoding)\\\\n } else {\\\\n buf.fill(fill)\\\\n }\\\\n } else {\\\\n buf.fill(0)\\\\n }\\\\n return buf\\\\n}\\\\n\\\\nSafeBuffer.allocUnsafe = function (size) {\\\\n if (typeof size !== 'number') {\\\\n throw new TypeError('Argument must be a number')\\\\n }\\\\n return Buffer(size)\\\\n}\\\\n\\\\nSafeBuffer.allocUnsafeSlow = function (size) {\\\\n if (typeof size !== 'number') {\\\\n throw new TypeError('Argument must be a number')\\\\n }\\\\n return buffer.SlowBuffer(size)\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/node_modules/safe-buffer/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js\\\":\\n/*!****************************************************************************************!*\\\\\\n !*** ./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js ***!\\n \\\\****************************************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"// Copyright Joyent, Inc. and other Node contributors.\\\\n//\\\\n// Permission is hereby granted, free of charge, to any person obtaining a\\\\n// copy of this software and associated documentation files (the\\\\n// \\\\\\\"Software\\\\\\\"), to deal in the Software without restriction, including\\\\n// without limitation the rights to use, copy, modify, merge, publish,\\\\n// distribute, sublicense, and/or sell copies of the Software, and to permit\\\\n// persons to whom the Software is furnished to do so, subject to the\\\\n// following conditions:\\\\n//\\\\n// The above copyright notice and this permission notice shall be included\\\\n// in all copies or substantial portions of the Software.\\\\n//\\\\n// THE SOFTWARE IS PROVIDED \\\\\\\"AS IS\\\\\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\\\\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\\\\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\\\\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\\\\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\\\\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\\\\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\\\\n\\\\n\\\\n\\\\n/*<replacement>*/\\\\n\\\\nvar Buffer = __webpack_require__(/*! safe-buffer */ \\\\\\\"./node_modules/readable-stream/node_modules/safe-buffer/index.js\\\\\\\").Buffer;\\\\n/*</replacement>*/\\\\n\\\\nvar isEncoding = Buffer.isEncoding || function (encoding) {\\\\n encoding = '' + encoding;\\\\n switch (encoding && encoding.toLowerCase()) {\\\\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\\\\n return true;\\\\n default:\\\\n return false;\\\\n }\\\\n};\\\\n\\\\nfunction _normalizeEncoding(enc) {\\\\n if (!enc) return 'utf8';\\\\n var retried;\\\\n while (true) {\\\\n switch (enc) {\\\\n case 'utf8':\\\\n case 'utf-8':\\\\n return 'utf8';\\\\n case 'ucs2':\\\\n case 'ucs-2':\\\\n case 'utf16le':\\\\n case 'utf-16le':\\\\n return 'utf16le';\\\\n case 'latin1':\\\\n case 'binary':\\\\n return 'latin1';\\\\n case 'base64':\\\\n case 'ascii':\\\\n case 'hex':\\\\n return enc;\\\\n default:\\\\n if (retried) return; // undefined\\\\n enc = ('' + enc).toLowerCase();\\\\n retried = true;\\\\n }\\\\n }\\\\n};\\\\n\\\\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\\\\n// modules monkey-patch it to support additional encodings\\\\nfunction normalizeEncoding(enc) {\\\\n var nenc = _normalizeEncoding(enc);\\\\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\\\\n return nenc || enc;\\\\n}\\\\n\\\\n// StringDecoder provides an interface for efficiently splitting a series of\\\\n// buffers into a series of JS strings without breaking apart multi-byte\\\\n// characters.\\\\nexports.StringDecoder = StringDecoder;\\\\nfunction StringDecoder(encoding) {\\\\n this.encoding = normalizeEncoding(encoding);\\\\n var nb;\\\\n switch (this.encoding) {\\\\n case 'utf16le':\\\\n this.text = utf16Text;\\\\n this.end = utf16End;\\\\n nb = 4;\\\\n break;\\\\n case 'utf8':\\\\n this.fillLast = utf8FillLast;\\\\n nb = 4;\\\\n break;\\\\n case 'base64':\\\\n this.text = base64Text;\\\\n this.end = base64End;\\\\n nb = 3;\\\\n break;\\\\n default:\\\\n this.write = simpleWrite;\\\\n this.end = simpleEnd;\\\\n return;\\\\n }\\\\n this.lastNeed = 0;\\\\n this.lastTotal = 0;\\\\n this.lastChar = Buffer.allocUnsafe(nb);\\\\n}\\\\n\\\\nStringDecoder.prototype.write = function (buf) {\\\\n if (buf.length === 0) return '';\\\\n var r;\\\\n var i;\\\\n if (this.lastNeed) {\\\\n r = this.fillLast(buf);\\\\n if (r === undefined) return '';\\\\n i = this.lastNeed;\\\\n this.lastNeed = 0;\\\\n } else {\\\\n i = 0;\\\\n }\\\\n if (i < buf.length) return r ? 
r + this.text(buf, i) : this.text(buf, i);\\\\n return r || '';\\\\n};\\\\n\\\\nStringDecoder.prototype.end = utf8End;\\\\n\\\\n// Returns only complete characters in a Buffer\\\\nStringDecoder.prototype.text = utf8Text;\\\\n\\\\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\\\\nStringDecoder.prototype.fillLast = function (buf) {\\\\n if (this.lastNeed <= buf.length) {\\\\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\\\\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\\\\n }\\\\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\\\\n this.lastNeed -= buf.length;\\\\n};\\\\n\\\\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\\\\n// continuation byte. If an invalid byte is detected, -2 is returned.\\\\nfunction utf8CheckByte(byte) {\\\\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\\\\n return byte >> 6 === 0x02 ? -1 : -2;\\\\n}\\\\n\\\\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\\\\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\\\\n// needed to complete the UTF-8 character (if applicable) are returned.\\\\nfunction utf8CheckIncomplete(self, buf, i) {\\\\n var j = buf.length - 1;\\\\n if (j < i) return 0;\\\\n var nb = utf8CheckByte(buf[j]);\\\\n if (nb >= 0) {\\\\n if (nb > 0) self.lastNeed = nb - 1;\\\\n return nb;\\\\n }\\\\n if (--j < i || nb === -2) return 0;\\\\n nb = utf8CheckByte(buf[j]);\\\\n if (nb >= 0) {\\\\n if (nb > 0) self.lastNeed = nb - 2;\\\\n return nb;\\\\n }\\\\n if (--j < i || nb === -2) return 0;\\\\n nb = utf8CheckByte(buf[j]);\\\\n if (nb >= 0) {\\\\n if (nb > 0) {\\\\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\\\\n }\\\\n return nb;\\\\n }\\\\n return 0;\\\\n}\\\\n\\\\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\\\\n// needed or are available. If we see a non-continuation byte where we expect\\\\n// one, we \\\\\\\"replace\\\\\\\" the validated continuation bytes we've seen so far with\\\\n// a single UTF-8 replacement character ('\\\\\\\\ufffd'), to match v8's UTF-8 decoding\\\\n// behavior. 
The continuation byte check is included three times in the case\\\\n// where all of the continuation bytes for a character exist in the same buffer.\\\\n// It is also done this way as a slight performance increase instead of using a\\\\n// loop.\\\\nfunction utf8CheckExtraBytes(self, buf, p) {\\\\n if ((buf[0] & 0xC0) !== 0x80) {\\\\n self.lastNeed = 0;\\\\n return '\\\\\\\\ufffd';\\\\n }\\\\n if (self.lastNeed > 1 && buf.length > 1) {\\\\n if ((buf[1] & 0xC0) !== 0x80) {\\\\n self.lastNeed = 1;\\\\n return '\\\\\\\\ufffd';\\\\n }\\\\n if (self.lastNeed > 2 && buf.length > 2) {\\\\n if ((buf[2] & 0xC0) !== 0x80) {\\\\n self.lastNeed = 2;\\\\n return '\\\\\\\\ufffd';\\\\n }\\\\n }\\\\n }\\\\n}\\\\n\\\\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\\\\nfunction utf8FillLast(buf) {\\\\n var p = this.lastTotal - this.lastNeed;\\\\n var r = utf8CheckExtraBytes(this, buf, p);\\\\n if (r !== undefined) return r;\\\\n if (this.lastNeed <= buf.length) {\\\\n buf.copy(this.lastChar, p, 0, this.lastNeed);\\\\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\\\\n }\\\\n buf.copy(this.lastChar, p, 0, buf.length);\\\\n this.lastNeed -= buf.length;\\\\n}\\\\n\\\\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\\\\n// partial character, the character's bytes are buffered until the required\\\\n// number of bytes are available.\\\\nfunction utf8Text(buf, i) {\\\\n var total = utf8CheckIncomplete(this, buf, i);\\\\n if (!this.lastNeed) return buf.toString('utf8', i);\\\\n this.lastTotal = total;\\\\n var end = buf.length - (total - this.lastNeed);\\\\n buf.copy(this.lastChar, 0, end);\\\\n return buf.toString('utf8', i, end);\\\\n}\\\\n\\\\n// For UTF-8, a replacement character is added when ending on a partial\\\\n// character.\\\\nfunction utf8End(buf) {\\\\n var r = buf && buf.length ? this.write(buf) : '';\\\\n if (this.lastNeed) return r + '\\\\\\\\ufffd';\\\\n return r;\\\\n}\\\\n\\\\n// UTF-16LE typically needs two bytes per character, but even if we have an even\\\\n// number of bytes available, we need to check if we end on a leading/high\\\\n// surrogate. In that case, we need to wait for the next two bytes in order to\\\\n// decode the last character properly.\\\\nfunction utf16Text(buf, i) {\\\\n if ((buf.length - i) % 2 === 0) {\\\\n var r = buf.toString('utf16le', i);\\\\n if (r) {\\\\n var c = r.charCodeAt(r.length - 1);\\\\n if (c >= 0xD800 && c <= 0xDBFF) {\\\\n this.lastNeed = 2;\\\\n this.lastTotal = 4;\\\\n this.lastChar[0] = buf[buf.length - 2];\\\\n this.lastChar[1] = buf[buf.length - 1];\\\\n return r.slice(0, -1);\\\\n }\\\\n }\\\\n return r;\\\\n }\\\\n this.lastNeed = 1;\\\\n this.lastTotal = 2;\\\\n this.lastChar[0] = buf[buf.length - 1];\\\\n return buf.toString('utf16le', i, buf.length - 1);\\\\n}\\\\n\\\\n// For UTF-16LE we do not explicitly append special replacement characters if we\\\\n// end on a partial character, we simply let v8 handle that.\\\\nfunction utf16End(buf) {\\\\n var r = buf && buf.length ? 
this.write(buf) : '';\\\\n if (this.lastNeed) {\\\\n var end = this.lastTotal - this.lastNeed;\\\\n return r + this.lastChar.toString('utf16le', 0, end);\\\\n }\\\\n return r;\\\\n}\\\\n\\\\nfunction base64Text(buf, i) {\\\\n var n = (buf.length - i) % 3;\\\\n if (n === 0) return buf.toString('base64', i);\\\\n this.lastNeed = 3 - n;\\\\n this.lastTotal = 3;\\\\n if (n === 1) {\\\\n this.lastChar[0] = buf[buf.length - 1];\\\\n } else {\\\\n this.lastChar[0] = buf[buf.length - 2];\\\\n this.lastChar[1] = buf[buf.length - 1];\\\\n }\\\\n return buf.toString('base64', i, buf.length - n);\\\\n}\\\\n\\\\nfunction base64End(buf) {\\\\n var r = buf && buf.length ? this.write(buf) : '';\\\\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\\\\n return r;\\\\n}\\\\n\\\\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\\\\nfunction simpleWrite(buf) {\\\\n return buf.toString(this.encoding);\\\\n}\\\\n\\\\nfunction simpleEnd(buf) {\\\\n return buf && buf.length ? this.write(buf) : '';\\\\n}\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/node_modules/string_decoder/lib/string_decoder.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/readable-stream/readable.js\\\":\\n/*!**************************************************!*\\\\\\n !*** ./node_modules/readable-stream/readable.js ***!\\n \\\\**************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"var Stream = __webpack_require__(/*! stream */ \\\\\\\"stream\\\\\\\");\\\\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\\\\n module.exports = Stream;\\\\n exports = module.exports = Stream.Readable;\\\\n exports.Readable = Stream.Readable;\\\\n exports.Writable = Stream.Writable;\\\\n exports.Duplex = Stream.Duplex;\\\\n exports.Transform = Stream.Transform;\\\\n exports.PassThrough = Stream.PassThrough;\\\\n exports.Stream = Stream;\\\\n} else {\\\\n exports = module.exports = __webpack_require__(/*! ./lib/_stream_readable.js */ \\\\\\\"./node_modules/readable-stream/lib/_stream_readable.js\\\\\\\");\\\\n exports.Stream = Stream || exports;\\\\n exports.Readable = exports;\\\\n exports.Writable = __webpack_require__(/*! ./lib/_stream_writable.js */ \\\\\\\"./node_modules/readable-stream/lib/_stream_writable.js\\\\\\\");\\\\n exports.Duplex = __webpack_require__(/*! ./lib/_stream_duplex.js */ \\\\\\\"./node_modules/readable-stream/lib/_stream_duplex.js\\\\\\\");\\\\n exports.Transform = __webpack_require__(/*! ./lib/_stream_transform.js */ \\\\\\\"./node_modules/readable-stream/lib/_stream_transform.js\\\\\\\");\\\\n exports.PassThrough = __webpack_require__(/*! ./lib/_stream_passthrough.js */ \\\\\\\"./node_modules/readable-stream/lib/_stream_passthrough.js\\\\\\\");\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/readable-stream/readable.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/supports-color/index.js\\\":\\n/*!**********************************************!*\\\\\\n !*** ./node_modules/supports-color/index.js ***!\\n \\\\**********************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\nvar argv = process.argv;\\\\n\\\\nvar terminator = argv.indexOf('--');\\\\nvar hasFlag = function (flag) {\\\\n\\\\tflag = '--' + flag;\\\\n\\\\tvar pos = argv.indexOf(flag);\\\\n\\\\treturn pos !== -1 && (terminator !== -1 ? pos < terminator : true);\\\\n};\\\\n\\\\nmodule.exports = (function () {\\\\n\\\\tif ('FORCE_COLOR' in process.env) {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\tif (hasFlag('no-color') ||\\\\n\\\\t\\\\thasFlag('no-colors') ||\\\\n\\\\t\\\\thasFlag('color=false')) {\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\tif (hasFlag('color') ||\\\\n\\\\t\\\\thasFlag('colors') ||\\\\n\\\\t\\\\thasFlag('color=true') ||\\\\n\\\\t\\\\thasFlag('color=always')) {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\tif (process.stdout && !process.stdout.isTTY) {\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\tif (process.platform === 'win32') {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\tif ('COLORTERM' in process.env) {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\tif (process.env.TERM === 'dumb') {\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\tif (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\treturn false;\\\\n})();\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/supports-color/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads-plugin/dist/loader.js?{\\\\\\\"name\\\\\\\":\\\\\\\"1\\\\\\\"}!./node_modules/geotiff/src/decoder.worker.js\\\":\\n/*!**************************************************************************************************************!*\\\\\\n !*** ./node_modules/threads-plugin/dist/loader.js?{\\\"name\\\":\\\"1\\\"}!./node_modules/geotiff/src/decoder.worker.js ***!\\n \\\\**************************************************************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"module.exports = __webpack_require__.p + \\\\\\\"1.08a977d792232ceaebd8.worker.worker.js\\\\\\\"\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/geotiff/src/decoder.worker.js?./node_modules/threads-plugin/dist/loader.js?%7B%22name%22:%221%22%7D\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/common.js\\\":\\n/*!*************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/common.js ***!\\n \\\\*************************************************/\\n/*! exports provided: registerSerializer, deserialize, serialize */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"registerSerializer\\\\\\\", function() { return registerSerializer; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"deserialize\\\\\\\", function() { return deserialize; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"serialize\\\\\\\", function() { return serialize; });\\\\n/* harmony import */ var _serializers__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./serializers */ \\\\\\\"./node_modules/threads/dist-esm/serializers.js\\\\\\\");\\\\n\\\\nlet registeredSerializer = _serializers__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"DefaultSerializer\\\\\\\"];\\\\nfunction registerSerializer(serializer) {\\\\n registeredSerializer = Object(_serializers__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"extendSerializer\\\\\\\"])(registeredSerializer, serializer);\\\\n}\\\\nfunction deserialize(message) {\\\\n return registeredSerializer.deserialize(message);\\\\n}\\\\nfunction serialize(input) {\\\\n return registeredSerializer.serialize(input);\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/common.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/index.js\\\":\\n/*!************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/index.js ***!\\n \\\\************************************************/\\n/*! exports provided: registerSerializer, Pool, spawn, Thread, isWorkerRuntime, BlobWorker, Worker, expose, DefaultSerializer, Transfer */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./common */ \\\\\\\"./node_modules/threads/dist-esm/common.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"registerSerializer\\\\\\\", function() { return _common__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"registerSerializer\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _master_index__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./master/index */ \\\\\\\"./node_modules/threads/dist-esm/master/index.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Pool\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Pool\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"spawn\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"spawn\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Thread\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Thread\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"isWorkerRuntime\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"BlobWorker\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"BlobWorker\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Worker\\\\\\\", function() { return _master_index__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Worker\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _worker_index__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./worker/index */ \\\\\\\"./node_modules/threads/dist-esm/worker/index.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"expose\\\\\\\", function() { return _worker_index__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"expose\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _serializers__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
./serializers */ \\\\\\\"./node_modules/threads/dist-esm/serializers.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"DefaultSerializer\\\\\\\", function() { return _serializers__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"DefaultSerializer\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./transferable */ \\\\\\\"./node_modules/threads/dist-esm/transferable.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Transfer\\\\\\\", function() { return _transferable__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"Transfer\\\\\\\"]; });\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/get-bundle-url.browser.js\\\":\\n/*!************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/get-bundle-url.browser.js ***!\\n \\\\************************************************************************/\\n/*! exports provided: getBaseURL, getBundleURL */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getBaseURL\\\\\\\", function() { return getBaseURL; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getBundleURL\\\\\\\", function() { return getBundleURLCached; });\\\\n// Source: <https://github.com/parcel-bundler/parcel/blob/master/packages/core/parcel-bundler/src/builtins/bundle-url.js>\\\\nlet bundleURL;\\\\nfunction getBundleURLCached() {\\\\n if (!bundleURL) {\\\\n bundleURL = getBundleURL();\\\\n }\\\\n return bundleURL;\\\\n}\\\\nfunction getBundleURL() {\\\\n // Attempt to find the URL of the current script and use that as the base URL\\\\n try {\\\\n throw new Error;\\\\n }\\\\n catch (err) {\\\\n const matches = (\\\\\\\"\\\\\\\" + err.stack).match(/(https?|file|ftp|chrome-extension|moz-extension):\\\\\\\\/\\\\\\\\/[^)\\\\\\\\n]+/g);\\\\n if (matches) {\\\\n return getBaseURL(matches[0]);\\\\n }\\\\n }\\\\n return \\\\\\\"/\\\\\\\";\\\\n}\\\\nfunction getBaseURL(url) {\\\\n return (\\\\\\\"\\\\\\\" + url).replace(/^((?:https?|file|ftp|chrome-extension|moz-extension):\\\\\\\\/\\\\\\\\/.+)?\\\\\\\\/[^/]+(?:\\\\\\\\?.*)?$/, '$1') + '/';\\\\n}\\\\n\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/get-bundle-url.browser.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/implementation.browser.js\\\":\\n/*!************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/implementation.browser.js ***!\\n \\\\************************************************************************/\\n/*! 
exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"defaultPoolSize\\\\\\\", function() { return defaultPoolSize; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getWorkerImplementation\\\\\\\", function() { return getWorkerImplementation; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return isWorkerRuntime; });\\\\n/* harmony import */ var _get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./get-bundle-url.browser */ \\\\\\\"./node_modules/threads/dist-esm/master/get-bundle-url.browser.js\\\\\\\");\\\\n// tslint:disable max-classes-per-file\\\\n\\\\nconst defaultPoolSize = typeof navigator !== \\\\\\\"undefined\\\\\\\" && navigator.hardwareConcurrency\\\\n ? navigator.hardwareConcurrency\\\\n : 4;\\\\nconst isAbsoluteURL = (value) => /^[a-zA-Z][a-zA-Z\\\\\\\\d+\\\\\\\\-.]*:/.test(value);\\\\nfunction createSourceBlobURL(code) {\\\\n const blob = new Blob([code], { type: \\\\\\\"application/javascript\\\\\\\" });\\\\n return URL.createObjectURL(blob);\\\\n}\\\\nfunction selectWorkerImplementation() {\\\\n if (typeof Worker === \\\\\\\"undefined\\\\\\\") {\\\\n // Might happen on Safari, for instance\\\\n // The idea is to only fail if the constructor is actually used\\\\n return class NoWebWorker {\\\\n constructor() {\\\\n throw Error(\\\\\\\"No web worker implementation available. You might have tried to spawn a worker within a worker in a browser that doesn't support workers in workers.\\\\\\\");\\\\n }\\\\n };\\\\n }\\\\n class WebWorker extends Worker {\\\\n constructor(url, options) {\\\\n var _a, _b;\\\\n if (typeof url === \\\\\\\"string\\\\\\\" && options && options._baseURL) {\\\\n url = new URL(url, options._baseURL);\\\\n }\\\\n else if (typeof url === \\\\\\\"string\\\\\\\" && !isAbsoluteURL(url) && Object(_get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"getBundleURL\\\\\\\"])().match(/^file:\\\\\\\\/\\\\\\\\//i)) {\\\\n url = new URL(url, Object(_get_bundle_url_browser__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"getBundleURL\\\\\\\"])().replace(/\\\\\\\\/[^\\\\\\\\/]+$/, \\\\\\\"/\\\\\\\"));\\\\n if ((_a = options === null || options === void 0 ? void 0 : options.CORSWorkaround) !== null && _a !== void 0 ? _a : true) {\\\\n url = createSourceBlobURL(`importScripts(${JSON.stringify(url)});`);\\\\n }\\\\n }\\\\n if (typeof url === \\\\\\\"string\\\\\\\" && isAbsoluteURL(url)) {\\\\n // Create source code blob loading JS file via `importScripts()`\\\\n // to circumvent worker CORS restrictions\\\\n if ((_b = options === null || options === void 0 ? void 0 : options.CORSWorkaround) !== null && _b !== void 0 ? 
_b : true) {\\\\n url = createSourceBlobURL(`importScripts(${JSON.stringify(url)});`);\\\\n }\\\\n }\\\\n super(url, options);\\\\n }\\\\n }\\\\n class BlobWorker extends WebWorker {\\\\n constructor(blob, options) {\\\\n const url = window.URL.createObjectURL(blob);\\\\n super(url, options);\\\\n }\\\\n static fromText(source, options) {\\\\n const blob = new window.Blob([source], { type: \\\\\\\"text/javascript\\\\\\\" });\\\\n return new BlobWorker(blob, options);\\\\n }\\\\n }\\\\n return {\\\\n blob: BlobWorker,\\\\n default: WebWorker\\\\n };\\\\n}\\\\nlet implementation;\\\\nfunction getWorkerImplementation() {\\\\n if (!implementation) {\\\\n implementation = selectWorkerImplementation();\\\\n }\\\\n return implementation;\\\\n}\\\\nfunction isWorkerRuntime() {\\\\n const isWindowContext = typeof self !== \\\\\\\"undefined\\\\\\\" && typeof Window !== \\\\\\\"undefined\\\\\\\" && self instanceof Window;\\\\n return typeof self !== \\\\\\\"undefined\\\\\\\" && self.postMessage && !isWindowContext ? true : false;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.browser.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/implementation.js\\\":\\n/*!****************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/implementation.js ***!\\n \\\\****************************************************************/\\n/*! exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"defaultPoolSize\\\\\\\", function() { return defaultPoolSize; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getWorkerImplementation\\\\\\\", function() { return getWorkerImplementation; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return isWorkerRuntime; });\\\\n/* harmony import */ var _implementation_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation.browser */ \\\\\\\"./node_modules/threads/dist-esm/master/implementation.browser.js\\\\\\\");\\\\n/* harmony import */ var _implementation_node__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./implementation.node */ \\\\\\\"./node_modules/threads/dist-esm/master/implementation.node.js\\\\\\\");\\\\n/*\\\\n * This file is only a stub to make './implementation' resolve to the right module.\\\\n */\\\\n// We alias `src/master/implementation` to `src/master/implementation.browser` for web\\\\n// browsers already in the package.json, so if get here, it's safe to pass-through the\\\\n// node implementation\\\\n\\\\n\\\\nconst runningInNode = typeof process !== 'undefined' && process.arch !== 'browser' && 'pid' in process;\\\\nconst implementation = runningInNode ? _implementation_node__WEBPACK_IMPORTED_MODULE_1__ : _implementation_browser__WEBPACK_IMPORTED_MODULE_0__;\\\\n/** Default size of pools. Depending on the platform the value might vary from device to device. */\\\\nconst defaultPoolSize = implementation.defaultPoolSize;\\\\nconst getWorkerImplementation = implementation.getWorkerImplementation;\\\\n/** Returns `true` if this code is currently running in a worker. 
*/\\\\nconst isWorkerRuntime = implementation.isWorkerRuntime;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/implementation.node.js\\\":\\n/*!*********************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/implementation.node.js ***!\\n \\\\*********************************************************************/\\n/*! exports provided: defaultPoolSize, getWorkerImplementation, isWorkerRuntime */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"defaultPoolSize\\\\\\\", function() { return defaultPoolSize; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"getWorkerImplementation\\\\\\\", function() { return getWorkerImplementation; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return isWorkerRuntime; });\\\\n/* harmony import */ var callsites__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! callsites */ \\\\\\\"./node_modules/callsites/index.js\\\\\\\");\\\\n/* harmony import */ var callsites__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(callsites__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var events__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! events */ \\\\\\\"events\\\\\\\");\\\\n/* harmony import */ var events__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(events__WEBPACK_IMPORTED_MODULE_1__);\\\\n/* harmony import */ var os__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! os */ \\\\\\\"os\\\\\\\");\\\\n/* harmony import */ var os__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(os__WEBPACK_IMPORTED_MODULE_2__);\\\\n/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! path */ \\\\\\\"path\\\\\\\");\\\\n/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_3__);\\\\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
url */ \\\\\\\"url\\\\\\\");\\\\n/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(url__WEBPACK_IMPORTED_MODULE_4__);\\\\n/// <reference lib=\\\\\\\"dom\\\\\\\" />\\\\n// tslint:disable function-constructor no-eval no-duplicate-super max-classes-per-file\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nlet tsNodeAvailable;\\\\nconst defaultPoolSize = Object(os__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"cpus\\\\\\\"])().length;\\\\nfunction detectTsNode() {\\\\n if (typeof require === \\\\\\\"function\\\\\\\") {\\\\n // Webpack build: => No ts-node required or possible\\\\n return false;\\\\n }\\\\n if (tsNodeAvailable) {\\\\n return tsNodeAvailable;\\\\n }\\\\n try {\\\\n eval(\\\\\\\"require\\\\\\\").resolve(\\\\\\\"ts-node\\\\\\\");\\\\n tsNodeAvailable = true;\\\\n }\\\\n catch (error) {\\\\n if (error && error.code === \\\\\\\"MODULE_NOT_FOUND\\\\\\\") {\\\\n tsNodeAvailable = false;\\\\n }\\\\n else {\\\\n // Re-throw\\\\n throw error;\\\\n }\\\\n }\\\\n return tsNodeAvailable;\\\\n}\\\\nfunction createTsNodeModule(scriptPath) {\\\\n const content = `\\\\n require(\\\\\\\"ts-node/register/transpile-only\\\\\\\");\\\\n require(${JSON.stringify(scriptPath)});\\\\n `;\\\\n return content;\\\\n}\\\\nfunction rebaseScriptPath(scriptPath, ignoreRegex) {\\\\n const parentCallSite = callsites__WEBPACK_IMPORTED_MODULE_0___default()().find((callsite) => {\\\\n const filename = callsite.getFileName();\\\\n return Boolean(filename &&\\\\n !filename.match(ignoreRegex) &&\\\\n !filename.match(/[\\\\\\\\/\\\\\\\\\\\\\\\\]master[\\\\\\\\/\\\\\\\\\\\\\\\\]implementation/) &&\\\\n !filename.match(/^internal\\\\\\\\/process/));\\\\n });\\\\n const rawCallerPath = parentCallSite ? parentCallSite.getFileName() : null;\\\\n let callerPath = rawCallerPath ? rawCallerPath : null;\\\\n if (callerPath && callerPath.startsWith('file:')) {\\\\n callerPath = Object(url__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"fileURLToPath\\\\\\\"])(callerPath);\\\\n }\\\\n const rebasedScriptPath = callerPath ? path__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"join\\\\\\\"](path__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"dirname\\\\\\\"](callerPath), scriptPath) : scriptPath;\\\\n return rebasedScriptPath;\\\\n}\\\\nfunction resolveScriptPath(scriptPath, baseURL) {\\\\n const makeRelative = (filePath) => {\\\\n // eval() hack is also webpack-related\\\\n return path__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"isAbsolute\\\\\\\"](filePath) ? filePath : path__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"join\\\\\\\"](baseURL || eval(\\\\\\\"__dirname\\\\\\\"), filePath);\\\\n };\\\\n const workerFilePath = typeof require === \\\\\\\"function\\\\\\\"\\\\n ? require.resolve(makeRelative(scriptPath))\\\\n : eval(\\\\\\\"require\\\\\\\").resolve(makeRelative(rebaseScriptPath(scriptPath, /[\\\\\\\\/\\\\\\\\\\\\\\\\]worker_threads[\\\\\\\\/\\\\\\\\\\\\\\\\]/)));\\\\n return workerFilePath;\\\\n}\\\\nfunction initWorkerThreadsWorker() {\\\\n // Webpack hack\\\\n const NativeWorker = typeof require === \\\\\\\"function\\\\\\\"\\\\n ? require(\\\\\\\"worker_threads\\\\\\\").Worker\\\\n : eval(\\\\\\\"require\\\\\\\")(\\\\\\\"worker_threads\\\\\\\").Worker;\\\\n let allWorkers = [];\\\\n class Worker extends NativeWorker {\\\\n constructor(scriptPath, options) {\\\\n const resolvedScriptPath = options && options.fromSource\\\\n ? 
null\\\\n : resolveScriptPath(scriptPath, (options || {})._baseURL);\\\\n if (!resolvedScriptPath) {\\\\n // `options.fromSource` is true\\\\n const sourceCode = scriptPath;\\\\n super(sourceCode, Object.assign(Object.assign({}, options), { eval: true }));\\\\n }\\\\n else if (resolvedScriptPath.match(/\\\\\\\\.tsx?$/i) && detectTsNode()) {\\\\n super(createTsNodeModule(resolvedScriptPath), Object.assign(Object.assign({}, options), { eval: true }));\\\\n }\\\\n else if (resolvedScriptPath.match(/\\\\\\\\.asar[\\\\\\\\/\\\\\\\\\\\\\\\\]/)) {\\\\n // See <https://github.com/andywer/threads-plugin/issues/17>\\\\n super(resolvedScriptPath.replace(/\\\\\\\\.asar([\\\\\\\\/\\\\\\\\\\\\\\\\])/, \\\\\\\".asar.unpacked$1\\\\\\\"), options);\\\\n }\\\\n else {\\\\n super(resolvedScriptPath, options);\\\\n }\\\\n this.mappedEventListeners = new WeakMap();\\\\n allWorkers.push(this);\\\\n }\\\\n addEventListener(eventName, rawListener) {\\\\n const listener = (message) => {\\\\n rawListener({ data: message });\\\\n };\\\\n this.mappedEventListeners.set(rawListener, listener);\\\\n this.on(eventName, listener);\\\\n }\\\\n removeEventListener(eventName, rawListener) {\\\\n const listener = this.mappedEventListeners.get(rawListener) || rawListener;\\\\n this.off(eventName, listener);\\\\n }\\\\n }\\\\n const terminateWorkersAndMaster = () => {\\\\n // we should terminate all workers and then gracefully shutdown self process\\\\n Promise.all(allWorkers.map(worker => worker.terminate())).then(() => process.exit(0), () => process.exit(1));\\\\n allWorkers = [];\\\\n };\\\\n // Take care to not leave orphaned processes behind. See #147.\\\\n process.on(\\\\\\\"SIGINT\\\\\\\", () => terminateWorkersAndMaster());\\\\n process.on(\\\\\\\"SIGTERM\\\\\\\", () => terminateWorkersAndMaster());\\\\n class BlobWorker extends Worker {\\\\n constructor(blob, options) {\\\\n super(Buffer.from(blob).toString(\\\\\\\"utf-8\\\\\\\"), Object.assign(Object.assign({}, options), { fromSource: true }));\\\\n }\\\\n static fromText(source, options) {\\\\n return new Worker(source, Object.assign(Object.assign({}, options), { fromSource: true }));\\\\n }\\\\n }\\\\n return {\\\\n blob: BlobWorker,\\\\n default: Worker\\\\n };\\\\n}\\\\nfunction initTinyWorker() {\\\\n const TinyWorker = __webpack_require__(/*! tiny-worker */ \\\\\\\"./node_modules/tiny-worker/lib/index.js\\\\\\\");\\\\n let allWorkers = [];\\\\n class Worker extends TinyWorker {\\\\n constructor(scriptPath, options) {\\\\n // Need to apply a work-around for Windows or it will choke upon the absolute path\\\\n // (`Error [ERR_INVALID_PROTOCOL]: Protocol 'c:' not supported`)\\\\n const resolvedScriptPath = options && options.fromSource\\\\n ? null\\\\n : process.platform === \\\\\\\"win32\\\\\\\"\\\\n ? 
`file:///${resolveScriptPath(scriptPath).replace(/\\\\\\\\\\\\\\\\/g, \\\\\\\"/\\\\\\\")}`\\\\n : resolveScriptPath(scriptPath);\\\\n if (!resolvedScriptPath) {\\\\n // `options.fromSource` is true\\\\n const sourceCode = scriptPath;\\\\n super(new Function(sourceCode), [], { esm: true });\\\\n }\\\\n else if (resolvedScriptPath.match(/\\\\\\\\.tsx?$/i) && detectTsNode()) {\\\\n super(new Function(createTsNodeModule(resolveScriptPath(scriptPath))), [], { esm: true });\\\\n }\\\\n else if (resolvedScriptPath.match(/\\\\\\\\.asar[\\\\\\\\/\\\\\\\\\\\\\\\\]/)) {\\\\n // See <https://github.com/andywer/threads-plugin/issues/17>\\\\n super(resolvedScriptPath.replace(/\\\\\\\\.asar([\\\\\\\\/\\\\\\\\\\\\\\\\])/, \\\\\\\".asar.unpacked$1\\\\\\\"), [], { esm: true });\\\\n }\\\\n else {\\\\n super(resolvedScriptPath, [], { esm: true });\\\\n }\\\\n allWorkers.push(this);\\\\n this.emitter = new events__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"EventEmitter\\\\\\\"]();\\\\n this.onerror = (error) => this.emitter.emit(\\\\\\\"error\\\\\\\", error);\\\\n this.onmessage = (message) => this.emitter.emit(\\\\\\\"message\\\\\\\", message);\\\\n }\\\\n addEventListener(eventName, listener) {\\\\n this.emitter.addListener(eventName, listener);\\\\n }\\\\n removeEventListener(eventName, listener) {\\\\n this.emitter.removeListener(eventName, listener);\\\\n }\\\\n terminate() {\\\\n allWorkers = allWorkers.filter(worker => worker !== this);\\\\n return super.terminate();\\\\n }\\\\n }\\\\n const terminateWorkersAndMaster = () => {\\\\n // we should terminate all workers and then gracefully shutdown self process\\\\n Promise.all(allWorkers.map(worker => worker.terminate())).then(() => process.exit(0), () => process.exit(1));\\\\n allWorkers = [];\\\\n };\\\\n // Take care to not leave orphaned processes behind\\\\n // See <https://github.com/avoidwork/tiny-worker#faq>\\\\n process.on(\\\\\\\"SIGINT\\\\\\\", () => terminateWorkersAndMaster());\\\\n process.on(\\\\\\\"SIGTERM\\\\\\\", () => terminateWorkersAndMaster());\\\\n class BlobWorker extends Worker {\\\\n constructor(blob, options) {\\\\n super(Buffer.from(blob).toString(\\\\\\\"utf-8\\\\\\\"), Object.assign(Object.assign({}, options), { fromSource: true }));\\\\n }\\\\n static fromText(source, options) {\\\\n return new Worker(source, Object.assign(Object.assign({}, options), { fromSource: true }));\\\\n }\\\\n }\\\\n return {\\\\n blob: BlobWorker,\\\\n default: Worker\\\\n };\\\\n}\\\\nlet implementation;\\\\nlet isTinyWorker;\\\\nfunction selectWorkerImplementation() {\\\\n try {\\\\n isTinyWorker = false;\\\\n return initWorkerThreadsWorker();\\\\n }\\\\n catch (error) {\\\\n // tslint:disable-next-line no-console\\\\n console.debug(\\\\\\\"Node worker_threads not available. Trying to fall back to tiny-worker polyfill...\\\\\\\");\\\\n isTinyWorker = true;\\\\n return initTinyWorker();\\\\n }\\\\n}\\\\nfunction getWorkerImplementation() {\\\\n if (!implementation) {\\\\n implementation = selectWorkerImplementation();\\\\n }\\\\n return implementation;\\\\n}\\\\nfunction isWorkerRuntime() {\\\\n if (isTinyWorker) {\\\\n return typeof self !== \\\\\\\"undefined\\\\\\\" && self.postMessage ? true : false;\\\\n }\\\\n else {\\\\n // Webpack hack\\\\n const isMainThread = typeof require === \\\\\\\"function\\\\\\\"\\\\n ? 
require(\\\\\\\"worker_threads\\\\\\\").isMainThread\\\\n : eval(\\\\\\\"require\\\\\\\")(\\\\\\\"worker_threads\\\\\\\").isMainThread;\\\\n return !isMainThread;\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/implementation.node.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/index.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/index.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: Pool, spawn, Thread, isWorkerRuntime, BlobWorker, Worker */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"BlobWorker\\\\\\\", function() { return BlobWorker; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Worker\\\\\\\", function() { return Worker; });\\\\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation */ \\\\\\\"./node_modules/threads/dist-esm/master/implementation.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return _implementation__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"isWorkerRuntime\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _pool__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./pool */ \\\\\\\"./node_modules/threads/dist-esm/master/pool.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Pool\\\\\\\", function() { return _pool__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Pool\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _spawn__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./spawn */ \\\\\\\"./node_modules/threads/dist-esm/master/spawn.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"spawn\\\\\\\", function() { return _spawn__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"spawn\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _thread__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./thread */ \\\\\\\"./node_modules/threads/dist-esm/master/thread.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Thread\\\\\\\", function() { return _thread__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"Thread\\\\\\\"]; });\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n/** Separate class to spawn workers from source code blobs or strings. */\\\\nconst BlobWorker = Object(_implementation__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"getWorkerImplementation\\\\\\\"])().blob;\\\\n/** Worker implementation. Either web worker or a node.js Worker class. */\\\\nconst Worker = Object(_implementation__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"getWorkerImplementation\\\\\\\"])().default;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/invocation-proxy.js\\\":\\n/*!******************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/invocation-proxy.js ***!\\n \\\\******************************************************************/\\n/*! 
exports provided: createProxyFunction, createProxyModule */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"createProxyFunction\\\\\\\", function() { return createProxyFunction; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"createProxyModule\\\\\\\", function() { return createProxyModule; });\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! debug */ \\\\\\\"./node_modules/threads/node_modules/debug/src/index.js\\\\\\\");\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \\\\\\\"./node_modules/observable-fns/dist.esm/index.js\\\\\\\");\\\\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../common */ \\\\\\\"./node_modules/threads/dist-esm/common.js\\\\\\\");\\\\n/* harmony import */ var _observable_promise__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../observable-promise */ \\\\\\\"./node_modules/threads/dist-esm/observable-promise.js\\\\\\\");\\\\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../transferable */ \\\\\\\"./node_modules/threads/dist-esm/transferable.js\\\\\\\");\\\\n/* harmony import */ var _types_messages__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/messages */ \\\\\\\"./node_modules/threads/dist-esm/types/messages.js\\\\\\\");\\\\n/*\\\\n * This source file contains the code for proxying calls in the master thread to calls in the workers\\\\n * by `.postMessage()`-ing.\\\\n *\\\\n * Keep in mind that this code can make or break the program's performance! 
Need to optimize more\\u2026\\\\n */\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nconst debugMessages = debug__WEBPACK_IMPORTED_MODULE_0___default()(\\\\\\\"threads:master:messages\\\\\\\");\\\\nlet nextJobUID = 1;\\\\nconst dedupe = (array) => Array.from(new Set(array));\\\\nconst isJobErrorMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerMessageType\\\\\\\"].error;\\\\nconst isJobResultMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerMessageType\\\\\\\"].result;\\\\nconst isJobStartMessage = (data) => data && data.type === _types_messages__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerMessageType\\\\\\\"].running;\\\\nfunction createObservableForJob(worker, jobUID) {\\\\n return new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Observable\\\\\\\"](observer => {\\\\n let asyncType;\\\\n const messageHandler = ((event) => {\\\\n debugMessages(\\\\\\\"Message from worker:\\\\\\\", event.data);\\\\n if (!event.data || event.data.uid !== jobUID)\\\\n return;\\\\n if (isJobStartMessage(event.data)) {\\\\n asyncType = event.data.resultType;\\\\n }\\\\n else if (isJobResultMessage(event.data)) {\\\\n if (asyncType === \\\\\\\"promise\\\\\\\") {\\\\n if (typeof event.data.payload !== \\\\\\\"undefined\\\\\\\") {\\\\n observer.next(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"deserialize\\\\\\\"])(event.data.payload));\\\\n }\\\\n observer.complete();\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n }\\\\n else {\\\\n if (event.data.payload) {\\\\n observer.next(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"deserialize\\\\\\\"])(event.data.payload));\\\\n }\\\\n if (event.data.complete) {\\\\n observer.complete();\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n }\\\\n }\\\\n }\\\\n else if (isJobErrorMessage(event.data)) {\\\\n const error = Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"deserialize\\\\\\\"])(event.data.error);\\\\n if (asyncType === \\\\\\\"promise\\\\\\\" || !asyncType) {\\\\n observer.error(error);\\\\n }\\\\n else {\\\\n observer.error(error);\\\\n }\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n }\\\\n });\\\\n worker.addEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n return () => {\\\\n if (asyncType === \\\\\\\"observable\\\\\\\" || !asyncType) {\\\\n const cancelMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"MasterMessageType\\\\\\\"].cancel,\\\\n uid: jobUID\\\\n };\\\\n worker.postMessage(cancelMessage);\\\\n }\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n };\\\\n });\\\\n}\\\\nfunction prepareArguments(rawArgs) {\\\\n if (rawArgs.length === 0) {\\\\n // Exit early if possible\\\\n return {\\\\n args: [],\\\\n transferables: []\\\\n };\\\\n }\\\\n const args = [];\\\\n const transferables = [];\\\\n for (const arg of rawArgs) {\\\\n if (Object(_transferable__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"isTransferDescriptor\\\\\\\"])(arg)) {\\\\n args.push(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"serialize\\\\\\\"])(arg.send));\\\\n transferables.push(...arg.transferables);\\\\n }\\\\n else {\\\\n args.push(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"serialize\\\\\\\"])(arg));\\\\n }\\\\n }\\\\n return {\\\\n args,\\\\n transferables: transferables.length === 0 ? 
transferables : dedupe(transferables)\\\\n };\\\\n}\\\\nfunction createProxyFunction(worker, method) {\\\\n return ((...rawArgs) => {\\\\n const uid = nextJobUID++;\\\\n const { args, transferables } = prepareArguments(rawArgs);\\\\n const runMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"MasterMessageType\\\\\\\"].run,\\\\n uid,\\\\n method,\\\\n args\\\\n };\\\\n debugMessages(\\\\\\\"Sending command to run function to worker:\\\\\\\", runMessage);\\\\n try {\\\\n worker.postMessage(runMessage, transferables);\\\\n }\\\\n catch (error) {\\\\n return _observable_promise__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"ObservablePromise\\\\\\\"].from(Promise.reject(error));\\\\n }\\\\n return _observable_promise__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"ObservablePromise\\\\\\\"].from(Object(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"multicast\\\\\\\"])(createObservableForJob(worker, uid)));\\\\n });\\\\n}\\\\nfunction createProxyModule(worker, methodNames) {\\\\n const proxy = {};\\\\n for (const methodName of methodNames) {\\\\n proxy[methodName] = createProxyFunction(worker, methodName);\\\\n }\\\\n return proxy;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/invocation-proxy.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/pool-types.js\\\":\\n/*!************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/pool-types.js ***!\\n \\\\************************************************************/\\n/*! exports provided: PoolEventType */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"PoolEventType\\\\\\\", function() { return PoolEventType; });\\\\n/** Pool event type. Specifies the type of each `PoolEvent`. */\\\\nvar PoolEventType;\\\\n(function (PoolEventType) {\\\\n PoolEventType[\\\\\\\"initialized\\\\\\\"] = \\\\\\\"initialized\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskCanceled\\\\\\\"] = \\\\\\\"taskCanceled\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskCompleted\\\\\\\"] = \\\\\\\"taskCompleted\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskFailed\\\\\\\"] = \\\\\\\"taskFailed\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskQueued\\\\\\\"] = \\\\\\\"taskQueued\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskQueueDrained\\\\\\\"] = \\\\\\\"taskQueueDrained\\\\\\\";\\\\n PoolEventType[\\\\\\\"taskStart\\\\\\\"] = \\\\\\\"taskStart\\\\\\\";\\\\n PoolEventType[\\\\\\\"terminated\\\\\\\"] = \\\\\\\"terminated\\\\\\\";\\\\n})(PoolEventType || (PoolEventType = {}));\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/pool-types.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/pool.js\\\":\\n/*!******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/pool.js ***!\\n \\\\******************************************************/\\n/*! exports provided: PoolEventType, Thread, Pool */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Pool\\\\\\\", function() { return Pool; });\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
debug */ \\\\\\\"./node_modules/threads/node_modules/debug/src/index.js\\\\\\\");\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \\\\\\\"./node_modules/observable-fns/dist.esm/index.js\\\\\\\");\\\\n/* harmony import */ var _ponyfills__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../ponyfills */ \\\\\\\"./node_modules/threads/dist-esm/ponyfills.js\\\\\\\");\\\\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./implementation */ \\\\\\\"./node_modules/threads/dist-esm/master/implementation.js\\\\\\\");\\\\n/* harmony import */ var _pool_types__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./pool-types */ \\\\\\\"./node_modules/threads/dist-esm/master/pool-types.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"PoolEventType\\\\\\\", function() { return _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"]; });\\\\n\\\\n/* harmony import */ var _thread__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./thread */ \\\\\\\"./node_modules/threads/dist-esm/master/thread.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Thread\\\\\\\", function() { return _thread__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"Thread\\\\\\\"]; });\\\\n\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nlet nextPoolID = 1;\\\\nfunction createArray(size) {\\\\n const array = [];\\\\n for (let index = 0; index < size; index++) {\\\\n array.push(index);\\\\n }\\\\n return array;\\\\n}\\\\nfunction delay(ms) {\\\\n return new Promise(resolve => setTimeout(resolve, ms));\\\\n}\\\\nfunction flatMap(array, mapper) {\\\\n return array.reduce((flattened, element) => [...flattened, ...mapper(element)], []);\\\\n}\\\\nfunction slugify(text) {\\\\n return text.replace(/\\\\\\\\W/g, \\\\\\\" \\\\\\\").trim().replace(/\\\\\\\\s+/g, \\\\\\\"-\\\\\\\");\\\\n}\\\\nfunction spawnWorkers(spawnWorker, count) {\\\\n return createArray(count).map(() => ({\\\\n init: spawnWorker(),\\\\n runningTasks: []\\\\n }));\\\\n}\\\\nclass WorkerPool {\\\\n constructor(spawnWorker, optionsOrSize) {\\\\n this.eventSubject = new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Subject\\\\\\\"]();\\\\n this.initErrors = [];\\\\n this.isClosing = false;\\\\n this.nextTaskID = 1;\\\\n this.taskQueue = [];\\\\n const options = typeof optionsOrSize === \\\\\\\"number\\\\\\\"\\\\n ? 
{ size: optionsOrSize }\\\\n : optionsOrSize || {};\\\\n const { size = _implementation__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"defaultPoolSize\\\\\\\"] } = options;\\\\n this.debug = debug__WEBPACK_IMPORTED_MODULE_0___default()(`threads:pool:${slugify(options.name || String(nextPoolID++))}`);\\\\n this.options = options;\\\\n this.workers = spawnWorkers(spawnWorker, size);\\\\n this.eventObservable = Object(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"multicast\\\\\\\"])(observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Observable\\\\\\\"].from(this.eventSubject));\\\\n Promise.all(this.workers.map(worker => worker.init)).then(() => this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].initialized,\\\\n size: this.workers.length\\\\n }), error => {\\\\n this.debug(\\\\\\\"Error while initializing pool worker:\\\\\\\", error);\\\\n this.eventSubject.error(error);\\\\n this.initErrors.push(error);\\\\n });\\\\n }\\\\n findIdlingWorker() {\\\\n const { concurrency = 1 } = this.options;\\\\n return this.workers.find(worker => worker.runningTasks.length < concurrency);\\\\n }\\\\n runPoolTask(worker, task) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n const workerID = this.workers.indexOf(worker) + 1;\\\\n this.debug(`Running task #${task.id} on worker #${workerID}...`);\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskStart,\\\\n taskID: task.id,\\\\n workerID\\\\n });\\\\n try {\\\\n const returnValue = yield task.run(yield worker.init);\\\\n this.debug(`Task #${task.id} completed successfully`);\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskCompleted,\\\\n returnValue,\\\\n taskID: task.id,\\\\n workerID\\\\n });\\\\n }\\\\n catch (error) {\\\\n this.debug(`Task #${task.id} failed`);\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskFailed,\\\\n taskID: task.id,\\\\n error,\\\\n workerID\\\\n });\\\\n }\\\\n });\\\\n }\\\\n run(worker, task) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n const runPromise = (() => __awaiter(this, void 0, void 0, function* () {\\\\n const removeTaskFromWorkersRunningTasks = () => {\\\\n worker.runningTasks = worker.runningTasks.filter(someRunPromise => someRunPromise !== runPromise);\\\\n };\\\\n // Defer task execution by one tick to give handlers time to subscribe\\\\n yield delay(0);\\\\n try {\\\\n yield this.runPoolTask(worker, task);\\\\n }\\\\n finally {\\\\n removeTaskFromWorkersRunningTasks();\\\\n if (!this.isClosing) {\\\\n this.scheduleWork();\\\\n }\\\\n }\\\\n }))();\\\\n worker.runningTasks.push(runPromise);\\\\n });\\\\n }\\\\n scheduleWork() {\\\\n this.debug(`Attempt de-queueing a task in order to run it...`);\\\\n const availableWorker = this.findIdlingWorker();\\\\n if (!availableWorker)\\\\n return;\\\\n const nextTask = this.taskQueue.shift();\\\\n if (!nextTask) {\\\\n this.debug(`Task queue is empty`);\\\\n this.eventSubject.next({ type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskQueueDrained });\\\\n return;\\\\n }\\\\n this.run(availableWorker, nextTask);\\\\n }\\\\n taskCompletion(taskID) {\\\\n return new Promise((resolve, reject) => {\\\\n const eventSubscription = this.events().subscribe(event => {\\\\n if (event.type === 
_pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskCompleted && event.taskID === taskID) {\\\\n eventSubscription.unsubscribe();\\\\n resolve(event.returnValue);\\\\n }\\\\n else if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskFailed && event.taskID === taskID) {\\\\n eventSubscription.unsubscribe();\\\\n reject(event.error);\\\\n }\\\\n else if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].terminated) {\\\\n eventSubscription.unsubscribe();\\\\n reject(Error(\\\\\\\"Pool has been terminated before task was run.\\\\\\\"));\\\\n }\\\\n });\\\\n });\\\\n }\\\\n settled(allowResolvingImmediately = false) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n const getCurrentlyRunningTasks = () => flatMap(this.workers, worker => worker.runningTasks);\\\\n const taskFailures = [];\\\\n const failureSubscription = this.eventObservable.subscribe(event => {\\\\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskFailed) {\\\\n taskFailures.push(event.error);\\\\n }\\\\n });\\\\n if (this.initErrors.length > 0) {\\\\n return Promise.reject(this.initErrors[0]);\\\\n }\\\\n if (allowResolvingImmediately && this.taskQueue.length === 0) {\\\\n yield Object(_ponyfills__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"allSettled\\\\\\\"])(getCurrentlyRunningTasks());\\\\n return taskFailures;\\\\n }\\\\n yield new Promise((resolve, reject) => {\\\\n const subscription = this.eventObservable.subscribe({\\\\n next(event) {\\\\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskQueueDrained) {\\\\n subscription.unsubscribe();\\\\n resolve(void 0);\\\\n }\\\\n },\\\\n error: reject // make a pool-wide error reject the completed() result promise\\\\n });\\\\n });\\\\n yield Object(_ponyfills__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"allSettled\\\\\\\"])(getCurrentlyRunningTasks());\\\\n failureSubscription.unsubscribe();\\\\n return taskFailures;\\\\n });\\\\n }\\\\n completed(allowResolvingImmediately = false) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n const settlementPromise = this.settled(allowResolvingImmediately);\\\\n const earlyExitPromise = new Promise((resolve, reject) => {\\\\n const subscription = this.eventObservable.subscribe({\\\\n next(event) {\\\\n if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskQueueDrained) {\\\\n subscription.unsubscribe();\\\\n resolve(settlementPromise);\\\\n }\\\\n else if (event.type === _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskFailed) {\\\\n subscription.unsubscribe();\\\\n reject(event.error);\\\\n }\\\\n },\\\\n error: reject // make a pool-wide error reject the completed() result promise\\\\n });\\\\n });\\\\n const errors = yield Promise.race([\\\\n settlementPromise,\\\\n earlyExitPromise\\\\n ]);\\\\n if (errors.length > 0) {\\\\n throw errors[0];\\\\n }\\\\n });\\\\n }\\\\n events() {\\\\n return this.eventObservable;\\\\n }\\\\n queue(taskFunction) {\\\\n const { maxQueuedJobs = Infinity } = this.options;\\\\n if (this.isClosing) {\\\\n throw Error(`Cannot schedule pool tasks after terminate() has been called.`);\\\\n }\\\\n if (this.initErrors.length > 0) {\\\\n throw this.initErrors[0];\\\\n }\\\\n const taskID = this.nextTaskID++;\\\\n const taskCompletion = this.taskCompletion(taskID);\\\\n taskCompletion.catch((error) => {\\\\n // Prevent unhandled rejections 
here as we assume the user will use\\\\n // `pool.completed()`, `pool.settled()` or `task.catch()` to handle errors\\\\n this.debug(`Task #${taskID} errored:`, error);\\\\n });\\\\n const task = {\\\\n id: taskID,\\\\n run: taskFunction,\\\\n cancel: () => {\\\\n if (this.taskQueue.indexOf(task) === -1)\\\\n return;\\\\n this.taskQueue = this.taskQueue.filter(someTask => someTask !== task);\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskCanceled,\\\\n taskID: task.id\\\\n });\\\\n },\\\\n then: taskCompletion.then.bind(taskCompletion)\\\\n };\\\\n if (this.taskQueue.length >= maxQueuedJobs) {\\\\n throw Error(\\\\\\\"Maximum number of pool tasks queued. Refusing to queue another one.\\\\\\\\n\\\\\\\" +\\\\n \\\\\\\"This usually happens for one of two reasons: We are either at peak \\\\\\\" +\\\\n \\\\\\\"workload right now or some tasks just won't finish, thus blocking the pool.\\\\\\\");\\\\n }\\\\n this.debug(`Queueing task #${task.id}...`);\\\\n this.taskQueue.push(task);\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].taskQueued,\\\\n taskID: task.id\\\\n });\\\\n this.scheduleWork();\\\\n return task;\\\\n }\\\\n terminate(force) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n this.isClosing = true;\\\\n if (!force) {\\\\n yield this.completed(true);\\\\n }\\\\n this.eventSubject.next({\\\\n type: _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"].terminated,\\\\n remainingQueue: [...this.taskQueue]\\\\n });\\\\n this.eventSubject.complete();\\\\n yield Promise.all(this.workers.map((worker) => __awaiter(this, void 0, void 0, function* () { return _thread__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"Thread\\\\\\\"].terminate(yield worker.init); })));\\\\n });\\\\n }\\\\n}\\\\nWorkerPool.EventType = _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"];\\\\n/**\\\\n * Thread pool constructor. Creates a new pool and spawns its worker threads.\\\\n */\\\\nfunction PoolConstructor(spawnWorker, optionsOrSize) {\\\\n // The function exists only so we don't need to use `new` to create a pool (we still can, though).\\\\n // If the Pool is a class or not is an implementation detail that should not concern the user.\\\\n return new WorkerPool(spawnWorker, optionsOrSize);\\\\n}\\\\nPoolConstructor.EventType = _pool_types__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"PoolEventType\\\\\\\"];\\\\n/**\\\\n * Thread pool constructor. Creates a new pool and spawns its worker threads.\\\\n */\\\\nconst Pool = PoolConstructor;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/pool.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/spawn.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/spawn.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: spawn */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"spawn\\\\\\\", function() { return spawn; });\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
debug */ \\\\\\\"./node_modules/threads/node_modules/debug/src/index.js\\\\\\\");\\\\n/* harmony import */ var debug__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(debug__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! observable-fns */ \\\\\\\"./node_modules/observable-fns/dist.esm/index.js\\\\\\\");\\\\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../common */ \\\\\\\"./node_modules/threads/dist-esm/common.js\\\\\\\");\\\\n/* harmony import */ var _promise__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../promise */ \\\\\\\"./node_modules/threads/dist-esm/promise.js\\\\\\\");\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../symbols */ \\\\\\\"./node_modules/threads/dist-esm/symbols.js\\\\\\\");\\\\n/* harmony import */ var _types_master__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../types/master */ \\\\\\\"./node_modules/threads/dist-esm/types/master.js\\\\\\\");\\\\n/* harmony import */ var _invocation_proxy__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./invocation-proxy */ \\\\\\\"./node_modules/threads/dist-esm/master/invocation-proxy.js\\\\\\\");\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nconst debugMessages = debug__WEBPACK_IMPORTED_MODULE_0___default()(\\\\\\\"threads:master:messages\\\\\\\");\\\\nconst debugSpawn = debug__WEBPACK_IMPORTED_MODULE_0___default()(\\\\\\\"threads:master:spawn\\\\\\\");\\\\nconst debugThreadUtils = debug__WEBPACK_IMPORTED_MODULE_0___default()(\\\\\\\"threads:master:thread-utils\\\\\\\");\\\\nconst isInitMessage = (data) => data && data.type === \\\\\\\"init\\\\\\\";\\\\nconst isUncaughtErrorMessage = (data) => data && data.type === \\\\\\\"uncaughtError\\\\\\\";\\\\nconst initMessageTimeout = typeof process !== \\\\\\\"undefined\\\\\\\" && process.env.THREADS_WORKER_INIT_TIMEOUT\\\\n ? 
Number.parseInt(process.env.THREADS_WORKER_INIT_TIMEOUT, 10)\\\\n : 10000;\\\\nfunction withTimeout(promise, timeoutInMs, errorMessage) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n let timeoutHandle;\\\\n const timeout = new Promise((resolve, reject) => {\\\\n timeoutHandle = setTimeout(() => reject(Error(errorMessage)), timeoutInMs);\\\\n });\\\\n const result = yield Promise.race([\\\\n promise,\\\\n timeout\\\\n ]);\\\\n clearTimeout(timeoutHandle);\\\\n return result;\\\\n });\\\\n}\\\\nfunction receiveInitMessage(worker) {\\\\n return new Promise((resolve, reject) => {\\\\n const messageHandler = ((event) => {\\\\n debugMessages(\\\\\\\"Message from worker before finishing initialization:\\\\\\\", event.data);\\\\n if (isInitMessage(event.data)) {\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n resolve(event.data);\\\\n }\\\\n else if (isUncaughtErrorMessage(event.data)) {\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n reject(Object(_common__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"deserialize\\\\\\\"])(event.data.error));\\\\n }\\\\n });\\\\n worker.addEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n });\\\\n}\\\\nfunction createEventObservable(worker, workerTermination) {\\\\n return new observable_fns__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"Observable\\\\\\\"](observer => {\\\\n const messageHandler = ((messageEvent) => {\\\\n const workerEvent = {\\\\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerEventType\\\\\\\"].message,\\\\n data: messageEvent.data\\\\n };\\\\n observer.next(workerEvent);\\\\n });\\\\n const rejectionHandler = ((errorEvent) => {\\\\n debugThreadUtils(\\\\\\\"Unhandled promise rejection event in thread:\\\\\\\", errorEvent);\\\\n const workerEvent = {\\\\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerEventType\\\\\\\"].internalError,\\\\n error: Error(errorEvent.reason)\\\\n };\\\\n observer.next(workerEvent);\\\\n });\\\\n worker.addEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n worker.addEventListener(\\\\\\\"unhandledrejection\\\\\\\", rejectionHandler);\\\\n workerTermination.then(() => {\\\\n const terminationEvent = {\\\\n type: _types_master__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerEventType\\\\\\\"].termination\\\\n };\\\\n worker.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n worker.removeEventListener(\\\\\\\"unhandledrejection\\\\\\\", rejectionHandler);\\\\n observer.next(terminationEvent);\\\\n observer.complete();\\\\n });\\\\n });\\\\n}\\\\nfunction createTerminator(worker) {\\\\n const [termination, resolver] = Object(_promise__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"createPromiseWithResolver\\\\\\\"])();\\\\n const terminate = () => __awaiter(this, void 0, void 0, function* () {\\\\n debugThreadUtils(\\\\\\\"Terminating worker\\\\\\\");\\\\n // Newer versions of worker_threads workers return a promise\\\\n yield worker.terminate();\\\\n resolver();\\\\n });\\\\n return { terminate, termination };\\\\n}\\\\nfunction setPrivateThreadProps(raw, worker, workerEvents, terminate) {\\\\n const workerErrors = workerEvents\\\\n .filter(event => event.type === _types_master__WEBPACK_IMPORTED_MODULE_5__[\\\\\\\"WorkerEventType\\\\\\\"].internalError)\\\\n .map(errorEvent => errorEvent.error);\\\\n // tslint:disable-next-line prefer-object-spread\\\\n return Object.assign(raw, {\\\\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"$errors\\\\\\\"]]: workerErrors,\\\\n 
[_symbols__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"$events\\\\\\\"]]: workerEvents,\\\\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"$terminate\\\\\\\"]]: terminate,\\\\n [_symbols__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"$worker\\\\\\\"]]: worker\\\\n });\\\\n}\\\\n/**\\\\n * Spawn a new thread. Takes a fresh worker instance, wraps it in a thin\\\\n * abstraction layer to provide the transparent API and verifies that\\\\n * the worker has initialized successfully.\\\\n *\\\\n * @param worker Instance of `Worker`. Either a web worker, `worker_threads` worker or `tiny-worker` worker.\\\\n * @param [options]\\\\n * @param [options.timeout] Init message timeout. Default: 10000 or set by environment variable.\\\\n */\\\\nfunction spawn(worker, options) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n debugSpawn(\\\\\\\"Initializing new thread\\\\\\\");\\\\n const timeout = options && options.timeout ? options.timeout : initMessageTimeout;\\\\n const initMessage = yield withTimeout(receiveInitMessage(worker), timeout, `Timeout: Did not receive an init message from worker after ${timeout}ms. Make sure the worker calls expose().`);\\\\n const exposed = initMessage.exposed;\\\\n const { termination, terminate } = createTerminator(worker);\\\\n const events = createEventObservable(worker, termination);\\\\n if (exposed.type === \\\\\\\"function\\\\\\\") {\\\\n const proxy = Object(_invocation_proxy__WEBPACK_IMPORTED_MODULE_6__[\\\\\\\"createProxyFunction\\\\\\\"])(worker);\\\\n return setPrivateThreadProps(proxy, worker, events, terminate);\\\\n }\\\\n else if (exposed.type === \\\\\\\"module\\\\\\\") {\\\\n const proxy = Object(_invocation_proxy__WEBPACK_IMPORTED_MODULE_6__[\\\\\\\"createProxyModule\\\\\\\"])(worker, exposed.methods);\\\\n return setPrivateThreadProps(proxy, worker, events, terminate);\\\\n }\\\\n else {\\\\n const type = exposed.type;\\\\n throw Error(`Worker init message states unexpected type of expose(): ${type}`);\\\\n }\\\\n });\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/spawn.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/master/thread.js\\\":\\n/*!********************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/master/thread.js ***!\\n \\\\********************************************************/\\n/*! exports provided: Thread */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Thread\\\\\\\", function() { return Thread; });\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../symbols */ \\\\\\\"./node_modules/threads/dist-esm/symbols.js\\\\\\\");\\\\n\\\\nfunction fail(message) {\\\\n throw Error(message);\\\\n}\\\\n/** Thread utility functions. Use them to manage or inspect a `spawn()`-ed thread. */\\\\nconst Thread = {\\\\n /** Return an observable that can be used to subscribe to all errors happening in the thread. */\\\\n errors(thread) {\\\\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"$errors\\\\\\\"]] || fail(\\\\\\\"Error observable not found. Make sure to pass a thread instance as returned by the spawn() promise.\\\\\\\");\\\\n },\\\\n /** Return an observable that can be used to subscribe to internal events happening in the thread. Useful for debugging. 
*/\\\\n events(thread) {\\\\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"$events\\\\\\\"]] || fail(\\\\\\\"Events observable not found. Make sure to pass a thread instance as returned by the spawn() promise.\\\\\\\");\\\\n },\\\\n /** Terminate a thread. Remember to terminate every thread when you are done using it. */\\\\n terminate(thread) {\\\\n return thread[_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"$terminate\\\\\\\"]]();\\\\n }\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/master/thread.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/observable-promise.js\\\":\\n/*!*************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/observable-promise.js ***!\\n \\\\*************************************************************/\\n/*! exports provided: ObservablePromise */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"ObservablePromise\\\\\\\", function() { return ObservablePromise; });\\\\n/* harmony import */ var observable_fns__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! observable-fns */ \\\\\\\"./node_modules/observable-fns/dist.esm/index.js\\\\\\\");\\\\n\\\\nconst doNothing = () => undefined;\\\\nconst returnInput = (input) => input;\\\\nconst runDeferred = (fn) => Promise.resolve().then(fn);\\\\nfunction fail(error) {\\\\n throw error;\\\\n}\\\\nfunction isThenable(thing) {\\\\n return thing && typeof thing.then === \\\\\\\"function\\\\\\\";\\\\n}\\\\n/**\\\\n * Creates a hybrid, combining the APIs of an Observable and a Promise.\\\\n *\\\\n * It is used to proxy async process states when we are initially not sure\\\\n * if that async process will yield values once (-> Promise) or multiple\\\\n * times (-> Observable).\\\\n *\\\\n * Note that the observable promise inherits some of the observable's characteristics:\\\\n * The `init` function will be called *once for every time anyone subscribes to it*.\\\\n *\\\\n * If this is undesired, derive a hot observable from it using `makeHot()` and\\\\n * subscribe to that.\\\\n */\\\\nclass ObservablePromise extends observable_fns__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"Observable\\\\\\\"] {\\\\n constructor(init) {\\\\n super((originalObserver) => {\\\\n // tslint:disable-next-line no-this-assignment\\\\n const self = this;\\\\n const observer = Object.assign(Object.assign({}, originalObserver), { complete() {\\\\n originalObserver.complete();\\\\n self.onCompletion();\\\\n }, error(error) {\\\\n originalObserver.error(error);\\\\n self.onError(error);\\\\n },\\\\n next(value) {\\\\n originalObserver.next(value);\\\\n self.onNext(value);\\\\n } });\\\\n try {\\\\n this.initHasRun = true;\\\\n return init(observer);\\\\n }\\\\n catch (error) {\\\\n observer.error(error);\\\\n }\\\\n });\\\\n this.initHasRun = false;\\\\n this.fulfillmentCallbacks = [];\\\\n this.rejectionCallbacks = [];\\\\n this.firstValueSet = false;\\\\n this.state = \\\\\\\"pending\\\\\\\";\\\\n }\\\\n onNext(value) {\\\\n if (!this.firstValueSet) {\\\\n this.firstValue = value;\\\\n this.firstValueSet = true;\\\\n }\\\\n }\\\\n onError(error) {\\\\n this.state = \\\\\\\"rejected\\\\\\\";\\\\n this.rejection = error;\\\\n for (const onRejected of this.rejectionCallbacks) {\\\\n // Promisifying the call to turn errors into unhandled promise 
rejections\\\\n // instead of them failing sync and cancelling the iteration\\\\n runDeferred(() => onRejected(error));\\\\n }\\\\n }\\\\n onCompletion() {\\\\n this.state = \\\\\\\"fulfilled\\\\\\\";\\\\n for (const onFulfilled of this.fulfillmentCallbacks) {\\\\n // Promisifying the call to turn errors into unhandled promise rejections\\\\n // instead of them failing sync and cancelling the iteration\\\\n runDeferred(() => onFulfilled(this.firstValue));\\\\n }\\\\n }\\\\n then(onFulfilledRaw, onRejectedRaw) {\\\\n const onFulfilled = onFulfilledRaw || returnInput;\\\\n const onRejected = onRejectedRaw || fail;\\\\n let onRejectedCalled = false;\\\\n return new Promise((resolve, reject) => {\\\\n const rejectionCallback = (error) => {\\\\n if (onRejectedCalled)\\\\n return;\\\\n onRejectedCalled = true;\\\\n try {\\\\n resolve(onRejected(error));\\\\n }\\\\n catch (anotherError) {\\\\n reject(anotherError);\\\\n }\\\\n };\\\\n const fulfillmentCallback = (value) => {\\\\n try {\\\\n resolve(onFulfilled(value));\\\\n }\\\\n catch (error) {\\\\n rejectionCallback(error);\\\\n }\\\\n };\\\\n if (!this.initHasRun) {\\\\n this.subscribe({ error: rejectionCallback });\\\\n }\\\\n if (this.state === \\\\\\\"fulfilled\\\\\\\") {\\\\n return resolve(onFulfilled(this.firstValue));\\\\n }\\\\n if (this.state === \\\\\\\"rejected\\\\\\\") {\\\\n onRejectedCalled = true;\\\\n return resolve(onRejected(this.rejection));\\\\n }\\\\n this.fulfillmentCallbacks.push(fulfillmentCallback);\\\\n this.rejectionCallbacks.push(rejectionCallback);\\\\n });\\\\n }\\\\n catch(onRejected) {\\\\n return this.then(undefined, onRejected);\\\\n }\\\\n finally(onCompleted) {\\\\n const handler = onCompleted || doNothing;\\\\n return this.then((value) => {\\\\n handler();\\\\n return value;\\\\n }, () => handler());\\\\n }\\\\n static from(thing) {\\\\n if (isThenable(thing)) {\\\\n return new ObservablePromise(observer => {\\\\n const onFulfilled = (value) => {\\\\n observer.next(value);\\\\n observer.complete();\\\\n };\\\\n const onRejected = (error) => {\\\\n observer.error(error);\\\\n };\\\\n thing.then(onFulfilled, onRejected);\\\\n });\\\\n }\\\\n else {\\\\n return super.from(thing);\\\\n }\\\\n }\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/observable-promise.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/ponyfills.js\\\":\\n/*!****************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/ponyfills.js ***!\\n \\\\****************************************************/\\n/*! 
exports provided: allSettled */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"allSettled\\\\\\\", function() { return allSettled; });\\\\n// Based on <https://github.com/es-shims/Promise.allSettled/blob/master/implementation.js>\\\\nfunction allSettled(values) {\\\\n return Promise.all(values.map(item => {\\\\n const onFulfill = (value) => {\\\\n return { status: 'fulfilled', value };\\\\n };\\\\n const onReject = (reason) => {\\\\n return { status: 'rejected', reason };\\\\n };\\\\n const itemPromise = Promise.resolve(item);\\\\n try {\\\\n return itemPromise.then(onFulfill, onReject);\\\\n }\\\\n catch (error) {\\\\n return Promise.reject(error);\\\\n }\\\\n }));\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/ponyfills.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/promise.js\\\":\\n/*!**************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/promise.js ***!\\n \\\\**************************************************/\\n/*! exports provided: createPromiseWithResolver */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"createPromiseWithResolver\\\\\\\", function() { return createPromiseWithResolver; });\\\\nconst doNothing = () => undefined;\\\\n/**\\\\n * Creates a new promise and exposes its resolver function.\\\\n * Use with care!\\\\n */\\\\nfunction createPromiseWithResolver() {\\\\n let alreadyResolved = false;\\\\n let resolvedTo;\\\\n let resolver = doNothing;\\\\n const promise = new Promise(resolve => {\\\\n if (alreadyResolved) {\\\\n resolve(resolvedTo);\\\\n }\\\\n else {\\\\n resolver = resolve;\\\\n }\\\\n });\\\\n const exposedResolver = (value) => {\\\\n alreadyResolved = true;\\\\n resolvedTo = value;\\\\n resolver(resolvedTo);\\\\n };\\\\n return [promise, exposedResolver];\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/promise.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/serializers.js\\\":\\n/*!******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/serializers.js ***!\\n \\\\******************************************************/\\n/*! 
exports provided: extendSerializer, DefaultSerializer */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"extendSerializer\\\\\\\", function() { return extendSerializer; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"DefaultSerializer\\\\\\\", function() { return DefaultSerializer; });\\\\nfunction extendSerializer(extend, implementation) {\\\\n const fallbackDeserializer = extend.deserialize.bind(extend);\\\\n const fallbackSerializer = extend.serialize.bind(extend);\\\\n return {\\\\n deserialize(message) {\\\\n return implementation.deserialize(message, fallbackDeserializer);\\\\n },\\\\n serialize(input) {\\\\n return implementation.serialize(input, fallbackSerializer);\\\\n }\\\\n };\\\\n}\\\\nconst DefaultErrorSerializer = {\\\\n deserialize(message) {\\\\n return Object.assign(Error(message.message), {\\\\n name: message.name,\\\\n stack: message.stack\\\\n });\\\\n },\\\\n serialize(error) {\\\\n return {\\\\n __error_marker: \\\\\\\"$$error\\\\\\\",\\\\n message: error.message,\\\\n name: error.name,\\\\n stack: error.stack\\\\n };\\\\n }\\\\n};\\\\nconst isSerializedError = (thing) => thing && typeof thing === \\\\\\\"object\\\\\\\" && \\\\\\\"__error_marker\\\\\\\" in thing && thing.__error_marker === \\\\\\\"$$error\\\\\\\";\\\\nconst DefaultSerializer = {\\\\n deserialize(message) {\\\\n if (isSerializedError(message)) {\\\\n return DefaultErrorSerializer.deserialize(message);\\\\n }\\\\n else {\\\\n return message;\\\\n }\\\\n },\\\\n serialize(input) {\\\\n if (input instanceof Error) {\\\\n return DefaultErrorSerializer.serialize(input);\\\\n }\\\\n else {\\\\n return input;\\\\n }\\\\n }\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/serializers.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/symbols.js\\\":\\n/*!**************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/symbols.js ***!\\n \\\\**************************************************/\\n/*! 
exports provided: $errors, $events, $terminate, $transferable, $worker */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"$errors\\\\\\\", function() { return $errors; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"$events\\\\\\\", function() { return $events; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"$terminate\\\\\\\", function() { return $terminate; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"$transferable\\\\\\\", function() { return $transferable; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"$worker\\\\\\\", function() { return $worker; });\\\\nconst $errors = Symbol(\\\\\\\"thread.errors\\\\\\\");\\\\nconst $events = Symbol(\\\\\\\"thread.events\\\\\\\");\\\\nconst $terminate = Symbol(\\\\\\\"thread.terminate\\\\\\\");\\\\nconst $transferable = Symbol(\\\\\\\"thread.transferable\\\\\\\");\\\\nconst $worker = Symbol(\\\\\\\"thread.worker\\\\\\\");\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/symbols.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/transferable.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/transferable.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: isTransferDescriptor, Transfer */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isTransferDescriptor\\\\\\\", function() { return isTransferDescriptor; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Transfer\\\\\\\", function() { return Transfer; });\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./symbols */ \\\\\\\"./node_modules/threads/dist-esm/symbols.js\\\\\\\");\\\\n\\\\nfunction isTransferable(thing) {\\\\n if (!thing || typeof thing !== \\\\\\\"object\\\\\\\")\\\\n return false;\\\\n // Don't check too thoroughly, since the list of transferable things in JS might grow over time\\\\n return true;\\\\n}\\\\nfunction isTransferDescriptor(thing) {\\\\n return thing && typeof thing === \\\\\\\"object\\\\\\\" && thing[_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"$transferable\\\\\\\"]];\\\\n}\\\\nfunction Transfer(payload, transferables) {\\\\n if (!transferables) {\\\\n if (!isTransferable(payload))\\\\n throw Error();\\\\n transferables = [payload];\\\\n }\\\\n return {\\\\n [_symbols__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"$transferable\\\\\\\"]]: true,\\\\n send: payload,\\\\n transferables\\\\n };\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/transferable.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/types/master.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/types/master.js ***!\\n \\\\*******************************************************/\\n/*! 
exports provided: WorkerEventType */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"WorkerEventType\\\\\\\", function() { return WorkerEventType; });\\\\n/* harmony import */ var _symbols__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../symbols */ \\\\\\\"./node_modules/threads/dist-esm/symbols.js\\\\\\\");\\\\n/// <reference lib=\\\\\\\"dom\\\\\\\" />\\\\n// tslint:disable max-classes-per-file\\\\n\\\\n/** Event as emitted by worker thread. Subscribe to using `Thread.events(thread)`. */\\\\nvar WorkerEventType;\\\\n(function (WorkerEventType) {\\\\n WorkerEventType[\\\\\\\"internalError\\\\\\\"] = \\\\\\\"internalError\\\\\\\";\\\\n WorkerEventType[\\\\\\\"message\\\\\\\"] = \\\\\\\"message\\\\\\\";\\\\n WorkerEventType[\\\\\\\"termination\\\\\\\"] = \\\\\\\"termination\\\\\\\";\\\\n})(WorkerEventType || (WorkerEventType = {}));\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/types/master.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/types/messages.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/types/messages.js ***!\\n \\\\*********************************************************/\\n/*! exports provided: MasterMessageType, WorkerMessageType */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"MasterMessageType\\\\\\\", function() { return MasterMessageType; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"WorkerMessageType\\\\\\\", function() { return WorkerMessageType; });\\\\n/////////////////////////////\\\\n// Messages sent by master:\\\\nvar MasterMessageType;\\\\n(function (MasterMessageType) {\\\\n MasterMessageType[\\\\\\\"cancel\\\\\\\"] = \\\\\\\"cancel\\\\\\\";\\\\n MasterMessageType[\\\\\\\"run\\\\\\\"] = \\\\\\\"run\\\\\\\";\\\\n})(MasterMessageType || (MasterMessageType = {}));\\\\n////////////////////////////\\\\n// Messages sent by worker:\\\\nvar WorkerMessageType;\\\\n(function (WorkerMessageType) {\\\\n WorkerMessageType[\\\\\\\"error\\\\\\\"] = \\\\\\\"error\\\\\\\";\\\\n WorkerMessageType[\\\\\\\"init\\\\\\\"] = \\\\\\\"init\\\\\\\";\\\\n WorkerMessageType[\\\\\\\"result\\\\\\\"] = \\\\\\\"result\\\\\\\";\\\\n WorkerMessageType[\\\\\\\"running\\\\\\\"] = \\\\\\\"running\\\\\\\";\\\\n WorkerMessageType[\\\\\\\"uncaughtError\\\\\\\"] = \\\\\\\"uncaughtError\\\\\\\";\\\\n})(WorkerMessageType || (WorkerMessageType = {}));\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/types/messages.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/implementation.browser.js\\\":\\n/*!************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.browser.js ***!\\n \\\\************************************************************************/\\n/*! 
exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/// <reference lib=\\\\\\\"dom\\\\\\\" />\\\\n// tslint:disable no-shadowed-variable\\\\nconst isWorkerRuntime = function isWorkerRuntime() {\\\\n const isWindowContext = typeof self !== \\\\\\\"undefined\\\\\\\" && typeof Window !== \\\\\\\"undefined\\\\\\\" && self instanceof Window;\\\\n return typeof self !== \\\\\\\"undefined\\\\\\\" && self.postMessage && !isWindowContext ? true : false;\\\\n};\\\\nconst postMessageToMaster = function postMessageToMaster(data, transferList) {\\\\n self.postMessage(data, transferList);\\\\n};\\\\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\\\\n const messageHandler = (messageEvent) => {\\\\n onMessage(messageEvent.data);\\\\n };\\\\n const unsubscribe = () => {\\\\n self.removeEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n };\\\\n self.addEventListener(\\\\\\\"message\\\\\\\", messageHandler);\\\\n return unsubscribe;\\\\n};\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = ({\\\\n isWorkerRuntime,\\\\n postMessageToMaster,\\\\n subscribeToMasterMessages\\\\n});\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.browser.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/implementation.js\\\":\\n/*!****************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.js ***!\\n \\\\****************************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _implementation_browser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./implementation.browser */ \\\\\\\"./node_modules/threads/dist-esm/worker/implementation.browser.js\\\\\\\");\\\\n/* harmony import */ var _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./implementation.tiny-worker */ \\\\\\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\\\\\");\\\\n/* harmony import */ var _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1__);\\\\n/* harmony import */ var _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./implementation.worker_threads */ \\\\\\\"./node_modules/threads/dist-esm/worker/implementation.worker_threads.js\\\\\\\");\\\\n// tslint:disable no-var-requires\\\\n/*\\\\n * This file is only a stub to make './implementation' resolve to the right module.\\\\n */\\\\n\\\\n\\\\n\\\\nconst runningInNode = typeof process !== 'undefined' && process.arch !== 'browser' && 'pid' in process;\\\\nfunction selectNodeImplementation() {\\\\n try {\\\\n _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"].testImplementation();\\\\n return _implementation_worker_threads__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"default\\\\\\\"];\\\\n }\\\\n catch (error) {\\\\n return _implementation_tiny_worker__WEBPACK_IMPORTED_MODULE_1___default.a;\\\\n }\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = (runningInNode\\\\n ? 
selectNodeImplementation()\\\\n : _implementation_browser__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"]);\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\":\\n/*!****************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js ***!\\n \\\\****************************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"module.exports = function() {\\\\n return __webpack_require__(/*! !./node_modules/worker-loader/dist/workers/InlineWorker.js */ \\\\\\\"./node_modules/worker-loader/dist/workers/InlineWorker.js\\\\\\\")(\\\\\\\"/******/ (function(modules) { // webpackBootstrap\\\\\\\\n/******/ \\\\\\\\t// The module cache\\\\\\\\n/******/ \\\\\\\\tvar installedModules = {};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// The require function\\\\\\\\n/******/ \\\\\\\\tfunction __webpack_require__(moduleId) {\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t// Check if module is in cache\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(installedModules[moduleId]) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\treturn installedModules[moduleId].exports;\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t}\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t// Create a new module (and put it into the cache)\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tvar module = installedModules[moduleId] = {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\ti: moduleId,\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\tl: false,\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\texports: {}\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t// Execute the module function\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t// Flag the module as loaded\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tmodule.l = true;\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t// Return the exports of the module\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\treturn module.exports;\\\\\\\\n/******/ \\\\\\\\t}\\\\\\\\n/******/\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// expose the modules object (__webpack_modules__)\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.m = modules;\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// expose the module cache\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.c = installedModules;\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// define getter function for harmony exports\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.d = function(exports, name, getter) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(!__webpack_require__.o(exports, name)) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t}\\\\\\\\n/******/ \\\\\\\\t};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// define __esModule on exports\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.r = function(exports) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t}\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tObject.defineProperty(exports, '__esModule', 
{ value: true });\\\\\\\\n/******/ \\\\\\\\t};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// create a fake namespace object\\\\\\\\n/******/ \\\\\\\\t// mode & 1: value is a module id, require it\\\\\\\\n/******/ \\\\\\\\t// mode & 2: merge all properties of value into the ns\\\\\\\\n/******/ \\\\\\\\t// mode & 4: return value when already ns object\\\\\\\\n/******/ \\\\\\\\t// mode & 8|1: behave like require\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.t = function(value, mode) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(mode & 1) value = __webpack_require__(value);\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(mode & 8) return value;\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tvar ns = Object.create(null);\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t__webpack_require__.r(ns);\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\treturn ns;\\\\\\\\n/******/ \\\\\\\\t};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// getDefaultExport function for compatibility with non-harmony modules\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.n = function(module) {\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\tvar getter = module && module.__esModule ?\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\tfunction getDefault() { return module['default']; } :\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t\\\\\\\\tfunction getModuleExports() { return module; };\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\t__webpack_require__.d(getter, 'a', getter);\\\\\\\\n/******/ \\\\\\\\t\\\\\\\\treturn getter;\\\\\\\\n/******/ \\\\\\\\t};\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// Object.prototype.hasOwnProperty.call\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// __webpack_public_path__\\\\\\\\n/******/ \\\\\\\\t__webpack_require__.p = \\\\\\\\\\\\\\\"\\\\\\\\\\\\\\\";\\\\\\\\n/******/\\\\\\\\n/******/\\\\\\\\n/******/ \\\\\\\\t// Load entry module and return exports\\\\\\\\n/******/ \\\\\\\\treturn __webpack_require__(__webpack_require__.s = \\\\\\\\\\\\\\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\\\\\\\\\\\\\");\\\\\\\\n/******/ })\\\\\\\\n/************************************************************************/\\\\\\\\n/******/ ({\\\\\\\\n\\\\\\\\n/***/ \\\\\\\\\\\\\\\"./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js\\\\\\\\\\\\\\\":\\\\\\\\n/*!****************************************************************************!*\\\\\\\\\\\\\\\\\\\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js ***!\\\\\\\\n \\\\\\\\\\\\\\\\****************************************************************************/\\\\\\\\n/*! 
exports provided: default */\\\\\\\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\\\\\\\n\\\\\\\\n\\\\\\\\\\\\\\\"use strict\\\\\\\\\\\\\\\";\\\\\\\\neval(\\\\\\\\\\\\\\\"__webpack_require__.r(__webpack_exports__);\\\\\\\\\\\\\\\\n/// <reference lib=\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"dom\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" />\\\\\\\\\\\\\\\\n// tslint:disable no-shadowed-variable\\\\\\\\\\\\\\\\nif (typeof self === \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"undefined\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\") {\\\\\\\\\\\\\\\\n global.self = global;\\\\\\\\\\\\\\\\n}\\\\\\\\\\\\\\\\nconst isWorkerRuntime = function isWorkerRuntime() {\\\\\\\\\\\\\\\\n return typeof self !== \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"undefined\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" && self.postMessage ? true : false;\\\\\\\\\\\\\\\\n};\\\\\\\\\\\\\\\\nconst postMessageToMaster = function postMessageToMaster(data) {\\\\\\\\\\\\\\\\n // TODO: Warn that Transferables are not supported on first attempt to use feature\\\\\\\\\\\\\\\\n self.postMessage(data);\\\\\\\\\\\\\\\\n};\\\\\\\\\\\\\\\\nlet muxingHandlerSetUp = false;\\\\\\\\\\\\\\\\nconst messageHandlers = new Set();\\\\\\\\\\\\\\\\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\\\\\\\\\\\\\\\\n if (!muxingHandlerSetUp) {\\\\\\\\\\\\\\\\n // We have one multiplexing message handler as tiny-worker's\\\\\\\\\\\\\\\\n // addEventListener() only allows you to set a single message handler\\\\\\\\\\\\\\\\n self.addEventListener(\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"message\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\", ((event) => {\\\\\\\\\\\\\\\\n messageHandlers.forEach(handler => handler(event.data));\\\\\\\\\\\\\\\\n }));\\\\\\\\\\\\\\\\n muxingHandlerSetUp = true;\\\\\\\\\\\\\\\\n }\\\\\\\\\\\\\\\\n messageHandlers.add(onMessage);\\\\\\\\\\\\\\\\n const unsubscribe = () => messageHandlers.delete(onMessage);\\\\\\\\\\\\\\\\n return unsubscribe;\\\\\\\\\\\\\\\\n};\\\\\\\\\\\\\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"default\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"] = ({\\\\\\\\\\\\\\\\n isWorkerRuntime,\\\\\\\\\\\\\\\\n postMessageToMaster,\\\\\\\\\\\\\\\\n subscribeToMasterMessages\\\\\\\\\\\\\\\\n});\\\\\\\\\\\\\\\\n\\\\\\\\\\\\\\\\n\\\\\\\\\\\\\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js?\\\\\\\\\\\\\\\");\\\\\\\\n\\\\\\\\n/***/ })\\\\\\\\n\\\\\\\\n/******/ });\\\\\\\", null);\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.tiny-worker.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/implementation.worker_threads.js\\\":\\n/*!*******************************************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/implementation.worker_threads.js ***!\\n \\\\*******************************************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _worker_threads__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../worker_threads */ \\\\\\\"./node_modules/threads/dist-esm/worker_threads.js\\\\\\\");\\\\n\\\\nfunction assertMessagePort(port) {\\\\n if (!port) {\\\\n throw Error(\\\\\\\"Invariant violation: MessagePort to parent is not available.\\\\\\\");\\\\n }\\\\n return port;\\\\n}\\\\nconst isWorkerRuntime = function isWorkerRuntime() {\\\\n return !Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"])().isMainThread;\\\\n};\\\\nconst postMessageToMaster = function postMessageToMaster(data, transferList) {\\\\n assertMessagePort(Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"])().parentPort).postMessage(data, transferList);\\\\n};\\\\nconst subscribeToMasterMessages = function subscribeToMasterMessages(onMessage) {\\\\n const parentPort = Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"])().parentPort;\\\\n if (!parentPort) {\\\\n throw Error(\\\\\\\"Invariant violation: MessagePort to parent is not available.\\\\\\\");\\\\n }\\\\n const messageHandler = (message) => {\\\\n onMessage(message);\\\\n };\\\\n const unsubscribe = () => {\\\\n assertMessagePort(parentPort).off(\\\\\\\"message\\\\\\\", messageHandler);\\\\n };\\\\n assertMessagePort(parentPort).on(\\\\\\\"message\\\\\\\", messageHandler);\\\\n return unsubscribe;\\\\n};\\\\nfunction testImplementation() {\\\\n // Will throw if `worker_threads` are not available\\\\n Object(_worker_threads__WEBPACK_IMPORTED_MODULE_0__[\\\\\\\"default\\\\\\\"])();\\\\n}\\\\n/* harmony default export */ __webpack_exports__[\\\\\\\"default\\\\\\\"] = ({\\\\n isWorkerRuntime,\\\\n postMessageToMaster,\\\\n subscribeToMasterMessages,\\\\n testImplementation\\\\n});\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/implementation.worker_threads.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker/index.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker/index.js ***!\\n \\\\*******************************************************/\\n/*! exports provided: registerSerializer, Transfer, isWorkerRuntime, expose */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"isWorkerRuntime\\\\\\\", function() { return isWorkerRuntime; });\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"expose\\\\\\\", function() { return expose; });\\\\n/* harmony import */ var is_observable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! is-observable */ \\\\\\\"./node_modules/is-observable/index.js\\\\\\\");\\\\n/* harmony import */ var is_observable__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(is_observable__WEBPACK_IMPORTED_MODULE_0__);\\\\n/* harmony import */ var _common__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../common */ \\\\\\\"./node_modules/threads/dist-esm/common.js\\\\\\\");\\\\n/* harmony import */ var _transferable__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../transferable */ \\\\\\\"./node_modules/threads/dist-esm/transferable.js\\\\\\\");\\\\n/* harmony import */ var _types_messages__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
../types/messages */ \\\\\\\"./node_modules/threads/dist-esm/types/messages.js\\\\\\\");\\\\n/* harmony import */ var _implementation__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./implementation */ \\\\\\\"./node_modules/threads/dist-esm/worker/implementation.js\\\\\\\");\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"registerSerializer\\\\\\\", function() { return _common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"registerSerializer\\\\\\\"]; });\\\\n\\\\n/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"Transfer\\\\\\\", function() { return _transferable__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"Transfer\\\\\\\"]; });\\\\n\\\\nvar __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {\\\\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\\\\n return new (P || (P = Promise))(function (resolve, reject) {\\\\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\\\\n function rejected(value) { try { step(generator[\\\\\\\"throw\\\\\\\"](value)); } catch (e) { reject(e); } }\\\\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\\\\n step((generator = generator.apply(thisArg, _arguments || [])).next());\\\\n });\\\\n};\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n/** Returns `true` if this code is currently running in a worker. */\\\\nconst isWorkerRuntime = _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].isWorkerRuntime;\\\\nlet exposeCalled = false;\\\\nconst activeSubscriptions = new Map();\\\\nconst isMasterJobCancelMessage = (thing) => thing && thing.type === _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"MasterMessageType\\\\\\\"].cancel;\\\\nconst isMasterJobRunMessage = (thing) => thing && thing.type === _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"MasterMessageType\\\\\\\"].run;\\\\n/**\\\\n * There are issues with `is-observable` not recognizing zen-observable's instances.\\\\n * We are using `observable-fns`, but it's based on zen-observable, too.\\\\n */\\\\nconst isObservable = (thing) => is_observable__WEBPACK_IMPORTED_MODULE_0___default()(thing) || isZenObservable(thing);\\\\nfunction isZenObservable(thing) {\\\\n return thing && typeof thing === \\\\\\\"object\\\\\\\" && typeof thing.subscribe === \\\\\\\"function\\\\\\\";\\\\n}\\\\nfunction deconstructTransfer(thing) {\\\\n return Object(_transferable__WEBPACK_IMPORTED_MODULE_2__[\\\\\\\"isTransferDescriptor\\\\\\\"])(thing)\\\\n ? 
{ payload: thing.send, transferables: thing.transferables }\\\\n : { payload: thing, transferables: undefined };\\\\n}\\\\nfunction postFunctionInitMessage() {\\\\n const initMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].init,\\\\n exposed: {\\\\n type: \\\\\\\"function\\\\\\\"\\\\n }\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(initMessage);\\\\n}\\\\nfunction postModuleInitMessage(methodNames) {\\\\n const initMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].init,\\\\n exposed: {\\\\n type: \\\\\\\"module\\\\\\\",\\\\n methods: methodNames\\\\n }\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(initMessage);\\\\n}\\\\nfunction postJobErrorMessage(uid, rawError) {\\\\n const { payload: error, transferables } = deconstructTransfer(rawError);\\\\n const errorMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].error,\\\\n uid,\\\\n error: Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(error)\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(errorMessage, transferables);\\\\n}\\\\nfunction postJobResultMessage(uid, completed, resultValue) {\\\\n const { payload, transferables } = deconstructTransfer(resultValue);\\\\n const resultMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].result,\\\\n uid,\\\\n complete: completed ? true : undefined,\\\\n payload\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(resultMessage, transferables);\\\\n}\\\\nfunction postJobStartMessage(uid, resultType) {\\\\n const startMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].running,\\\\n uid,\\\\n resultType\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(startMessage);\\\\n}\\\\nfunction postUncaughtErrorMessage(error) {\\\\n try {\\\\n const errorMessage = {\\\\n type: _types_messages__WEBPACK_IMPORTED_MODULE_3__[\\\\\\\"WorkerMessageType\\\\\\\"].uncaughtError,\\\\n error: Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(error)\\\\n };\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].postMessageToMaster(errorMessage);\\\\n }\\\\n catch (subError) {\\\\n // tslint:disable-next-line no-console\\\\n console.error(\\\\\\\"Not reporting uncaught error back to master thread as it \\\\\\\" +\\\\n \\\\\\\"occured while reporting an uncaught error already.\\\\\\\" +\\\\n \\\\\\\"\\\\\\\\nLatest error:\\\\\\\", subError, \\\\\\\"\\\\\\\\nOriginal error:\\\\\\\", error);\\\\n }\\\\n}\\\\nfunction runFunction(jobUID, fn, args) {\\\\n return __awaiter(this, void 0, void 0, function* () {\\\\n let syncResult;\\\\n try {\\\\n syncResult = fn(...args);\\\\n }\\\\n catch (error) {\\\\n return postJobErrorMessage(jobUID, error);\\\\n }\\\\n const resultType = isObservable(syncResult) ? 
\\\\\\\"observable\\\\\\\" : \\\\\\\"promise\\\\\\\";\\\\n postJobStartMessage(jobUID, resultType);\\\\n if (isObservable(syncResult)) {\\\\n const subscription = syncResult.subscribe(value => postJobResultMessage(jobUID, false, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(value)), error => {\\\\n postJobErrorMessage(jobUID, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(error));\\\\n activeSubscriptions.delete(jobUID);\\\\n }, () => {\\\\n postJobResultMessage(jobUID, true);\\\\n activeSubscriptions.delete(jobUID);\\\\n });\\\\n activeSubscriptions.set(jobUID, subscription);\\\\n }\\\\n else {\\\\n try {\\\\n const result = yield syncResult;\\\\n postJobResultMessage(jobUID, true, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(result));\\\\n }\\\\n catch (error) {\\\\n postJobErrorMessage(jobUID, Object(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"serialize\\\\\\\"])(error));\\\\n }\\\\n }\\\\n });\\\\n}\\\\n/**\\\\n * Expose a function or a module (an object whose values are functions)\\\\n * to the main thread. Must be called exactly once in every worker thread\\\\n * to signal its API to the main thread.\\\\n *\\\\n * @param exposed Function or object whose values are functions\\\\n */\\\\nfunction expose(exposed) {\\\\n if (!_implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].isWorkerRuntime()) {\\\\n throw Error(\\\\\\\"expose() called in the master thread.\\\\\\\");\\\\n }\\\\n if (exposeCalled) {\\\\n throw Error(\\\\\\\"expose() called more than once. This is not possible. Pass an object to expose() if you want to expose multiple functions.\\\\\\\");\\\\n }\\\\n exposeCalled = true;\\\\n if (typeof exposed === \\\\\\\"function\\\\\\\") {\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].subscribeToMasterMessages(messageData => {\\\\n if (isMasterJobRunMessage(messageData) && !messageData.method) {\\\\n runFunction(messageData.uid, exposed, messageData.args.map(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"deserialize\\\\\\\"]));\\\\n }\\\\n });\\\\n postFunctionInitMessage();\\\\n }\\\\n else if (typeof exposed === \\\\\\\"object\\\\\\\" && exposed) {\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].subscribeToMasterMessages(messageData => {\\\\n if (isMasterJobRunMessage(messageData) && messageData.method) {\\\\n runFunction(messageData.uid, exposed[messageData.method], messageData.args.map(_common__WEBPACK_IMPORTED_MODULE_1__[\\\\\\\"deserialize\\\\\\\"]));\\\\n }\\\\n });\\\\n const methodNames = Object.keys(exposed).filter(key => typeof exposed[key] === \\\\\\\"function\\\\\\\");\\\\n postModuleInitMessage(methodNames);\\\\n }\\\\n else {\\\\n throw Error(`Invalid argument passed to expose(). 
Expected a function or an object, got: ${exposed}`);\\\\n }\\\\n _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].subscribeToMasterMessages(messageData => {\\\\n if (isMasterJobCancelMessage(messageData)) {\\\\n const jobUID = messageData.uid;\\\\n const subscription = activeSubscriptions.get(jobUID);\\\\n if (subscription) {\\\\n subscription.unsubscribe();\\\\n activeSubscriptions.delete(jobUID);\\\\n }\\\\n }\\\\n });\\\\n}\\\\nif (typeof self !== \\\\\\\"undefined\\\\\\\" && typeof self.addEventListener === \\\\\\\"function\\\\\\\" && _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].isWorkerRuntime()) {\\\\n self.addEventListener(\\\\\\\"error\\\\\\\", event => {\\\\n // Post with some delay, so the master had some time to subscribe to messages\\\\n setTimeout(() => postUncaughtErrorMessage(event.error || event), 250);\\\\n });\\\\n self.addEventListener(\\\\\\\"unhandledrejection\\\\\\\", event => {\\\\n const error = event.reason;\\\\n if (error && typeof error.message === \\\\\\\"string\\\\\\\") {\\\\n // Post with some delay, so the master had some time to subscribe to messages\\\\n setTimeout(() => postUncaughtErrorMessage(error), 250);\\\\n }\\\\n });\\\\n}\\\\nif (typeof process !== \\\\\\\"undefined\\\\\\\" && typeof process.on === \\\\\\\"function\\\\\\\" && _implementation__WEBPACK_IMPORTED_MODULE_4__[\\\\\\\"default\\\\\\\"].isWorkerRuntime()) {\\\\n process.on(\\\\\\\"uncaughtException\\\\\\\", (error) => {\\\\n // Post with some delay, so the master had some time to subscribe to messages\\\\n setTimeout(() => postUncaughtErrorMessage(error), 250);\\\\n });\\\\n process.on(\\\\\\\"unhandledRejection\\\\\\\", (error) => {\\\\n if (error && typeof error.message === \\\\\\\"string\\\\\\\") {\\\\n // Post with some delay, so the master had some time to subscribe to messages\\\\n setTimeout(() => postUncaughtErrorMessage(error), 250);\\\\n }\\\\n });\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/dist-esm/worker_threads.js\\\":\\n/*!*********************************************************!*\\\\\\n !*** ./node_modules/threads/dist-esm/worker_threads.js ***!\\n \\\\*********************************************************/\\n/*! exports provided: default */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, \\\\\\\"default\\\\\\\", function() { return getImplementation; });\\\\n// Webpack hack\\\\n// tslint:disable no-eval\\\\nlet implementation;\\\\nfunction selectImplementation() {\\\\n return typeof require === \\\\\\\"function\\\\\\\"\\\\n ? require(\\\\\\\"worker_threads\\\\\\\")\\\\n : eval(\\\\\\\"require\\\\\\\")(\\\\\\\"worker_threads\\\\\\\");\\\\n}\\\\nfunction getImplementation() {\\\\n if (!implementation) {\\\\n implementation = selectImplementation();\\\\n }\\\\n return implementation;\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/dist-esm/worker_threads.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/node_modules/debug/src/browser.js\\\":\\n/*!****************************************************************!*\\\\\\n !*** ./node_modules/threads/node_modules/debug/src/browser.js ***!\\n \\\\****************************************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"/* eslint-env browser */\\\\n\\\\n/**\\\\n * This is the web browser implementation of `debug()`.\\\\n */\\\\n\\\\nexports.formatArgs = formatArgs;\\\\nexports.save = save;\\\\nexports.load = load;\\\\nexports.useColors = useColors;\\\\nexports.storage = localstorage();\\\\nexports.destroy = (() => {\\\\n\\\\tlet warned = false;\\\\n\\\\n\\\\treturn () => {\\\\n\\\\t\\\\tif (!warned) {\\\\n\\\\t\\\\t\\\\twarned = true;\\\\n\\\\t\\\\t\\\\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\\\\n\\\\t\\\\t}\\\\n\\\\t};\\\\n})();\\\\n\\\\n/**\\\\n * Colors.\\\\n */\\\\n\\\\nexports.colors = [\\\\n\\\\t'#0000CC',\\\\n\\\\t'#0000FF',\\\\n\\\\t'#0033CC',\\\\n\\\\t'#0033FF',\\\\n\\\\t'#0066CC',\\\\n\\\\t'#0066FF',\\\\n\\\\t'#0099CC',\\\\n\\\\t'#0099FF',\\\\n\\\\t'#00CC00',\\\\n\\\\t'#00CC33',\\\\n\\\\t'#00CC66',\\\\n\\\\t'#00CC99',\\\\n\\\\t'#00CCCC',\\\\n\\\\t'#00CCFF',\\\\n\\\\t'#3300CC',\\\\n\\\\t'#3300FF',\\\\n\\\\t'#3333CC',\\\\n\\\\t'#3333FF',\\\\n\\\\t'#3366CC',\\\\n\\\\t'#3366FF',\\\\n\\\\t'#3399CC',\\\\n\\\\t'#3399FF',\\\\n\\\\t'#33CC00',\\\\n\\\\t'#33CC33',\\\\n\\\\t'#33CC66',\\\\n\\\\t'#33CC99',\\\\n\\\\t'#33CCCC',\\\\n\\\\t'#33CCFF',\\\\n\\\\t'#6600CC',\\\\n\\\\t'#6600FF',\\\\n\\\\t'#6633CC',\\\\n\\\\t'#6633FF',\\\\n\\\\t'#66CC00',\\\\n\\\\t'#66CC33',\\\\n\\\\t'#9900CC',\\\\n\\\\t'#9900FF',\\\\n\\\\t'#9933CC',\\\\n\\\\t'#9933FF',\\\\n\\\\t'#99CC00',\\\\n\\\\t'#99CC33',\\\\n\\\\t'#CC0000',\\\\n\\\\t'#CC0033',\\\\n\\\\t'#CC0066',\\\\n\\\\t'#CC0099',\\\\n\\\\t'#CC00CC',\\\\n\\\\t'#CC00FF',\\\\n\\\\t'#CC3300',\\\\n\\\\t'#CC3333',\\\\n\\\\t'#CC3366',\\\\n\\\\t'#CC3399',\\\\n\\\\t'#CC33CC',\\\\n\\\\t'#CC33FF',\\\\n\\\\t'#CC6600',\\\\n\\\\t'#CC6633',\\\\n\\\\t'#CC9900',\\\\n\\\\t'#CC9933',\\\\n\\\\t'#CCCC00',\\\\n\\\\t'#CCCC33',\\\\n\\\\t'#FF0000',\\\\n\\\\t'#FF0033',\\\\n\\\\t'#FF0066',\\\\n\\\\t'#FF0099',\\\\n\\\\t'#FF00CC',\\\\n\\\\t'#FF00FF',\\\\n\\\\t'#FF3300',\\\\n\\\\t'#FF3333',\\\\n\\\\t'#FF3366',\\\\n\\\\t'#FF3399',\\\\n\\\\t'#FF33CC',\\\\n\\\\t'#FF33FF',\\\\n\\\\t'#FF6600',\\\\n\\\\t'#FF6633',\\\\n\\\\t'#FF9900',\\\\n\\\\t'#FF9933',\\\\n\\\\t'#FFCC00',\\\\n\\\\t'#FFCC33'\\\\n];\\\\n\\\\n/**\\\\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\\\\n * and the Firebug extension (any Firefox version) are known\\\\n * to support \\\\\\\"%c\\\\\\\" CSS customizations.\\\\n *\\\\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\\\\n */\\\\n\\\\n// eslint-disable-next-line complexity\\\\nfunction useColors() {\\\\n\\\\t// NB: In an Electron preload script, document will be defined but not fully\\\\n\\\\t// initialized. Since we know we're in Chrome, we'll just detect this case\\\\n\\\\t// explicitly\\\\n\\\\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\\\\n\\\\t\\\\treturn true;\\\\n\\\\t}\\\\n\\\\n\\\\t// Internet Explorer and Edge do not support colors.\\\\n\\\\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\\\\\\\/(\\\\\\\\d+)/)) {\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\t// Is webkit? 
http://stackoverflow.com/a/16459606/376773\\\\n\\\\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\\\\n\\\\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\\\\n\\\\t\\\\t// Is firebug? http://stackoverflow.com/a/398120/376773\\\\n\\\\t\\\\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\\\\n\\\\t\\\\t// Is firefox >= v31?\\\\n\\\\t\\\\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\\\\n\\\\t\\\\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\\\\\\\/(\\\\\\\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\\\\n\\\\t\\\\t// Double check webkit in userAgent just in case we are in a worker\\\\n\\\\t\\\\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\\\\\\\/(\\\\\\\\d+)/));\\\\n}\\\\n\\\\n/**\\\\n * Colorize log arguments if enabled.\\\\n *\\\\n * @api public\\\\n */\\\\n\\\\nfunction formatArgs(args) {\\\\n\\\\targs[0] = (this.useColors ? '%c' : '') +\\\\n\\\\t\\\\tthis.namespace +\\\\n\\\\t\\\\t(this.useColors ? ' %c' : ' ') +\\\\n\\\\t\\\\targs[0] +\\\\n\\\\t\\\\t(this.useColors ? '%c ' : ' ') +\\\\n\\\\t\\\\t'+' + module.exports.humanize(this.diff);\\\\n\\\\n\\\\tif (!this.useColors) {\\\\n\\\\t\\\\treturn;\\\\n\\\\t}\\\\n\\\\n\\\\tconst c = 'color: ' + this.color;\\\\n\\\\targs.splice(1, 0, c, 'color: inherit');\\\\n\\\\n\\\\t// The final \\\\\\\"%c\\\\\\\" is somewhat tricky, because there could be other\\\\n\\\\t// arguments passed either before or after the %c, so we need to\\\\n\\\\t// figure out the correct index to insert the CSS into\\\\n\\\\tlet index = 0;\\\\n\\\\tlet lastC = 0;\\\\n\\\\targs[0].replace(/%[a-zA-Z%]/g, match => {\\\\n\\\\t\\\\tif (match === '%%') {\\\\n\\\\t\\\\t\\\\treturn;\\\\n\\\\t\\\\t}\\\\n\\\\t\\\\tindex++;\\\\n\\\\t\\\\tif (match === '%c') {\\\\n\\\\t\\\\t\\\\t// We only are interested in the *last* %c\\\\n\\\\t\\\\t\\\\t// (the user may have provided their own)\\\\n\\\\t\\\\t\\\\tlastC = index;\\\\n\\\\t\\\\t}\\\\n\\\\t});\\\\n\\\\n\\\\targs.splice(lastC, 0, c);\\\\n}\\\\n\\\\n/**\\\\n * Invokes `console.debug()` when available.\\\\n * No-op when `console.debug` is not a \\\\\\\"function\\\\\\\".\\\\n * If `console.debug` is not available, falls back\\\\n * to `console.log`.\\\\n *\\\\n * @api public\\\\n */\\\\nexports.log = console.debug || console.log || (() => {});\\\\n\\\\n/**\\\\n * Save `namespaces`.\\\\n *\\\\n * @param {String} namespaces\\\\n * @api private\\\\n */\\\\nfunction save(namespaces) {\\\\n\\\\ttry {\\\\n\\\\t\\\\tif (namespaces) {\\\\n\\\\t\\\\t\\\\texports.storage.setItem('debug', namespaces);\\\\n\\\\t\\\\t} else {\\\\n\\\\t\\\\t\\\\texports.storage.removeItem('debug');\\\\n\\\\t\\\\t}\\\\n\\\\t} catch (error) {\\\\n\\\\t\\\\t// Swallow\\\\n\\\\t\\\\t// XXX (@Qix-) should we be logging these?\\\\n\\\\t}\\\\n}\\\\n\\\\n/**\\\\n * Load `namespaces`.\\\\n *\\\\n * @return {String} returns the previously persisted debug modes\\\\n * @api private\\\\n */\\\\nfunction load() {\\\\n\\\\tlet r;\\\\n\\\\ttry {\\\\n\\\\t\\\\tr = exports.storage.getItem('debug');\\\\n\\\\t} catch (error) {\\\\n\\\\t\\\\t// Swallow\\\\n\\\\t\\\\t// XXX (@Qix-) should we be logging these?\\\\n\\\\t}\\\\n\\\\n\\\\t// If debug isn't set in LS, and we're in Electron, try to 
load $DEBUG\\\\n\\\\tif (!r && typeof process !== 'undefined' && 'env' in process) {\\\\n\\\\t\\\\tr = process.env.DEBUG;\\\\n\\\\t}\\\\n\\\\n\\\\treturn r;\\\\n}\\\\n\\\\n/**\\\\n * Localstorage attempts to return the localstorage.\\\\n *\\\\n * This is necessary because safari throws\\\\n * when a user disables cookies/localstorage\\\\n * and you attempt to access it.\\\\n *\\\\n * @return {LocalStorage}\\\\n * @api private\\\\n */\\\\n\\\\nfunction localstorage() {\\\\n\\\\ttry {\\\\n\\\\t\\\\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\\\\n\\\\t\\\\t// The Browser also has localStorage in the global context.\\\\n\\\\t\\\\treturn localStorage;\\\\n\\\\t} catch (error) {\\\\n\\\\t\\\\t// Swallow\\\\n\\\\t\\\\t// XXX (@Qix-) should we be logging these?\\\\n\\\\t}\\\\n}\\\\n\\\\nmodule.exports = __webpack_require__(/*! ./common */ \\\\\\\"./node_modules/threads/node_modules/debug/src/common.js\\\\\\\")(exports);\\\\n\\\\nconst {formatters} = module.exports;\\\\n\\\\n/**\\\\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\\\\n */\\\\n\\\\nformatters.j = function (v) {\\\\n\\\\ttry {\\\\n\\\\t\\\\treturn JSON.stringify(v);\\\\n\\\\t} catch (error) {\\\\n\\\\t\\\\treturn '[UnexpectedJSONParseError]: ' + error.message;\\\\n\\\\t}\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/browser.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/node_modules/debug/src/common.js\\\":\\n/*!***************************************************************!*\\\\\\n !*** ./node_modules/threads/node_modules/debug/src/common.js ***!\\n \\\\***************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"\\\\n/**\\\\n * This is the common logic for both the Node.js and web browser\\\\n * implementations of `debug()`.\\\\n */\\\\n\\\\nfunction setup(env) {\\\\n\\\\tcreateDebug.debug = createDebug;\\\\n\\\\tcreateDebug.default = createDebug;\\\\n\\\\tcreateDebug.coerce = coerce;\\\\n\\\\tcreateDebug.disable = disable;\\\\n\\\\tcreateDebug.enable = enable;\\\\n\\\\tcreateDebug.enabled = enabled;\\\\n\\\\tcreateDebug.humanize = __webpack_require__(/*! ms */ \\\\\\\"./node_modules/threads/node_modules/ms/index.js\\\\\\\");\\\\n\\\\tcreateDebug.destroy = destroy;\\\\n\\\\n\\\\tObject.keys(env).forEach(key => {\\\\n\\\\t\\\\tcreateDebug[key] = env[key];\\\\n\\\\t});\\\\n\\\\n\\\\t/**\\\\n\\\\t* The currently active debug mode names, and names to skip.\\\\n\\\\t*/\\\\n\\\\n\\\\tcreateDebug.names = [];\\\\n\\\\tcreateDebug.skips = [];\\\\n\\\\n\\\\t/**\\\\n\\\\t* Map of special \\\\\\\"%n\\\\\\\" handling functions, for the debug \\\\\\\"format\\\\\\\" argument.\\\\n\\\\t*\\\\n\\\\t* Valid key names are a single, lower or upper-case letter, i.e. 
\\\\\\\"n\\\\\\\" and \\\\\\\"N\\\\\\\".\\\\n\\\\t*/\\\\n\\\\tcreateDebug.formatters = {};\\\\n\\\\n\\\\t/**\\\\n\\\\t* Selects a color for a debug namespace\\\\n\\\\t* @param {String} namespace The namespace string for the for the debug instance to be colored\\\\n\\\\t* @return {Number|String} An ANSI color code for the given namespace\\\\n\\\\t* @api private\\\\n\\\\t*/\\\\n\\\\tfunction selectColor(namespace) {\\\\n\\\\t\\\\tlet hash = 0;\\\\n\\\\n\\\\t\\\\tfor (let i = 0; i < namespace.length; i++) {\\\\n\\\\t\\\\t\\\\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\\\\n\\\\t\\\\t\\\\thash |= 0; // Convert to 32bit integer\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\\\\n\\\\t}\\\\n\\\\tcreateDebug.selectColor = selectColor;\\\\n\\\\n\\\\t/**\\\\n\\\\t* Create a debugger with the given `namespace`.\\\\n\\\\t*\\\\n\\\\t* @param {String} namespace\\\\n\\\\t* @return {Function}\\\\n\\\\t* @api public\\\\n\\\\t*/\\\\n\\\\tfunction createDebug(namespace) {\\\\n\\\\t\\\\tlet prevTime;\\\\n\\\\t\\\\tlet enableOverride = null;\\\\n\\\\t\\\\tlet namespacesCache;\\\\n\\\\t\\\\tlet enabledCache;\\\\n\\\\n\\\\t\\\\tfunction debug(...args) {\\\\n\\\\t\\\\t\\\\t// Disabled?\\\\n\\\\t\\\\t\\\\tif (!debug.enabled) {\\\\n\\\\t\\\\t\\\\t\\\\treturn;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\tconst self = debug;\\\\n\\\\n\\\\t\\\\t\\\\t// Set `diff` timestamp\\\\n\\\\t\\\\t\\\\tconst curr = Number(new Date());\\\\n\\\\t\\\\t\\\\tconst ms = curr - (prevTime || curr);\\\\n\\\\t\\\\t\\\\tself.diff = ms;\\\\n\\\\t\\\\t\\\\tself.prev = prevTime;\\\\n\\\\t\\\\t\\\\tself.curr = curr;\\\\n\\\\t\\\\t\\\\tprevTime = curr;\\\\n\\\\n\\\\t\\\\t\\\\targs[0] = createDebug.coerce(args[0]);\\\\n\\\\n\\\\t\\\\t\\\\tif (typeof args[0] !== 'string') {\\\\n\\\\t\\\\t\\\\t\\\\t// Anything else let's inspect with %O\\\\n\\\\t\\\\t\\\\t\\\\targs.unshift('%O');\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\t// Apply any `formatters` transformations\\\\n\\\\t\\\\t\\\\tlet index = 0;\\\\n\\\\t\\\\t\\\\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\\\\n\\\\t\\\\t\\\\t\\\\t// If we encounter an escaped % then don't increase the array index\\\\n\\\\t\\\\t\\\\t\\\\tif (match === '%%') {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\treturn '%';\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\t\\\\tindex++;\\\\n\\\\t\\\\t\\\\t\\\\tconst formatter = createDebug.formatters[format];\\\\n\\\\t\\\\t\\\\t\\\\tif (typeof formatter === 'function') {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tconst val = args[index];\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tmatch = formatter.call(self, val);\\\\n\\\\n\\\\t\\\\t\\\\t\\\\t\\\\t// Now we need to remove `args[index]` since it's inlined in the `format`\\\\n\\\\t\\\\t\\\\t\\\\t\\\\targs.splice(index, 1);\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tindex--;\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\t\\\\treturn match;\\\\n\\\\t\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\t\\\\t// Apply env-specific formatting (colors, etc.)\\\\n\\\\t\\\\t\\\\tcreateDebug.formatArgs.call(self, args);\\\\n\\\\n\\\\t\\\\t\\\\tconst logFn = self.log || createDebug.log;\\\\n\\\\t\\\\t\\\\tlogFn.apply(self, args);\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\tdebug.namespace = namespace;\\\\n\\\\t\\\\tdebug.useColors = createDebug.useColors();\\\\n\\\\t\\\\tdebug.color = createDebug.selectColor(namespace);\\\\n\\\\t\\\\tdebug.extend = extend;\\\\n\\\\t\\\\tdebug.destroy = createDebug.destroy; // XXX Temporary. 
Will be removed in the next major release.\\\\n\\\\n\\\\t\\\\tObject.defineProperty(debug, 'enabled', {\\\\n\\\\t\\\\t\\\\tenumerable: true,\\\\n\\\\t\\\\t\\\\tconfigurable: false,\\\\n\\\\t\\\\t\\\\tget: () => {\\\\n\\\\t\\\\t\\\\t\\\\tif (enableOverride !== null) {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\treturn enableOverride;\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\t\\\\tif (namespacesCache !== createDebug.namespaces) {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tnamespacesCache = createDebug.namespaces;\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tenabledCache = createDebug.enabled(namespace);\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\t\\\\treturn enabledCache;\\\\n\\\\t\\\\t\\\\t},\\\\n\\\\t\\\\t\\\\tset: v => {\\\\n\\\\t\\\\t\\\\t\\\\tenableOverride = v;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\t// Env-specific initialization logic for debug instances\\\\n\\\\t\\\\tif (typeof createDebug.init === 'function') {\\\\n\\\\t\\\\t\\\\tcreateDebug.init(debug);\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\treturn debug;\\\\n\\\\t}\\\\n\\\\n\\\\tfunction extend(namespace, delimiter) {\\\\n\\\\t\\\\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\\\\n\\\\t\\\\tnewDebug.log = this.log;\\\\n\\\\t\\\\treturn newDebug;\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* Enables a debug mode by namespaces. This can include modes\\\\n\\\\t* separated by a colon and wildcards.\\\\n\\\\t*\\\\n\\\\t* @param {String} namespaces\\\\n\\\\t* @api public\\\\n\\\\t*/\\\\n\\\\tfunction enable(namespaces) {\\\\n\\\\t\\\\tcreateDebug.save(namespaces);\\\\n\\\\t\\\\tcreateDebug.namespaces = namespaces;\\\\n\\\\n\\\\t\\\\tcreateDebug.names = [];\\\\n\\\\t\\\\tcreateDebug.skips = [];\\\\n\\\\n\\\\t\\\\tlet i;\\\\n\\\\t\\\\tconst split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\\\\\\\\s,]+/);\\\\n\\\\t\\\\tconst len = split.length;\\\\n\\\\n\\\\t\\\\tfor (i = 0; i < len; i++) {\\\\n\\\\t\\\\t\\\\tif (!split[i]) {\\\\n\\\\t\\\\t\\\\t\\\\t// ignore empty strings\\\\n\\\\t\\\\t\\\\t\\\\tcontinue;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\tnamespaces = split[i].replace(/\\\\\\\\*/g, '.*?');\\\\n\\\\n\\\\t\\\\t\\\\tif (namespaces[0] === '-') {\\\\n\\\\t\\\\t\\\\t\\\\tcreateDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));\\\\n\\\\t\\\\t\\\\t} else {\\\\n\\\\t\\\\t\\\\t\\\\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t}\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* Disable debug output.\\\\n\\\\t*\\\\n\\\\t* @return {String} namespaces\\\\n\\\\t* @api public\\\\n\\\\t*/\\\\n\\\\tfunction disable() {\\\\n\\\\t\\\\tconst namespaces = [\\\\n\\\\t\\\\t\\\\t...createDebug.names.map(toNamespace),\\\\n\\\\t\\\\t\\\\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\\\\n\\\\t\\\\t].join(',');\\\\n\\\\t\\\\tcreateDebug.enable('');\\\\n\\\\t\\\\treturn namespaces;\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* Returns true if the given mode name is enabled, false otherwise.\\\\n\\\\t*\\\\n\\\\t* @param {String} name\\\\n\\\\t* @return {Boolean}\\\\n\\\\t* @api public\\\\n\\\\t*/\\\\n\\\\tfunction enabled(name) {\\\\n\\\\t\\\\tif (name[name.length - 1] === '*') {\\\\n\\\\t\\\\t\\\\treturn true;\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\tlet i;\\\\n\\\\t\\\\tlet len;\\\\n\\\\n\\\\t\\\\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\\\\n\\\\t\\\\t\\\\tif (createDebug.skips[i].test(name)) {\\\\n\\\\t\\\\t\\\\t\\\\treturn false;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\\\\n\\\\t\\\\t\\\\tif (createDebug.names[i].test(name)) {\\\\n\\\\t\\\\t\\\\t\\\\treturn true;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\treturn false;\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* Convert regexp to namespace\\\\n\\\\t*\\\\n\\\\t* @param {RegExp} regxep\\\\n\\\\t* @return {String} namespace\\\\n\\\\t* @api private\\\\n\\\\t*/\\\\n\\\\tfunction toNamespace(regexp) {\\\\n\\\\t\\\\treturn regexp.toString()\\\\n\\\\t\\\\t\\\\t.substring(2, regexp.toString().length - 2)\\\\n\\\\t\\\\t\\\\t.replace(/\\\\\\\\.\\\\\\\\*\\\\\\\\?$/, '*');\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* Coerce `val`.\\\\n\\\\t*\\\\n\\\\t* @param {Mixed} val\\\\n\\\\t* @return {Mixed}\\\\n\\\\t* @api private\\\\n\\\\t*/\\\\n\\\\tfunction coerce(val) {\\\\n\\\\t\\\\tif (val instanceof Error) {\\\\n\\\\t\\\\t\\\\treturn val.stack || val.message;\\\\n\\\\t\\\\t}\\\\n\\\\t\\\\treturn val;\\\\n\\\\t}\\\\n\\\\n\\\\t/**\\\\n\\\\t* XXX DO NOT USE. This is a temporary stub function.\\\\n\\\\t* XXX It WILL be removed in the next major release.\\\\n\\\\t*/\\\\n\\\\tfunction destroy() {\\\\n\\\\t\\\\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\\\\n\\\\t}\\\\n\\\\n\\\\tcreateDebug.enable(createDebug.load());\\\\n\\\\n\\\\treturn createDebug;\\\\n}\\\\n\\\\nmodule.exports = setup;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/common.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/node_modules/debug/src/index.js\\\":\\n/*!**************************************************************!*\\\\\\n !*** ./node_modules/threads/node_modules/debug/src/index.js ***!\\n \\\\**************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"/**\\\\n * Detect Electron renderer / nwjs process, which is node, but we should\\\\n * treat as a browser.\\\\n */\\\\n\\\\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\\\\n\\\\tmodule.exports = __webpack_require__(/*! ./browser.js */ \\\\\\\"./node_modules/threads/node_modules/debug/src/browser.js\\\\\\\");\\\\n} else {\\\\n\\\\tmodule.exports = __webpack_require__(/*! ./node.js */ \\\\\\\"./node_modules/threads/node_modules/debug/src/node.js\\\\\\\");\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/node_modules/debug/src/node.js\\\":\\n/*!*************************************************************!*\\\\\\n !*** ./node_modules/threads/node_modules/debug/src/node.js ***!\\n \\\\*************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"/**\\\\n * Module dependencies.\\\\n */\\\\n\\\\nconst tty = __webpack_require__(/*! tty */ \\\\\\\"tty\\\\\\\");\\\\nconst util = __webpack_require__(/*! util */ \\\\\\\"util\\\\\\\");\\\\n\\\\n/**\\\\n * This is the Node.js implementation of `debug()`.\\\\n */\\\\n\\\\nexports.init = init;\\\\nexports.log = log;\\\\nexports.formatArgs = formatArgs;\\\\nexports.save = save;\\\\nexports.load = load;\\\\nexports.useColors = useColors;\\\\nexports.destroy = util.deprecate(\\\\n\\\\t() => {},\\\\n\\\\t'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'\\\\n);\\\\n\\\\n/**\\\\n * Colors.\\\\n */\\\\n\\\\nexports.colors = [6, 2, 3, 4, 5, 1];\\\\n\\\\ntry {\\\\n\\\\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\\\\n\\\\t// eslint-disable-next-line import/no-extraneous-dependencies\\\\n\\\\tconst supportsColor = __webpack_require__(/*! 
supports-color */ \\\\\\\"./node_modules/supports-color/index.js\\\\\\\");\\\\n\\\\n\\\\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\\\\n\\\\t\\\\texports.colors = [\\\\n\\\\t\\\\t\\\\t20,\\\\n\\\\t\\\\t\\\\t21,\\\\n\\\\t\\\\t\\\\t26,\\\\n\\\\t\\\\t\\\\t27,\\\\n\\\\t\\\\t\\\\t32,\\\\n\\\\t\\\\t\\\\t33,\\\\n\\\\t\\\\t\\\\t38,\\\\n\\\\t\\\\t\\\\t39,\\\\n\\\\t\\\\t\\\\t40,\\\\n\\\\t\\\\t\\\\t41,\\\\n\\\\t\\\\t\\\\t42,\\\\n\\\\t\\\\t\\\\t43,\\\\n\\\\t\\\\t\\\\t44,\\\\n\\\\t\\\\t\\\\t45,\\\\n\\\\t\\\\t\\\\t56,\\\\n\\\\t\\\\t\\\\t57,\\\\n\\\\t\\\\t\\\\t62,\\\\n\\\\t\\\\t\\\\t63,\\\\n\\\\t\\\\t\\\\t68,\\\\n\\\\t\\\\t\\\\t69,\\\\n\\\\t\\\\t\\\\t74,\\\\n\\\\t\\\\t\\\\t75,\\\\n\\\\t\\\\t\\\\t76,\\\\n\\\\t\\\\t\\\\t77,\\\\n\\\\t\\\\t\\\\t78,\\\\n\\\\t\\\\t\\\\t79,\\\\n\\\\t\\\\t\\\\t80,\\\\n\\\\t\\\\t\\\\t81,\\\\n\\\\t\\\\t\\\\t92,\\\\n\\\\t\\\\t\\\\t93,\\\\n\\\\t\\\\t\\\\t98,\\\\n\\\\t\\\\t\\\\t99,\\\\n\\\\t\\\\t\\\\t112,\\\\n\\\\t\\\\t\\\\t113,\\\\n\\\\t\\\\t\\\\t128,\\\\n\\\\t\\\\t\\\\t129,\\\\n\\\\t\\\\t\\\\t134,\\\\n\\\\t\\\\t\\\\t135,\\\\n\\\\t\\\\t\\\\t148,\\\\n\\\\t\\\\t\\\\t149,\\\\n\\\\t\\\\t\\\\t160,\\\\n\\\\t\\\\t\\\\t161,\\\\n\\\\t\\\\t\\\\t162,\\\\n\\\\t\\\\t\\\\t163,\\\\n\\\\t\\\\t\\\\t164,\\\\n\\\\t\\\\t\\\\t165,\\\\n\\\\t\\\\t\\\\t166,\\\\n\\\\t\\\\t\\\\t167,\\\\n\\\\t\\\\t\\\\t168,\\\\n\\\\t\\\\t\\\\t169,\\\\n\\\\t\\\\t\\\\t170,\\\\n\\\\t\\\\t\\\\t171,\\\\n\\\\t\\\\t\\\\t172,\\\\n\\\\t\\\\t\\\\t173,\\\\n\\\\t\\\\t\\\\t178,\\\\n\\\\t\\\\t\\\\t179,\\\\n\\\\t\\\\t\\\\t184,\\\\n\\\\t\\\\t\\\\t185,\\\\n\\\\t\\\\t\\\\t196,\\\\n\\\\t\\\\t\\\\t197,\\\\n\\\\t\\\\t\\\\t198,\\\\n\\\\t\\\\t\\\\t199,\\\\n\\\\t\\\\t\\\\t200,\\\\n\\\\t\\\\t\\\\t201,\\\\n\\\\t\\\\t\\\\t202,\\\\n\\\\t\\\\t\\\\t203,\\\\n\\\\t\\\\t\\\\t204,\\\\n\\\\t\\\\t\\\\t205,\\\\n\\\\t\\\\t\\\\t206,\\\\n\\\\t\\\\t\\\\t207,\\\\n\\\\t\\\\t\\\\t208,\\\\n\\\\t\\\\t\\\\t209,\\\\n\\\\t\\\\t\\\\t214,\\\\n\\\\t\\\\t\\\\t215,\\\\n\\\\t\\\\t\\\\t220,\\\\n\\\\t\\\\t\\\\t221\\\\n\\\\t\\\\t];\\\\n\\\\t}\\\\n} catch (error) {\\\\n\\\\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\\\\n}\\\\n\\\\n/**\\\\n * Build up the default `inspectOpts` object from the environment variables.\\\\n *\\\\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\\\\n */\\\\n\\\\nexports.inspectOpts = Object.keys(process.env).filter(key => {\\\\n\\\\treturn /^debug_/i.test(key);\\\\n}).reduce((obj, key) => {\\\\n\\\\t// Camel-case\\\\n\\\\tconst prop = key\\\\n\\\\t\\\\t.substring(6)\\\\n\\\\t\\\\t.toLowerCase()\\\\n\\\\t\\\\t.replace(/_([a-z])/g, (_, k) => {\\\\n\\\\t\\\\t\\\\treturn k.toUpperCase();\\\\n\\\\t\\\\t});\\\\n\\\\n\\\\t// Coerce string value into JS value\\\\n\\\\tlet val = process.env[key];\\\\n\\\\tif (/^(yes|on|true|enabled)$/i.test(val)) {\\\\n\\\\t\\\\tval = true;\\\\n\\\\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\\\\n\\\\t\\\\tval = false;\\\\n\\\\t} else if (val === 'null') {\\\\n\\\\t\\\\tval = null;\\\\n\\\\t} else {\\\\n\\\\t\\\\tval = Number(val);\\\\n\\\\t}\\\\n\\\\n\\\\tobj[prop] = val;\\\\n\\\\treturn obj;\\\\n}, {});\\\\n\\\\n/**\\\\n * Is stdout a TTY? 
Colored output is enabled when `true`.\\\\n */\\\\n\\\\nfunction useColors() {\\\\n\\\\treturn 'colors' in exports.inspectOpts ?\\\\n\\\\t\\\\tBoolean(exports.inspectOpts.colors) :\\\\n\\\\t\\\\ttty.isatty(process.stderr.fd);\\\\n}\\\\n\\\\n/**\\\\n * Adds ANSI color escape codes if enabled.\\\\n *\\\\n * @api public\\\\n */\\\\n\\\\nfunction formatArgs(args) {\\\\n\\\\tconst {namespace: name, useColors} = this;\\\\n\\\\n\\\\tif (useColors) {\\\\n\\\\t\\\\tconst c = this.color;\\\\n\\\\t\\\\tconst colorCode = '\\\\\\\\u001B[3' + (c < 8 ? c : '8;5;' + c);\\\\n\\\\t\\\\tconst prefix = ` ${colorCode};1m${name} \\\\\\\\u001B[0m`;\\\\n\\\\n\\\\t\\\\targs[0] = prefix + args[0].split('\\\\\\\\n').join('\\\\\\\\n' + prefix);\\\\n\\\\t\\\\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\\\\\\\u001B[0m');\\\\n\\\\t} else {\\\\n\\\\t\\\\targs[0] = getDate() + name + ' ' + args[0];\\\\n\\\\t}\\\\n}\\\\n\\\\nfunction getDate() {\\\\n\\\\tif (exports.inspectOpts.hideDate) {\\\\n\\\\t\\\\treturn '';\\\\n\\\\t}\\\\n\\\\treturn new Date().toISOString() + ' ';\\\\n}\\\\n\\\\n/**\\\\n * Invokes `util.format()` with the specified arguments and writes to stderr.\\\\n */\\\\n\\\\nfunction log(...args) {\\\\n\\\\treturn process.stderr.write(util.format(...args) + '\\\\\\\\n');\\\\n}\\\\n\\\\n/**\\\\n * Save `namespaces`.\\\\n *\\\\n * @param {String} namespaces\\\\n * @api private\\\\n */\\\\nfunction save(namespaces) {\\\\n\\\\tif (namespaces) {\\\\n\\\\t\\\\tprocess.env.DEBUG = namespaces;\\\\n\\\\t} else {\\\\n\\\\t\\\\t// If you set a process.env field to null or undefined, it gets cast to the\\\\n\\\\t\\\\t// string 'null' or 'undefined'. Just delete instead.\\\\n\\\\t\\\\tdelete process.env.DEBUG;\\\\n\\\\t}\\\\n}\\\\n\\\\n/**\\\\n * Load `namespaces`.\\\\n *\\\\n * @return {String} returns the previously persisted debug modes\\\\n * @api private\\\\n */\\\\n\\\\nfunction load() {\\\\n\\\\treturn process.env.DEBUG;\\\\n}\\\\n\\\\n/**\\\\n * Init logic for `debug` instances.\\\\n *\\\\n * Create a new `inspectOpts` object in case `useColors` is set\\\\n * differently for a particular `debug` instance.\\\\n */\\\\n\\\\nfunction init(debug) {\\\\n\\\\tdebug.inspectOpts = {};\\\\n\\\\n\\\\tconst keys = Object.keys(exports.inspectOpts);\\\\n\\\\tfor (let i = 0; i < keys.length; i++) {\\\\n\\\\t\\\\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\\\\n\\\\t}\\\\n}\\\\n\\\\nmodule.exports = __webpack_require__(/*! 
./common */ \\\\\\\"./node_modules/threads/node_modules/debug/src/common.js\\\\\\\")(exports);\\\\n\\\\nconst {formatters} = module.exports;\\\\n\\\\n/**\\\\n * Map %o to `util.inspect()`, all on a single line.\\\\n */\\\\n\\\\nformatters.o = function (v) {\\\\n\\\\tthis.inspectOpts.colors = this.useColors;\\\\n\\\\treturn util.inspect(v, this.inspectOpts)\\\\n\\\\t\\\\t.split('\\\\\\\\n')\\\\n\\\\t\\\\t.map(str => str.trim())\\\\n\\\\t\\\\t.join(' ');\\\\n};\\\\n\\\\n/**\\\\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\\\\n */\\\\n\\\\nformatters.O = function (v) {\\\\n\\\\tthis.inspectOpts.colors = this.useColors;\\\\n\\\\treturn util.inspect(v, this.inspectOpts);\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/debug/src/node.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/threads/node_modules/ms/index.js\\\":\\n/*!*******************************************************!*\\\\\\n !*** ./node_modules/threads/node_modules/ms/index.js ***!\\n \\\\*******************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"/**\\\\n * Helpers.\\\\n */\\\\n\\\\nvar s = 1000;\\\\nvar m = s * 60;\\\\nvar h = m * 60;\\\\nvar d = h * 24;\\\\nvar w = d * 7;\\\\nvar y = d * 365.25;\\\\n\\\\n/**\\\\n * Parse or format the given `val`.\\\\n *\\\\n * Options:\\\\n *\\\\n * - `long` verbose formatting [false]\\\\n *\\\\n * @param {String|Number} val\\\\n * @param {Object} [options]\\\\n * @throws {Error} throw an error if val is not a non-empty string or a number\\\\n * @return {String|Number}\\\\n * @api public\\\\n */\\\\n\\\\nmodule.exports = function(val, options) {\\\\n options = options || {};\\\\n var type = typeof val;\\\\n if (type === 'string' && val.length > 0) {\\\\n return parse(val);\\\\n } else if (type === 'number' && isFinite(val)) {\\\\n return options.long ? fmtLong(val) : fmtShort(val);\\\\n }\\\\n throw new Error(\\\\n 'val is not a non-empty string or a valid number. 
val=' +\\\\n JSON.stringify(val)\\\\n );\\\\n};\\\\n\\\\n/**\\\\n * Parse the given `str` and return milliseconds.\\\\n *\\\\n * @param {String} str\\\\n * @return {Number}\\\\n * @api private\\\\n */\\\\n\\\\nfunction parse(str) {\\\\n str = String(str);\\\\n if (str.length > 100) {\\\\n return;\\\\n }\\\\n var match = /^(-?(?:\\\\\\\\d+)?\\\\\\\\.?\\\\\\\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\\\\n str\\\\n );\\\\n if (!match) {\\\\n return;\\\\n }\\\\n var n = parseFloat(match[1]);\\\\n var type = (match[2] || 'ms').toLowerCase();\\\\n switch (type) {\\\\n case 'years':\\\\n case 'year':\\\\n case 'yrs':\\\\n case 'yr':\\\\n case 'y':\\\\n return n * y;\\\\n case 'weeks':\\\\n case 'week':\\\\n case 'w':\\\\n return n * w;\\\\n case 'days':\\\\n case 'day':\\\\n case 'd':\\\\n return n * d;\\\\n case 'hours':\\\\n case 'hour':\\\\n case 'hrs':\\\\n case 'hr':\\\\n case 'h':\\\\n return n * h;\\\\n case 'minutes':\\\\n case 'minute':\\\\n case 'mins':\\\\n case 'min':\\\\n case 'm':\\\\n return n * m;\\\\n case 'seconds':\\\\n case 'second':\\\\n case 'secs':\\\\n case 'sec':\\\\n case 's':\\\\n return n * s;\\\\n case 'milliseconds':\\\\n case 'millisecond':\\\\n case 'msecs':\\\\n case 'msec':\\\\n case 'ms':\\\\n return n;\\\\n default:\\\\n return undefined;\\\\n }\\\\n}\\\\n\\\\n/**\\\\n * Short format for `ms`.\\\\n *\\\\n * @param {Number} ms\\\\n * @return {String}\\\\n * @api private\\\\n */\\\\n\\\\nfunction fmtShort(ms) {\\\\n var msAbs = Math.abs(ms);\\\\n if (msAbs >= d) {\\\\n return Math.round(ms / d) + 'd';\\\\n }\\\\n if (msAbs >= h) {\\\\n return Math.round(ms / h) + 'h';\\\\n }\\\\n if (msAbs >= m) {\\\\n return Math.round(ms / m) + 'm';\\\\n }\\\\n if (msAbs >= s) {\\\\n return Math.round(ms / s) + 's';\\\\n }\\\\n return ms + 'ms';\\\\n}\\\\n\\\\n/**\\\\n * Long format for `ms`.\\\\n *\\\\n * @param {Number} ms\\\\n * @return {String}\\\\n * @api private\\\\n */\\\\n\\\\nfunction fmtLong(ms) {\\\\n var msAbs = Math.abs(ms);\\\\n if (msAbs >= d) {\\\\n return plural(ms, msAbs, d, 'day');\\\\n }\\\\n if (msAbs >= h) {\\\\n return plural(ms, msAbs, h, 'hour');\\\\n }\\\\n if (msAbs >= m) {\\\\n return plural(ms, msAbs, m, 'minute');\\\\n }\\\\n if (msAbs >= s) {\\\\n return plural(ms, msAbs, s, 'second');\\\\n }\\\\n return ms + ' ms';\\\\n}\\\\n\\\\n/**\\\\n * Pluralization helper.\\\\n */\\\\n\\\\nfunction plural(ms, msAbs, n, name) {\\\\n var isPlural = msAbs >= n * 1.5;\\\\n return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\\\\n}\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/threads/node_modules/ms/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/tiny-worker/lib/index.js\\\":\\n/*!***********************************************!*\\\\\\n !*** ./node_modules/tiny-worker/lib/index.js ***!\\n \\\\***********************************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"/* WEBPACK VAR INJECTION */(function(__dirname) {\\\\n\\\\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\\\\\\\"value\\\\\\\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\\\\n\\\\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\\\\\\\"Cannot call a class as a function\\\\\\\"); } }\\\\n\\\\nvar path = __webpack_require__(/*! path */ \\\\\\\"path\\\\\\\"),\\\\n fork = __webpack_require__(/*! child_process */ \\\\\\\"child_process\\\\\\\").fork,\\\\n worker = path.join(__dirname, \\\\\\\"worker.js\\\\\\\"),\\\\n events = /^(error|message)$/,\\\\n defaultPorts = { inspect: 9229, debug: 5858 };\\\\nvar range = { min: 1, max: 300 };\\\\n\\\\nvar Worker = function () {\\\\n\\\\tfunction Worker(arg) {\\\\n\\\\t\\\\tvar _this = this;\\\\n\\\\n\\\\t\\\\tvar args = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];\\\\n\\\\t\\\\tvar options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : { cwd: process.cwd() };\\\\n\\\\n\\\\t\\\\t_classCallCheck(this, Worker);\\\\n\\\\n\\\\t\\\\tvar isfn = typeof arg === \\\\\\\"function\\\\\\\",\\\\n\\\\t\\\\t input = isfn ? arg.toString() : arg;\\\\n\\\\n\\\\t\\\\tif (!options.cwd) {\\\\n\\\\t\\\\t\\\\toptions.cwd = process.cwd();\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t//get all debug related parameters\\\\n\\\\t\\\\tvar debugVars = process.execArgv.filter(function (execArg) {\\\\n\\\\t\\\\t\\\\treturn (/(debug|inspect)/.test(execArg)\\\\n\\\\t\\\\t\\\\t);\\\\n\\\\t\\\\t});\\\\n\\\\t\\\\tif (debugVars.length > 0 && !options.noDebugRedirection) {\\\\n\\\\t\\\\t\\\\tif (!options.execArgv) {\\\\n\\\\t\\\\t\\\\t\\\\t//if no execArgs are given copy all arguments\\\\n\\\\t\\\\t\\\\t\\\\tdebugVars = Array.from(process.execArgv);\\\\n\\\\t\\\\t\\\\t\\\\toptions.execArgv = [];\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\tvar inspectIndex = debugVars.findIndex(function (debugArg) {\\\\n\\\\t\\\\t\\\\t\\\\t//get index of inspect parameter\\\\n\\\\t\\\\t\\\\t\\\\treturn (/^--inspect(-brk)?(=\\\\\\\\d+)?$/.test(debugArg)\\\\n\\\\t\\\\t\\\\t\\\\t);\\\\n\\\\t\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\t\\\\tvar debugIndex = debugVars.findIndex(function (debugArg) {\\\\n\\\\t\\\\t\\\\t\\\\t//get index of debug parameter\\\\n\\\\t\\\\t\\\\t\\\\treturn (/^--debug(-brk)?(=\\\\\\\\d+)?$/.test(debugArg)\\\\n\\\\t\\\\t\\\\t\\\\t);\\\\n\\\\t\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\t\\\\tvar portIndex = inspectIndex >= 0 ? 
inspectIndex : debugIndex; //get index of port, inspect has higher priority\\\\n\\\\n\\\\t\\\\t\\\\tif (portIndex >= 0) {\\\\n\\\\t\\\\t\\\\t\\\\tvar match = /^--(debug|inspect)(?:-brk)?(?:=(\\\\\\\\d+))?$/.exec(debugVars[portIndex]); //get port\\\\n\\\\t\\\\t\\\\t\\\\tvar port = defaultPorts[match[1]];\\\\n\\\\t\\\\t\\\\t\\\\tif (match[2]) {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tport = parseInt(match[2]);\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\t\\\\tdebugVars[portIndex] = \\\\\\\"--\\\\\\\" + match[1] + \\\\\\\"=\\\\\\\" + (port + range.min + Math.floor(Math.random() * (range.max - range.min))); //new parameter\\\\n\\\\n\\\\t\\\\t\\\\t\\\\tif (debugIndex >= 0 && debugIndex !== portIndex) {\\\\n\\\\t\\\\t\\\\t\\\\t\\\\t//remove \\\\\\\"-brk\\\\\\\" from debug if there\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tmatch = /^(--debug)(?:-brk)?(.*)/.exec(debugVars[debugIndex]);\\\\n\\\\t\\\\t\\\\t\\\\t\\\\tdebugVars[debugIndex] = match[1] + (match[2] ? match[2] : \\\\\\\"\\\\\\\");\\\\n\\\\t\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\toptions.execArgv = options.execArgv.concat(debugVars);\\\\n\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\tdelete options.noDebugRedirection;\\\\n\\\\n\\\\t\\\\tthis.child = fork(worker, args, options);\\\\n\\\\t\\\\tthis.onerror = undefined;\\\\n\\\\t\\\\tthis.onmessage = undefined;\\\\n\\\\n\\\\t\\\\tthis.child.on(\\\\\\\"error\\\\\\\", function (e) {\\\\n\\\\t\\\\t\\\\tif (_this.onerror) {\\\\n\\\\t\\\\t\\\\t\\\\t_this.onerror.call(_this, e);\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\tthis.child.on(\\\\\\\"message\\\\\\\", function (msg) {\\\\n\\\\t\\\\t\\\\tvar message = JSON.parse(msg);\\\\n\\\\t\\\\t\\\\tvar error = void 0;\\\\n\\\\n\\\\t\\\\t\\\\tif (!message.error && _this.onmessage) {\\\\n\\\\t\\\\t\\\\t\\\\t_this.onmessage.call(_this, message);\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\n\\\\t\\\\t\\\\tif (message.error && _this.onerror) {\\\\n\\\\t\\\\t\\\\t\\\\terror = new Error(message.error);\\\\n\\\\t\\\\t\\\\t\\\\terror.stack = message.stack;\\\\n\\\\n\\\\t\\\\t\\\\t\\\\t_this.onerror.call(_this, error);\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t});\\\\n\\\\n\\\\t\\\\tthis.child.send({ input: input, isfn: isfn, cwd: options.cwd, esm: options.esm });\\\\n\\\\t}\\\\n\\\\n\\\\t_createClass(Worker, [{\\\\n\\\\t\\\\tkey: \\\\\\\"addEventListener\\\\\\\",\\\\n\\\\t\\\\tvalue: function addEventListener(event, fn) {\\\\n\\\\t\\\\t\\\\tif (events.test(event)) {\\\\n\\\\t\\\\t\\\\t\\\\tthis[\\\\\\\"on\\\\\\\" + event] = fn;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t}\\\\n\\\\t}, {\\\\n\\\\t\\\\tkey: \\\\\\\"postMessage\\\\\\\",\\\\n\\\\t\\\\tvalue: function postMessage(msg) {\\\\n\\\\t\\\\t\\\\tthis.child.send(JSON.stringify({ data: msg }, null, 0));\\\\n\\\\t\\\\t}\\\\n\\\\t}, {\\\\n\\\\t\\\\tkey: \\\\\\\"terminate\\\\\\\",\\\\n\\\\t\\\\tvalue: function terminate() {\\\\n\\\\t\\\\t\\\\tthis.child.kill(\\\\\\\"SIGINT\\\\\\\");\\\\n\\\\t\\\\t}\\\\n\\\\t}], [{\\\\n\\\\t\\\\tkey: \\\\\\\"setRange\\\\\\\",\\\\n\\\\t\\\\tvalue: function setRange(min, max) {\\\\n\\\\t\\\\t\\\\tif (min >= max) {\\\\n\\\\t\\\\t\\\\t\\\\treturn false;\\\\n\\\\t\\\\t\\\\t}\\\\n\\\\t\\\\t\\\\trange.min = min;\\\\n\\\\t\\\\t\\\\trange.max = max;\\\\n\\\\n\\\\t\\\\t\\\\treturn true;\\\\n\\\\t\\\\t}\\\\n\\\\t}]);\\\\n\\\\n\\\\treturn Worker;\\\\n}();\\\\n\\\\nmodule.exports = Worker;\\\\n\\\\n/* WEBPACK VAR INJECTION */}.call(this, \\\\\\\"/\\\\\\\"))\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/tiny-worker/lib/index.js?\\\");\\n\\n/***/ }),\\n\\n/***/ 
\\\"./node_modules/txml/node_modules/through2/through2.js\\\":\\n/*!*************************************************************!*\\\\\\n !*** ./node_modules/txml/node_modules/through2/through2.js ***!\\n \\\\*************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"var Transform = __webpack_require__(/*! readable-stream */ \\\\\\\"./node_modules/readable-stream/readable.js\\\\\\\").Transform\\\\n , inherits = __webpack_require__(/*! inherits */ \\\\\\\"./node_modules/inherits/inherits.js\\\\\\\")\\\\n\\\\nfunction DestroyableTransform(opts) {\\\\n Transform.call(this, opts)\\\\n this._destroyed = false\\\\n}\\\\n\\\\ninherits(DestroyableTransform, Transform)\\\\n\\\\nDestroyableTransform.prototype.destroy = function(err) {\\\\n if (this._destroyed) return\\\\n this._destroyed = true\\\\n \\\\n var self = this\\\\n process.nextTick(function() {\\\\n if (err)\\\\n self.emit('error', err)\\\\n self.emit('close')\\\\n })\\\\n}\\\\n\\\\n// a noop _transform function\\\\nfunction noop (chunk, enc, callback) {\\\\n callback(null, chunk)\\\\n}\\\\n\\\\n\\\\n// create a new export function, used by both the main export and\\\\n// the .ctor export, contains common logic for dealing with arguments\\\\nfunction through2 (construct) {\\\\n return function (options, transform, flush) {\\\\n if (typeof options == 'function') {\\\\n flush = transform\\\\n transform = options\\\\n options = {}\\\\n }\\\\n\\\\n if (typeof transform != 'function')\\\\n transform = noop\\\\n\\\\n if (typeof flush != 'function')\\\\n flush = null\\\\n\\\\n return construct(options, transform, flush)\\\\n }\\\\n}\\\\n\\\\n\\\\n// main export, just make me a transform stream!\\\\nmodule.exports = through2(function (options, transform, flush) {\\\\n var t2 = new DestroyableTransform(options)\\\\n\\\\n t2._transform = transform\\\\n\\\\n if (flush)\\\\n t2._flush = flush\\\\n\\\\n return t2\\\\n})\\\\n\\\\n\\\\n// make me a reusable prototype that I can `new`, or implicitly `new`\\\\n// with a constructor call\\\\nmodule.exports.ctor = through2(function (options, transform, flush) {\\\\n function Through2 (override) {\\\\n if (!(this instanceof Through2))\\\\n return new Through2(override)\\\\n\\\\n this.options = Object.assign({}, options, override)\\\\n\\\\n DestroyableTransform.call(this, this.options)\\\\n }\\\\n\\\\n inherits(Through2, DestroyableTransform)\\\\n\\\\n Through2.prototype._transform = transform\\\\n\\\\n if (flush)\\\\n Through2.prototype._flush = flush\\\\n\\\\n return Through2\\\\n})\\\\n\\\\n\\\\nmodule.exports.obj = through2(function (options, transform, flush) {\\\\n var t2 = new DestroyableTransform(Object.assign({ objectMode: true, highWaterMark: 16 }, options))\\\\n\\\\n t2._transform = transform\\\\n\\\\n if (flush)\\\\n t2._flush = flush\\\\n\\\\n return t2\\\\n})\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/txml/node_modules/through2/through2.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/txml/tXml.js\\\":\\n/*!***********************************!*\\\\\\n !*** ./node_modules/txml/tXml.js ***!\\n \\\\***********************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"// ==ClosureCompiler==\\\\n// @output_file_name default.js\\\\n// @compilation_level SIMPLE_OPTIMIZATIONS\\\\n// ==/ClosureCompiler==\\\\n\\\\n/**\\\\n * @author: Tobias Nickel\\\\n * @created: 06.04.2015\\\\n * I needed a small xmlparser chat can be used in a worker.\\\\n */\\\\n\\\\n/**\\\\n * @typedef tNode \\\\n * @property {string} tagName \\\\n * @property {object} [attributes] \\\\n * @property {tNode|string|number[]} children \\\\n **/\\\\n\\\\n/**\\\\n * parseXML / html into a DOM Object. with no validation and some failur tolerance\\\\n * @param {string} S your XML to parse\\\\n * @param options {object} all other options:\\\\n * searchId {string} the id of a single element, that should be returned. using this will increase the speed rapidly\\\\n * filter {function} filter method, as you know it from Array.filter. but is goes throw the DOM.\\\\n\\\\n * @return {tNode[]}\\\\n */\\\\nfunction tXml(S, options) {\\\\n \\\\\\\"use strict\\\\\\\";\\\\n options = options || {};\\\\n\\\\n var pos = options.pos || 0;\\\\n\\\\n var openBracket = \\\\\\\"<\\\\\\\";\\\\n var openBracketCC = \\\\\\\"<\\\\\\\".charCodeAt(0);\\\\n var closeBracket = \\\\\\\">\\\\\\\";\\\\n var closeBracketCC = \\\\\\\">\\\\\\\".charCodeAt(0);\\\\n var minus = \\\\\\\"-\\\\\\\";\\\\n var minusCC = \\\\\\\"-\\\\\\\".charCodeAt(0);\\\\n var slash = \\\\\\\"/\\\\\\\";\\\\n var slashCC = \\\\\\\"/\\\\\\\".charCodeAt(0);\\\\n var exclamation = '!';\\\\n var exclamationCC = '!'.charCodeAt(0);\\\\n var singleQuote = \\\\\\\"'\\\\\\\";\\\\n var singleQuoteCC = \\\\\\\"'\\\\\\\".charCodeAt(0);\\\\n var doubleQuote = '\\\\\\\"';\\\\n var doubleQuoteCC = '\\\\\\\"'.charCodeAt(0);\\\\n\\\\n /**\\\\n * parsing a list of entries\\\\n */\\\\n function parseChildren() {\\\\n var children = [];\\\\n while (S[pos]) {\\\\n if (S.charCodeAt(pos) == openBracketCC) {\\\\n if (S.charCodeAt(pos + 1) === slashCC) {\\\\n pos = S.indexOf(closeBracket, pos);\\\\n if (pos + 1) pos += 1\\\\n return children;\\\\n } else if (S.charCodeAt(pos + 1) === exclamationCC) {\\\\n if (S.charCodeAt(pos + 2) == minusCC) {\\\\n //comment support\\\\n while (pos !== -1 && !(S.charCodeAt(pos) === closeBracketCC && S.charCodeAt(pos - 1) == minusCC && S.charCodeAt(pos - 2) == minusCC && pos != -1)) {\\\\n pos = S.indexOf(closeBracket, pos + 1);\\\\n }\\\\n if (pos === -1) {\\\\n pos = S.length\\\\n }\\\\n } else {\\\\n // doctypesupport\\\\n pos += 2;\\\\n while (S.charCodeAt(pos) !== closeBracketCC && S[pos]) {\\\\n pos++;\\\\n }\\\\n }\\\\n pos++;\\\\n continue;\\\\n }\\\\n var node = parseNode();\\\\n children.push(node);\\\\n } else {\\\\n var text = parseText()\\\\n if (text.trim().length > 0)\\\\n children.push(text);\\\\n pos++;\\\\n }\\\\n }\\\\n return children;\\\\n }\\\\n\\\\n /**\\\\n * returns the text outside of texts until the first '<'\\\\n */\\\\n function parseText() {\\\\n var start = pos;\\\\n pos = S.indexOf(openBracket, pos) - 1;\\\\n if (pos === -2)\\\\n pos = S.length;\\\\n return S.slice(start, pos + 1);\\\\n }\\\\n /**\\\\n * returns text until the first nonAlphebetic letter\\\\n */\\\\n var nameSpacer = '\\\\\\\\n\\\\\\\\t>/= ';\\\\n\\\\n function parseName() {\\\\n var start = pos;\\\\n while (nameSpacer.indexOf(S[pos]) === -1 && S[pos]) {\\\\n pos++;\\\\n }\\\\n return S.slice(start, pos);\\\\n }\\\\n /**\\\\n * is parsing a node, including tagName, Attributes and its children,\\\\n * to parse children it uses the parseChildren 
again, that makes the parsing recursive\\\\n */\\\\n var NoChildNodes = options.noChildNodes || ['img', 'br', 'input', 'meta', 'link'];\\\\n\\\\n function parseNode() {\\\\n pos++;\\\\n const tagName = parseName();\\\\n const attributes = {};\\\\n let children = [];\\\\n\\\\n // parsing attributes\\\\n while (S.charCodeAt(pos) !== closeBracketCC && S[pos]) {\\\\n var c = S.charCodeAt(pos);\\\\n if ((c > 64 && c < 91) || (c > 96 && c < 123)) {\\\\n //if('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'.indexOf(S[pos])!==-1 ){\\\\n var name = parseName();\\\\n // search beginning of the string\\\\n var code = S.charCodeAt(pos);\\\\n while (code && code !== singleQuoteCC && code !== doubleQuoteCC && !((code > 64 && code < 91) || (code > 96 && code < 123)) && code !== closeBracketCC) {\\\\n pos++;\\\\n code = S.charCodeAt(pos);\\\\n }\\\\n if (code === singleQuoteCC || code === doubleQuoteCC) {\\\\n var value = parseString();\\\\n if (pos === -1) {\\\\n return {\\\\n tagName,\\\\n attributes,\\\\n children,\\\\n };\\\\n }\\\\n } else {\\\\n value = null;\\\\n pos--;\\\\n }\\\\n attributes[name] = value;\\\\n }\\\\n pos++;\\\\n }\\\\n // optional parsing of children\\\\n if (S.charCodeAt(pos - 1) !== slashCC) {\\\\n if (tagName == \\\\\\\"script\\\\\\\") {\\\\n var start = pos + 1;\\\\n pos = S.indexOf('</script>', pos);\\\\n children = [S.slice(start, pos - 1)];\\\\n pos += 9;\\\\n } else if (tagName == \\\\\\\"style\\\\\\\") {\\\\n var start = pos + 1;\\\\n pos = S.indexOf('</style>', pos);\\\\n children = [S.slice(start, pos - 1)];\\\\n pos += 8;\\\\n } else if (NoChildNodes.indexOf(tagName) == -1) {\\\\n pos++;\\\\n children = parseChildren(name);\\\\n }\\\\n } else {\\\\n pos++;\\\\n }\\\\n return {\\\\n tagName,\\\\n attributes,\\\\n children,\\\\n };\\\\n }\\\\n\\\\n /**\\\\n * is parsing a string, that starts with a char and with the same usually ' or \\\\\\\"\\\\n */\\\\n\\\\n function parseString() {\\\\n var startChar = S[pos];\\\\n var startpos = ++pos;\\\\n pos = S.indexOf(startChar, startpos)\\\\n return S.slice(startpos, pos);\\\\n }\\\\n\\\\n /**\\\\n *\\\\n */\\\\n function findElements() {\\\\n var r = new RegExp('\\\\\\\\\\\\\\\\s' + options.attrName + '\\\\\\\\\\\\\\\\s*=[\\\\\\\\'\\\\\\\"]' + options.attrValue + '[\\\\\\\\'\\\\\\\"]').exec(S)\\\\n if (r) {\\\\n return r.index;\\\\n } else {\\\\n return -1;\\\\n }\\\\n }\\\\n\\\\n var out = null;\\\\n if (options.attrValue !== undefined) {\\\\n options.attrName = options.attrName || 'id';\\\\n var out = [];\\\\n\\\\n while ((pos = findElements()) !== -1) {\\\\n pos = S.lastIndexOf('<', pos);\\\\n if (pos !== -1) {\\\\n out.push(parseNode());\\\\n }\\\\n S = S.substr(pos);\\\\n pos = 0;\\\\n }\\\\n } else if (options.parseNode) {\\\\n out = parseNode()\\\\n } else {\\\\n out = parseChildren();\\\\n }\\\\n\\\\n if (options.filter) {\\\\n out = tXml.filter(out, options.filter);\\\\n }\\\\n\\\\n if (options.setPos) {\\\\n out.pos = pos;\\\\n }\\\\n\\\\n return out;\\\\n}\\\\n\\\\n/**\\\\n * transform the DomObject to an object that is like the object of PHPs simplexmp_load_*() methods.\\\\n * this format helps you to write that is more likely to keep your programm working, even if there a small changes in the XML schema.\\\\n * be aware, that it is not possible to reproduce the original xml from a simplified version, because the order of elements is not saved.\\\\n * therefore your programm will be more flexible and easyer to read.\\\\n *\\\\n * @param {tNode[]} children the childrenList\\\\n */\\\\ntXml.simplify = function 
simplify(children) {\\\\n var out = {};\\\\n if (!children.length) {\\\\n return '';\\\\n }\\\\n\\\\n if (children.length === 1 && typeof children[0] == 'string') {\\\\n return children[0];\\\\n }\\\\n // map each object\\\\n children.forEach(function(child) {\\\\n if (typeof child !== 'object') {\\\\n return;\\\\n }\\\\n if (!out[child.tagName])\\\\n out[child.tagName] = [];\\\\n var kids = tXml.simplify(child.children||[]);\\\\n out[child.tagName].push(kids);\\\\n if (child.attributes) {\\\\n kids._attributes = child.attributes;\\\\n }\\\\n });\\\\n\\\\n for (var i in out) {\\\\n if (out[i].length == 1) {\\\\n out[i] = out[i][0];\\\\n }\\\\n }\\\\n\\\\n return out;\\\\n};\\\\n\\\\n/**\\\\n * behaves the same way as Array.filter, if the filter method return true, the element is in the resultList\\\\n * @params children{Array} the children of a node\\\\n * @param f{function} the filter method\\\\n */\\\\ntXml.filter = function(children, f) {\\\\n var out = [];\\\\n children.forEach(function(child) {\\\\n if (typeof(child) === 'object' && f(child)) out.push(child);\\\\n if (child.children) {\\\\n var kids = tXml.filter(child.children, f);\\\\n out = out.concat(kids);\\\\n }\\\\n });\\\\n return out;\\\\n};\\\\n\\\\n/**\\\\n * stringify a previously parsed string object.\\\\n * this is useful,\\\\n * 1. to remove whitespaces\\\\n * 2. to recreate xml data, with some changed data.\\\\n * @param {tNode} O the object to Stringify\\\\n */\\\\ntXml.stringify = function TOMObjToXML(O) {\\\\n var out = '';\\\\n\\\\n function writeChildren(O) {\\\\n if (O)\\\\n for (var i = 0; i < O.length; i++) {\\\\n if (typeof O[i] == 'string') {\\\\n out += O[i].trim();\\\\n } else {\\\\n writeNode(O[i]);\\\\n }\\\\n }\\\\n }\\\\n\\\\n function writeNode(N) {\\\\n out += \\\\\\\"<\\\\\\\" + N.tagName;\\\\n for (var i in N.attributes) {\\\\n if (N.attributes[i] === null) {\\\\n out += ' ' + i;\\\\n } else if (N.attributes[i].indexOf('\\\\\\\"') === -1) {\\\\n out += ' ' + i + '=\\\\\\\"' + N.attributes[i].trim() + '\\\\\\\"';\\\\n } else {\\\\n out += ' ' + i + \\\\\\\"='\\\\\\\" + N.attributes[i].trim() + \\\\\\\"'\\\\\\\";\\\\n }\\\\n }\\\\n out += '>';\\\\n writeChildren(N.children);\\\\n out += '</' + N.tagName + '>';\\\\n }\\\\n writeChildren(O);\\\\n\\\\n return out;\\\\n};\\\\n\\\\n\\\\n/**\\\\n * use this method to read the textcontent, of some node.\\\\n * It is great if you have mixed content like:\\\\n * this text has some <b>big</b> text and a <a href=''>link</a>\\\\n * @return {string}\\\\n */\\\\ntXml.toContentString = function(tDom) {\\\\n if (Array.isArray(tDom)) {\\\\n var out = '';\\\\n tDom.forEach(function(e) {\\\\n out += ' ' + tXml.toContentString(e);\\\\n out = out.trim();\\\\n });\\\\n return out;\\\\n } else if (typeof tDom === 'object') {\\\\n return tXml.toContentString(tDom.children)\\\\n } else {\\\\n return ' ' + tDom;\\\\n }\\\\n};\\\\n\\\\ntXml.getElementById = function(S, id, simplified) {\\\\n var out = tXml(S, {\\\\n attrValue: id\\\\n });\\\\n return simplified ? tXml.simplify(out) : out[0];\\\\n};\\\\n/**\\\\n * A fast parsing method, that not realy finds by classname,\\\\n * more: the class attribute contains XXX\\\\n * @param\\\\n */\\\\ntXml.getElementsByClassName = function(S, classname, simplified) {\\\\n const out = tXml(S, {\\\\n attrName: 'class',\\\\n attrValue: '[a-zA-Z0-9\\\\\\\\-\\\\\\\\s ]*' + classname + '[a-zA-Z0-9\\\\\\\\-\\\\\\\\s ]*'\\\\n });\\\\n return simplified ? 
tXml.simplify(out) : out;\\\\n};\\\\n\\\\ntXml.parseStream = function(stream, offset) {\\\\n if (typeof offset === 'string') {\\\\n offset = offset.length + 2;\\\\n }\\\\n if (typeof stream === 'string') {\\\\n var fs = __webpack_require__(/*! fs */ \\\\\\\"fs\\\\\\\");\\\\n stream = fs.createReadStream(stream, { start: offset });\\\\n offset = 0;\\\\n }\\\\n\\\\n var position = offset;\\\\n var data = '';\\\\n stream.on('data', function(chunk) {\\\\n data += chunk;\\\\n var lastPos = 0;\\\\n do {\\\\n position = data.indexOf('<', position) + 1;\\\\n if(!position) {\\\\n position = lastPos;\\\\n return;\\\\n }\\\\n if (data[position + 1] === '/') {\\\\n position = position + 1;\\\\n lastPos = pos;\\\\n continue;\\\\n }\\\\n var res = tXml(data, { pos: position-1, parseNode: true, setPos: true });\\\\n position = res.pos;\\\\n if (position > (data.length - 1) || position < lastPos) {\\\\n data = data.slice(lastPos);\\\\n position = 0;\\\\n lastPos = 0;\\\\n return;\\\\n } else {\\\\n stream.emit('xml', res);\\\\n lastPos = position;\\\\n }\\\\n } while (1);\\\\n });\\\\n stream.on('end', function() {\\\\n console.log('end')\\\\n });\\\\n return stream;\\\\n}\\\\n\\\\ntXml.transformStream = function (offset) {\\\\n // require through here, so it will not get added to webpack/browserify\\\\n const through2 = __webpack_require__(/*! through2 */ \\\\\\\"./node_modules/txml/node_modules/through2/through2.js\\\\\\\");\\\\n if (typeof offset === 'string') {\\\\n offset = offset.length + 2;\\\\n }\\\\n\\\\n var position = offset || 0;\\\\n var data = '';\\\\n const stream = through2({ readableObjectMode: true }, function (chunk, enc, callback) {\\\\n data += chunk;\\\\n var lastPos = 0;\\\\n do {\\\\n position = data.indexOf('<', position) + 1;\\\\n if (!position) {\\\\n position = lastPos;\\\\n return callback();;\\\\n }\\\\n if (data[position + 1] === '/') {\\\\n position = position + 1;\\\\n lastPos = pos;\\\\n continue;\\\\n }\\\\n var res = tXml(data, { pos: position - 1, parseNode: true, setPos: true });\\\\n position = res.pos;\\\\n if (position > (data.length - 1) || position < lastPos) {\\\\n data = data.slice(lastPos);\\\\n position = 0;\\\\n lastPos = 0;\\\\n return callback();;\\\\n } else {\\\\n this.push(res);\\\\n lastPos = position;\\\\n }\\\\n } while (1);\\\\n callback();\\\\n });\\\\n\\\\n return stream;\\\\n}\\\\n\\\\nif (true) {\\\\n module.exports = tXml;\\\\n tXml.xml = tXml;\\\\n}\\\\n//console.clear();\\\\n//console.log('here:',tXml.getElementById('<some><xml id=\\\\\\\"test\\\\\\\">dada</xml><that id=\\\\\\\"test\\\\\\\">value</that></some>','test'));\\\\n//console.log('here:',tXml.getElementsByClassName('<some><xml id=\\\\\\\"test\\\\\\\" class=\\\\\\\"sdf test jsalf\\\\\\\">dada</xml><that id=\\\\\\\"test\\\\\\\">value</that></some>','test'));\\\\n\\\\n/*\\\\nconsole.clear();\\\\ntXml(d,'content');\\\\n //some testCode\\\\nvar s = document.body.innerHTML.toLowerCase();\\\\nvar start = new Date().getTime();\\\\nvar o = tXml(s,'content');\\\\nvar end = new Date().getTime();\\\\n//console.log(JSON.stringify(o,undefined,'\\\\\\\\t'));\\\\nconsole.log(\\\\\\\"MILLISECONDS\\\\\\\",end-start);\\\\nvar nodeCount=document.querySelectorAll('*').length;\\\\nconsole.log('node count',nodeCount);\\\\nconsole.log(\\\\\\\"speed:\\\\\\\",(1000/(end-start))*nodeCount,'Nodes / second')\\\\n//console.log(JSON.stringify(tXml('<html><head><title>testPage</title></head><body><h1>TestPage</h1><p>this is a <b>test</b>page</p></body></html>'),undefined,'\\\\\\\\t'));\\\\nvar p = new 
DOMParser();\\\\nvar s2='<body>'+s+'</body>'\\\\nvar start2= new Date().getTime();\\\\nvar o2 = p.parseFromString(s2,'text/html').querySelector('#content')\\\\nvar end2=new Date().getTime();\\\\nconsole.log(\\\\\\\"MILLISECONDS\\\\\\\",end2-start2);\\\\n// */\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/txml/tXml.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/util-deprecate/node.js\\\":\\n/*!*********************************************!*\\\\\\n !*** ./node_modules/util-deprecate/node.js ***!\\n \\\\*********************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\neval(\\\"\\\\n/**\\\\n * For Node.js, simply re-export the core `util.deprecate` function.\\\\n */\\\\n\\\\nmodule.exports = __webpack_require__(/*! util */ \\\\\\\"util\\\\\\\").deprecate;\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/util-deprecate/node.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./node_modules/worker-loader/dist/workers/InlineWorker.js\\\":\\n/*!*****************************************************************!*\\\\\\n !*** ./node_modules/worker-loader/dist/workers/InlineWorker.js ***!\\n \\\\*****************************************************************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\n// http://stackoverflow.com/questions/10343913/how-to-create-a-web-worker-from-a-string\\\\n\\\\nvar URL = window.URL || window.webkitURL;\\\\n\\\\nmodule.exports = function (content, url) {\\\\n try {\\\\n try {\\\\n var blob;\\\\n\\\\n try {\\\\n // BlobBuilder = Deprecated, but widely implemented\\\\n var BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder;\\\\n\\\\n blob = new BlobBuilder();\\\\n\\\\n blob.append(content);\\\\n\\\\n blob = blob.getBlob();\\\\n } catch (e) {\\\\n // The proposed API\\\\n blob = new Blob([content]);\\\\n }\\\\n\\\\n return new Worker(URL.createObjectURL(blob));\\\\n } catch (e) {\\\\n return new Worker('data:application/javascript,' + encodeURIComponent(content));\\\\n }\\\\n } catch (e) {\\\\n if (!url) {\\\\n throw Error('Inline worker is not supported');\\\\n }\\\\n\\\\n return new Worker(url);\\\\n }\\\\n};\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./node_modules/worker-loader/dist/workers/InlineWorker.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./src/parseData.js\\\":\\n/*!**************************!*\\\\\\n !*** ./src/parseData.js ***!\\n \\\\**************************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nObject.defineProperty(exports, \\\\\\\"__esModule\\\\\\\", {\\\\n value: true\\\\n});\\\\n\\\\nvar _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i[\\\\\\\"return\\\\\\\"]) _i[\\\\\\\"return\\\\\\\"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError(\\\\\\\"Invalid attempt to destructure non-iterable instance\\\\\\\"); } }; }();\\\\n\\\\nvar _typeof = typeof Symbol === \\\\\\\"function\\\\\\\" && typeof Symbol.iterator === \\\\\\\"symbol\\\\\\\" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === \\\\\\\"function\\\\\\\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \\\\\\\"symbol\\\\\\\" : typeof obj; };\\\\n\\\\nexports.default = parseData;\\\\n\\\\nvar _geotiff = __webpack_require__(/*! geotiff */ \\\\\\\"./node_modules/geotiff/src/geotiff.js\\\\\\\");\\\\n\\\\nvar _geotiffPalette = __webpack_require__(/*! geotiff-palette */ \\\\\\\"./node_modules/geotiff-palette/index.js\\\\\\\");\\\\n\\\\nvar _utils = __webpack_require__(/*! ./utils.js */ \\\\\\\"./src/utils.js\\\\\\\");\\\\n\\\\nfunction processResult(result, debug) {\\\\n var noDataValue = result.noDataValue;\\\\n var height = result.height;\\\\n var width = result.width;\\\\n\\\\n return new Promise(function (resolve, reject) {\\\\n result.maxs = [];\\\\n result.mins = [];\\\\n result.ranges = [];\\\\n\\\\n var max = void 0;var min = void 0;\\\\n\\\\n // console.log(\\\\\\\"starting to get min, max and ranges\\\\\\\");\\\\n for (var rasterIndex = 0; rasterIndex < result.numberOfRasters; rasterIndex++) {\\\\n var rows = result.values[rasterIndex];\\\\n if (debug) console.log('[georaster] rows:', rows);\\\\n\\\\n for (var rowIndex = 0; rowIndex < height; rowIndex++) {\\\\n var row = rows[rowIndex];\\\\n\\\\n for (var columnIndex = 0; columnIndex < width; columnIndex++) {\\\\n var value = row[columnIndex];\\\\n if (value != noDataValue && !isNaN(value)) {\\\\n if (typeof min === 'undefined' || value < min) min = value;else if (typeof max === 'undefined' || value > max) max = value;\\\\n }\\\\n }\\\\n }\\\\n\\\\n result.maxs.push(max);\\\\n result.mins.push(min);\\\\n result.ranges.push(max - min);\\\\n }\\\\n\\\\n resolve(result);\\\\n });\\\\n}\\\\n\\\\n/* We're not using async because trying to avoid dependency on babel's polyfill\\\\nThere can be conflicts when GeoRaster is used in another project that is also\\\\nusing @babel/polyfill */\\\\nfunction parseData(data, debug) {\\\\n return new Promise(function (resolve, reject) {\\\\n try {\\\\n if (debug) console.log('starting parseData with', data);\\\\n if (debug) console.log('\\\\\\\\tGeoTIFF:', typeof GeoTIFF === 'undefined' ? 
'undefined' : _typeof(GeoTIFF));\\\\n\\\\n var result = {};\\\\n\\\\n var height = void 0,\\\\n width = void 0;\\\\n\\\\n if (data.rasterType === 'object') {\\\\n result.values = data.data;\\\\n result.height = height = data.metadata.height || result.values[0].length;\\\\n result.width = width = data.metadata.width || result.values[0][0].length;\\\\n result.pixelHeight = data.metadata.pixelHeight;\\\\n result.pixelWidth = data.metadata.pixelWidth;\\\\n result.projection = data.metadata.projection;\\\\n result.xmin = data.metadata.xmin;\\\\n result.ymax = data.metadata.ymax;\\\\n result.noDataValue = data.metadata.noDataValue;\\\\n result.numberOfRasters = result.values.length;\\\\n result.xmax = result.xmin + result.width * result.pixelWidth;\\\\n result.ymin = result.ymax - result.height * result.pixelHeight;\\\\n result._data = null;\\\\n resolve(processResult(result));\\\\n } else if (data.rasterType === 'geotiff') {\\\\n result._data = data.data;\\\\n\\\\n var initFunction = _geotiff.fromArrayBuffer;\\\\n if (data.sourceType === 'url') {\\\\n initFunction = _geotiff.fromUrl;\\\\n }\\\\n\\\\n if (debug) console.log('data.rasterType is geotiff');\\\\n resolve(initFunction(data.data).then(function (geotiff) {\\\\n if (debug) console.log('geotiff:', geotiff);\\\\n return geotiff.getImage().then(function (image) {\\\\n try {\\\\n if (debug) console.log('image:', image);\\\\n\\\\n var fileDirectory = image.fileDirectory;\\\\n\\\\n var _image$getGeoKeys = image.getGeoKeys(),\\\\n GeographicTypeGeoKey = _image$getGeoKeys.GeographicTypeGeoKey,\\\\n ProjectedCSTypeGeoKey = _image$getGeoKeys.ProjectedCSTypeGeoKey;\\\\n\\\\n result.projection = ProjectedCSTypeGeoKey || GeographicTypeGeoKey;\\\\n if (debug) console.log('projection:', result.projection);\\\\n\\\\n result.height = height = image.getHeight();\\\\n if (debug) console.log('result.height:', result.height);\\\\n result.width = width = image.getWidth();\\\\n if (debug) console.log('result.width:', result.width);\\\\n\\\\n var _image$getResolution = image.getResolution(),\\\\n _image$getResolution2 = _slicedToArray(_image$getResolution, 2),\\\\n resolutionX = _image$getResolution2[0],\\\\n resolutionY = _image$getResolution2[1];\\\\n\\\\n result.pixelHeight = Math.abs(resolutionY);\\\\n result.pixelWidth = Math.abs(resolutionX);\\\\n\\\\n var _image$getOrigin = image.getOrigin(),\\\\n _image$getOrigin2 = _slicedToArray(_image$getOrigin, 2),\\\\n originX = _image$getOrigin2[0],\\\\n originY = _image$getOrigin2[1];\\\\n\\\\n result.xmin = originX;\\\\n result.xmax = result.xmin + width * result.pixelWidth;\\\\n result.ymax = originY;\\\\n result.ymin = result.ymax - height * result.pixelHeight;\\\\n\\\\n result.noDataValue = fileDirectory.GDAL_NODATA ? 
parseFloat(fileDirectory.GDAL_NODATA) : null;\\\\n\\\\n result.numberOfRasters = fileDirectory.SamplesPerPixel;\\\\n\\\\n if (fileDirectory.ColorMap) {\\\\n result.palette = (0, _geotiffPalette.getPalette)(image);\\\\n }\\\\n\\\\n if (data.sourceType !== 'url') {\\\\n return image.readRasters().then(function (rasters) {\\\\n result.values = rasters.map(function (valuesInOneDimension) {\\\\n return (0, _utils.unflatten)(valuesInOneDimension, { height: height, width: width });\\\\n });\\\\n return processResult(result);\\\\n });\\\\n } else {\\\\n return result;\\\\n }\\\\n } catch (error) {\\\\n reject(error);\\\\n console.error('[georaster] error parsing georaster:', error);\\\\n }\\\\n });\\\\n }));\\\\n }\\\\n } catch (error) {\\\\n reject(error);\\\\n console.error('[georaster] error parsing georaster:', error);\\\\n }\\\\n });\\\\n}\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./src/parseData.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./src/utils.js\\\":\\n/*!**********************!*\\\\\\n !*** ./src/utils.js ***!\\n \\\\**********************/\\n/*! no static exports found */\\n/***/ (function(module, exports, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"\\\\n\\\\nfunction countIn1D(array) {\\\\n return array.reduce(function (counts, value) {\\\\n if (counts[value] === undefined) {\\\\n counts[value] = 1;\\\\n } else {\\\\n counts[value]++;\\\\n }\\\\n return counts;\\\\n }, {});\\\\n}\\\\n\\\\nfunction countIn2D(rows) {\\\\n return rows.reduce(function (counts, values) {\\\\n values.forEach(function (value) {\\\\n if (counts[value] === undefined) {\\\\n counts[value] = 1;\\\\n } else {\\\\n counts[value]++;\\\\n }\\\\n });\\\\n return counts;\\\\n }, {});\\\\n}\\\\n\\\\n/*\\\\nTakes in a flattened one dimensional array\\\\nrepresenting two-dimensional pixel values\\\\nand returns an array of arrays.\\\\n*/\\\\nfunction unflatten(valuesInOneDimension, size) {\\\\n var height = size.height,\\\\n width = size.width;\\\\n\\\\n var valuesInTwoDimensions = [];\\\\n for (var y = 0; y < height; y++) {\\\\n var start = y * width;\\\\n var end = start + width;\\\\n valuesInTwoDimensions.push(valuesInOneDimension.slice(start, end));\\\\n }\\\\n return valuesInTwoDimensions;\\\\n}\\\\n\\\\nmodule.exports = { countIn1D: countIn1D, countIn2D: countIn2D, unflatten: unflatten };\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./src/utils.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"./src/worker.js\\\":\\n/*!***********************!*\\\\\\n !*** ./src/worker.js ***!\\n \\\\***********************/\\n/*! no exports provided */\\n/***/ (function(module, __webpack_exports__, __webpack_require__) {\\n\\n\\\"use strict\\\";\\neval(\\\"__webpack_require__.r(__webpack_exports__);\\\\n/* harmony import */ var _parseData_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./parseData.js */ \\\\\\\"./src/parseData.js\\\\\\\");\\\\n/* harmony import */ var _parseData_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_parseData_js__WEBPACK_IMPORTED_MODULE_0__);\\\\n\\\\n\\\\n// this is a bit of a hack to trick geotiff to work with web worker\\\\n// eslint-disable-next-line no-unused-vars\\\\nconst window = self;\\\\n\\\\nonmessage = e => {\\\\n const data = e.data;\\\\n _parseData_js__WEBPACK_IMPORTED_MODULE_0___default()(data).then(result => {\\\\n if (result._data instanceof ArrayBuffer) {\\\\n postMessage(result, [result._data]);\\\\n } else {\\\\n postMessage(result);\\\\n }\\\\n close();\\\\n });\\\\n};\\\\n\\\\n\\\\n//# sourceURL=webpack://GeoRaster/./src/worker.js?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"buffer\\\":\\n/*!*************************!*\\\\\\n !*** external \\\"buffer\\\" ***!\\n \\\\*************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"buffer\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22buffer%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"child_process\\\":\\n/*!********************************!*\\\\\\n !*** external \\\"child_process\\\" ***!\\n \\\\********************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"child_process\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22child_process%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"events\\\":\\n/*!*************************!*\\\\\\n !*** external \\\"events\\\" ***!\\n \\\\*************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"events\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22events%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"fs\\\":\\n/*!*********************!*\\\\\\n !*** external \\\"fs\\\" ***!\\n \\\\*********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"fs\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22fs%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"http\\\":\\n/*!***********************!*\\\\\\n !*** external \\\"http\\\" ***!\\n \\\\***********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"http\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22http%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"https\\\":\\n/*!************************!*\\\\\\n !*** external \\\"https\\\" ***!\\n \\\\************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"https\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22https%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"os\\\":\\n/*!*********************!*\\\\\\n !*** external \\\"os\\\" ***!\\n \\\\*********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"os\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22os%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"path\\\":\\n/*!***********************!*\\\\\\n !*** external \\\"path\\\" ***!\\n \\\\***********************/\\n/*! 
no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"path\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22path%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"stream\\\":\\n/*!*************************!*\\\\\\n !*** external \\\"stream\\\" ***!\\n \\\\*************************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"stream\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22stream%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"tty\\\":\\n/*!**********************!*\\\\\\n !*** external \\\"tty\\\" ***!\\n \\\\**********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"tty\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22tty%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"url\\\":\\n/*!**********************!*\\\\\\n !*** external \\\"url\\\" ***!\\n \\\\**********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"url\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22url%22?\\\");\\n\\n/***/ }),\\n\\n/***/ \\\"util\\\":\\n/*!***********************!*\\\\\\n !*** external \\\"util\\\" ***!\\n \\\\***********************/\\n/*! no static exports found */\\n/***/ (function(module, exports) {\\n\\neval(\\\"module.exports = require(\\\\\\\"util\\\\\\\");\\\\n\\\\n//# sourceURL=webpack://GeoRaster/external_%22util%22?\\\");\\n\\n/***/ })\\n\\n/******/ });\",null);};\n\n//# sourceURL=webpack://GeoRaster/./src/worker.js?");

/***/ }),

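/* The modules below are webpack "externals": when this bundle is loaded in
   Node.js, each one simply re-exports the corresponding core module via
   require() instead of bundling its source. */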
1412/***/ "buffer":
1413/*!*************************!*\
1414 !*** external "buffer" ***!
1415 \*************************/
1416/*! no static exports found */
1417/***/ (function(module, exports) {
1418
1419eval("module.exports = require(\"buffer\");\n\n//# sourceURL=webpack://GeoRaster/external_%22buffer%22?");
1420
1421/***/ }),
1422
1423/***/ "child_process":
1424/*!********************************!*\
1425 !*** external "child_process" ***!
1426 \********************************/
1427/*! no static exports found */
1428/***/ (function(module, exports) {
1429
1430eval("module.exports = require(\"child_process\");\n\n//# sourceURL=webpack://GeoRaster/external_%22child_process%22?");
1431
1432/***/ }),
1433
1434/***/ "events":
1435/*!*************************!*\
1436 !*** external "events" ***!
1437 \*************************/
1438/*! no static exports found */
1439/***/ (function(module, exports) {
1440
1441eval("module.exports = require(\"events\");\n\n//# sourceURL=webpack://GeoRaster/external_%22events%22?");
1442
1443/***/ }),
1444
1445/***/ "fs":
1446/*!*********************!*\
1447 !*** external "fs" ***!
1448 \*********************/
1449/*! no static exports found */
1450/***/ (function(module, exports) {
1451
1452eval("module.exports = require(\"fs\");\n\n//# sourceURL=webpack://GeoRaster/external_%22fs%22?");
1453
1454/***/ }),
1455
1456/***/ "http":
1457/*!***********************!*\
1458 !*** external "http" ***!
1459 \***********************/
1460/*! no static exports found */
1461/***/ (function(module, exports) {
1462
1463eval("module.exports = require(\"http\");\n\n//# sourceURL=webpack://GeoRaster/external_%22http%22?");
1464
1465/***/ }),
1466
1467/***/ "https":
1468/*!************************!*\
1469 !*** external "https" ***!
1470 \************************/
1471/*! no static exports found */
1472/***/ (function(module, exports) {
1473
1474eval("module.exports = require(\"https\");\n\n//# sourceURL=webpack://GeoRaster/external_%22https%22?");
1475
1476/***/ }),
1477
1478/***/ "os":
1479/*!*********************!*\
1480 !*** external "os" ***!
1481 \*********************/
1482/*! no static exports found */
1483/***/ (function(module, exports) {
1484
1485eval("module.exports = require(\"os\");\n\n//# sourceURL=webpack://GeoRaster/external_%22os%22?");
1486
1487/***/ }),
1488
1489/***/ "path":
1490/*!***********************!*\
1491 !*** external "path" ***!
1492 \***********************/
1493/*! no static exports found */
1494/***/ (function(module, exports) {
1495
1496eval("module.exports = require(\"path\");\n\n//# sourceURL=webpack://GeoRaster/external_%22path%22?");
1497
1498/***/ }),
1499
1500/***/ "stream":
1501/*!*************************!*\
1502 !*** external "stream" ***!
1503 \*************************/
1504/*! no static exports found */
1505/***/ (function(module, exports) {
1506
1507eval("module.exports = require(\"stream\");\n\n//# sourceURL=webpack://GeoRaster/external_%22stream%22?");
1508
1509/***/ }),
1510
1511/***/ "tty":
1512/*!**********************!*\
1513 !*** external "tty" ***!
1514 \**********************/
1515/*! no static exports found */
1516/***/ (function(module, exports) {
1517
1518eval("module.exports = require(\"tty\");\n\n//# sourceURL=webpack://GeoRaster/external_%22tty%22?");
1519
1520/***/ }),
1521
1522/***/ "url":
1523/*!**********************!*\
1524 !*** external "url" ***!
1525 \**********************/
1526/*! no static exports found */
1527/***/ (function(module, exports) {
1528
1529eval("module.exports = require(\"url\");\n\n//# sourceURL=webpack://GeoRaster/external_%22url%22?");
1530
1531/***/ }),
1532
1533/***/ "util":
1534/*!***********************!*\
1535 !*** external "util" ***!
1536 \***********************/
1537/*! no static exports found */
1538/***/ (function(module, exports) {
1539
1540eval("module.exports = require(\"util\");\n\n//# sourceURL=webpack://GeoRaster/external_%22util%22?");
1541
1542/***/ }),
1543
1544/***/ "zlib":
1545/*!***********************!*\
1546 !*** external "zlib" ***!
1547 \***********************/
1548/*! no static exports found */
1549/***/ (function(module, exports) {
1550
1551eval("module.exports = require(\"zlib\");\n\n//# sourceURL=webpack://GeoRaster/external_%22zlib%22?");
1552
1553/***/ })
1554
1555/******/ });
1556});